code
stringlengths 3
1.01M
| repo_name
stringlengths 5
116
| path
stringlengths 3
311
| language
stringclasses 30
values | license
stringclasses 15
values | size
int64 3
1.01M
|
|---|---|---|---|---|---|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.clients.problems;
import java.util.HashSet;
import java.util.Set;
import org.apache.flex.compiler.config.ICompilerProblemSettings;
import org.apache.flex.compiler.internal.config.ICompilerSettings;
import org.apache.flex.compiler.problems.AbstractDeprecatedAPIProblem;
import org.apache.flex.compiler.problems.ArrayCastProblem;
import org.apache.flex.compiler.problems.AssignmentInConditionalProblem;
import org.apache.flex.compiler.problems.ConstNotInitializedProblem;
import org.apache.flex.compiler.problems.DateCastProblem;
import org.apache.flex.compiler.problems.DeprecatedConfigurationOptionProblem;
import org.apache.flex.compiler.problems.DuplicateQNameInSourcePathProblem;
import org.apache.flex.compiler.problems.DuplicateVariableDefinitionProblem;
import org.apache.flex.compiler.problems.ICompilerProblem;
import org.apache.flex.compiler.problems.IllogicalComparionWithNaNProblem;
import org.apache.flex.compiler.problems.IllogicalComparisonWithUndefinedProblem;
import org.apache.flex.compiler.problems.InstanceOfProblem;
import org.apache.flex.compiler.problems.NullUsedWhereOtherExpectedProblem;
import org.apache.flex.compiler.problems.OverlappingSourcePathProblem;
import org.apache.flex.compiler.problems.ScopedToDefaultNamespaceProblem;
import org.apache.flex.compiler.problems.SemanticWarningProblem;
import org.apache.flex.compiler.problems.StrictSemanticsProblem;
import org.apache.flex.compiler.problems.VariableHasNoTypeDeclarationProblem;
/**
 * A problem filter that implements filtering based on the values of the following
* compiler options:
*
* -allow-source-path-overlap
* -show-actionscript-warnings
* -show-binding-warnings
* -show-deprecation-warnings
* -show-multiple-definition-warnings
* -show-unused-type-selector-warnings
* -strict
* -warn-assignment-within-conditional
* -warn-bad-array-cast
* -warn-bad-date-cast
 * -warn-bad-nan-comparison
 * -warn-bad-null-assignment
 * -warn-bad-undefined-comparison
* -warn-const-not-initialized
* -warn-duplicate-variable-def
* -warn-instanceof-changes
* -warn-missing-namespace-decl
* -warn-no-type-decl
*
*/
public class ProblemSettingsFilter implements IProblemFilter
{
    /**
     * The compiler option values that drive this filter's behavior.
     */
    private final ICompilerProblemSettings problemSettings;

    /**
     * Problem classes that are currently suppressed.  A problem is rejected
     * by {@link #accept} when it is an instance of any class in this set.
     */
    private final Set<Class<? extends ICompilerProblem>> suppressedClasses =
            new HashSet<Class<? extends ICompilerProblem>>();

    /**
     * Create a filter based on problem settings.
     *
     * @param problemSettings The settings, may not be null.
     */
    public ProblemSettingsFilter(ICompilerProblemSettings problemSettings)
    {
        assert problemSettings != null : "Settings may not be null";
        this.problemSettings = problemSettings;
        initializeFromSettings();
    }

    @Override
    public boolean accept(ICompilerProblem p)
    {
        for (Class<?> suppressed : suppressedClasses)
        {
            // Equivalent to (p instanceof <suppressed class>).
            if (suppressed.isInstance(p))
                return false;
        }

        // Not suppressed by any entry in the set, so accept it.
        return true;
    }

    /**
     * Populate the suppression set from the problem settings.
     */
    private void initializeFromSettings()
    {
        setShowStrictSemantics(problemSettings.isStrict());
        setShowWarnings(problemSettings.showActionScriptWarnings());
        setShowDeprecationWarnings(problemSettings.showDeprecationWarnings());
        setShowBindingWarnings(problemSettings.showBindingWarnings());
        setShowMultipleDefinitionWarnings(problemSettings.showMultipleDefinitionWarnings());
        setShowUnusedTypeSelectorWarnings(problemSettings.showUnusedTypeSelectorWarnings());
        setAllowSourcePathOverlapWarnings(problemSettings.isSourcePathOverlapAllowed());
        setShowActionScriptWarnings();
    }

    /**
     * Enable or disable strict semantics mode diagnostics.
     *
     * @param isStrict if true, strict semantics mode diagnostics will appear
     * in the filtered diagnostics.
     */
    private void setShowStrictSemantics(boolean isStrict)
    {
        setProblemEnabled(StrictSemanticsProblem.class, isStrict);
    }

    /**
     * Enable or disable semantic warnings.
     *
     * @param showWarnings if true, semantic warnings will appear in the
     * filtered diagnostics.
     */
    private void setShowWarnings(boolean showWarnings)
    {
        setProblemEnabled(SemanticWarningProblem.class, showWarnings);
    }

    /**
     * Suppress overlapping-source-path warnings when overlap is explicitly
     * allowed by -allow-source-path-overlap.
     */
    private void setAllowSourcePathOverlapWarnings(boolean isSourcePathOverlapAllowed)
    {
        setProblemEnabled(OverlappingSourcePathProblem.class, !isSourcePathOverlapAllowed);
    }

    private void setShowUnusedTypeSelectorWarnings(boolean showUnusedTypeSelectorWarnings)
    {
        // TODO: call setProblemEnabled() with warnings relating to
        // unused type selectors.
        // CMP-1422
    }

    private void setShowBindingWarnings(boolean showBindingWarnings)
    {
        // TODO: call setProblemEnabled() with problems relating to
        // binding warnings.
        // CMP-1424
    }

    /**
     * Enable or disable warnings about the same qualified name appearing
     * more than once on the source path.
     */
    private void setShowMultipleDefinitionWarnings(boolean b)
    {
        setProblemEnabled(DuplicateQNameInSourcePathProblem.class, b);
    }

    /**
     * Associate compiler problems with the compiler -warn-xxx options
     * that enable/disable them.
     */
    private void setShowActionScriptWarnings()
    {
        setShowActionScriptWarning(AssignmentInConditionalProblem.class,
                ICompilerSettings.WARN_ASSIGNMENT_WITHIN_CONDITIONAL);
        setShowActionScriptWarning(ArrayCastProblem.class,
                ICompilerSettings.WARN_BAD_ARRAY_CAST);
        setShowActionScriptWarning(DateCastProblem.class,
                ICompilerSettings.WARN_BAD_DATE_CAST);
        setShowActionScriptWarning(IllogicalComparionWithNaNProblem.class,
                ICompilerSettings.WARN_BAD_NAN_COMPARISON);
        setShowActionScriptWarning(NullUsedWhereOtherExpectedProblem.class,
                ICompilerSettings.WARN_BAD_NULL_ASSIGNMENT);
        setShowActionScriptWarning(IllogicalComparisonWithUndefinedProblem.class,
                ICompilerSettings.WARN_BAD_UNDEFINED_COMPARISON);
        setShowActionScriptWarning(ConstNotInitializedProblem.class,
                ICompilerSettings.WARN_CONST_NOT_INITIALIZED);
        setShowActionScriptWarning(DuplicateVariableDefinitionProblem.class,
                ICompilerSettings.WARN_DUPLICATE_VARIABLE_DEF);
        setShowActionScriptWarning(InstanceOfProblem.class,
                ICompilerSettings.WARN_INSTANCEOF_CHANGES);
        setShowActionScriptWarning(ScopedToDefaultNamespaceProblem.class,
                ICompilerSettings.WARN_MISSING_NAMESPACE_DECL);
        setShowActionScriptWarning(VariableHasNoTypeDeclarationProblem.class,
                ICompilerSettings.WARN_NO_TYPE_DECL);
    }

    /**
     * Hide/show ActionScript warnings based on compiler option settings.
     *
     * @param problem the problem class controlled by the option.
     * @param warningCode the -warn-xxx option code to consult.
     */
    private void setShowActionScriptWarning(Class<? extends ICompilerProblem> problem,
            int warningCode)
    {
        setProblemEnabled(problem,
                problemSettings.checkActionScriptWarning(warningCode));
    }

    /**
     * Enable or disable deprecation warnings.
     *
     * @param showWarnings if true, deprecation warnings will appear in the
     * filtered diagnostics.
     */
    private void setShowDeprecationWarnings(boolean showWarnings)
    {
        setProblemEnabled(AbstractDeprecatedAPIProblem.class, showWarnings);
        setProblemEnabled(DeprecatedConfigurationOptionProblem.class, showWarnings);
    }

    /**
     * Enable or disable a compiler problem class.
     *
     * @param clazz the problem class to enable or disable.
     * @param enable if true the problem is enabled and NOT filtered, otherwise
     * the problem is filtered.
     */
    private void setProblemEnabled(Class<? extends ICompilerProblem> clazz, boolean enable)
    {
        if (enable)
            suppressedClasses.remove(clazz);
        else
            suppressedClasses.add(clazz);
    }
}
|
adufilie/flex-falcon
|
compiler/src/org/apache/flex/compiler/clients/problems/ProblemSettingsFilter.java
|
Java
|
apache-2.0
| 8,966
|
##########################GO-LICENSE-START################################
# Copyright 2014 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################GO-LICENSE-END##################################
require File.join(File.dirname(__FILE__), "/../../../spec_helper")
# View spec for the admin jobs listing page (admin/jobs/index.html.erb).
# Verifies the table headers and the per-job rows: the job-name link,
# resources, the run-on-all-agents flag and the delete control with its
# confirmation prompt.
describe "admin/jobs/index.html.erb" do
include GoUtil, FormUI
before(:each) do
# Pipeline fixture with three jobs; job-1 gets two resources and
# run-on-all-agents=true so both rendering branches are exercised.
@pipeline = PipelineConfigMother.createPipelineConfig("pipeline-name", "stage-name", ["job-1", "job-2", "job-3"].to_java(java.lang.String))
job_1 = @pipeline.get(0).getJobs().get(0)
job_1.resources().add(Resource.new("resource-1"))
job_1.resources().add(Resource.new("resource-2"))
job_1.setRunOnAllAgents(true)
assigns[:pipeline] = @pipeline
assigns[:stage] = @pipeline.get(0)
assigns[:jobs] = @pipeline.get(0).getJobs()
assigns[:cruise_config] = @cruise_config = CruiseConfig.new
@cruise_config.addPipeline("group-1", @pipeline)
set(@cruise_config, "md5", "abc")
in_params(:stage_parent => "pipelines", :pipeline_name => "pipeline-name", :action => "index", :controller => "admin/jobs", :stage_name => "stage-name")
end
it "should show appropriate headers in table" do
render 'admin/jobs/index.html.erb'
response.body.should have_tag("table.list_table") do
with_tag("th", "Job")
with_tag("th", "Resources")
with_tag("th", "Run on all")
with_tag("th", "Remove")
end
end
it "should show job listing" do
# Pin the otherwise-random DOM id so the delete-control assertions are stable.
template.stub(:random_dom_id).and_return("delete_job_random_id")
render 'admin/jobs/index.html.erb'
response.body.should have_tag("table.list_table") do
# job-1: has resources and runs on all agents.
with_tag("tr") do
with_tag("td", "job-1") do
with_tag("a[href=?]", admin_tasks_listing_path(:job_name => "job-1", :current_tab => "tasks"))
end
with_tag("td", "resource-1, resource-2")
with_tag("td", "Yes")
with_tag("td span.icon_remove[id=?]", 'trigger_delete_job_random_id')
with_tag("script[type='text/javascript']", /Util.escapeDotsFromId\('trigger_delete_job_random_id #warning_prompt'\)/)
with_tag("div#warning_prompt[style='display:none;']", /Are you sure you want to delete the job 'job-1' \?/)
end
# job-2: no resources, not run-on-all.
with_tag("tr") do
with_tag("td", "job-2") do
with_tag("a[href=?]", admin_tasks_listing_path(:job_name => "job-2", :current_tab => "tasks"))
end
with_tag("td", "")
with_tag("td", "No")
with_tag("td span.icon_remove[id=?]", 'trigger_delete_job_random_id')
with_tag("script[type='text/javascript']", /Util.escapeDotsFromId\('trigger_delete_job_random_id #warning_prompt'\)/)
with_tag("div#warning_prompt[style='display:none;']", /Are you sure you want to delete the job 'job-2' \?/)
end
# job-3: same shape as job-2.
with_tag("tr") do
with_tag("td", "job-3")do
with_tag("a[href=?]", admin_tasks_listing_path(:job_name => "job-3", :current_tab => "tasks"))
end
with_tag("td", "")
with_tag("td", "No")
with_tag("td span.icon_remove[id=?]", 'trigger_delete_job_random_id')
with_tag("script[type='text/javascript']", /Util.escapeDotsFromId\('trigger_delete_job_random_id #warning_prompt'\)/)
with_tag("div#warning_prompt[style='display:none;']", /Are you sure you want to delete the job 'job-3' \?/)
end
end
# The page should render without any validation-error markup.
response.body.should_not have_tag(".fieldWithErrors")
response.body.should_not have_tag(".form_error")
end
end
|
turbine-rpowers/gocd-add-agent-sandbox-config
|
server/webapp/WEB-INF/rails/spec/views/admin/jobs/index_html_spec.rb
|
Ruby
|
apache-2.0
| 3,985
|
<?php
/**
* Contains all client objects for the AudienceSegmentService service.
*
* PHP version 5
*
* Copyright 2013, Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @package GoogleApiAdsDfp
* @subpackage v201211
* @category WebServices
* @copyright 2013, Google Inc. All Rights Reserved.
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache License,
* Version 2.0
* @author Vincent Tsao
*/
/** Required classes. **/
require_once "Google/Api/Ads/Dfp/Lib/DfpSoapClient.php";
if (!class_exists("ApiError", FALSE)) {
/**
 * The API error base class that provides details about an error that occurred
 * while processing a service request.
 *
 * <p>The OGNL field path is provided for parsers to identify the request data
 * element that may have caused the error.</p>
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ApiError {
/**
 * OGNL path of the request field associated with the error.
 * @access public
 * @var string
 */
public $fieldPath;
/**
 * The data that triggered the error.
 * @access public
 * @var string
 */
public $trigger;
/**
 * Human-readable description of the error.
 * @access public
 * @var string
 */
public $errorString;
/**
 * xsi:type discriminator, stored here because "ApiError.Type" is not a
 * valid PHP property name.
 * @access public
 * @var string
 */
public $ApiErrorType;
// Maps SOAP field names that are not valid PHP identifiers to the
// property that actually stores them.
private $_parameterMap = array (
"ApiError.Type" => "ApiErrorType",
);
/**
 * Provided for setting non-php-standard named variables
 * @param $var Variable name to set
 * @param $value Value to set
 */
// NOTE(review): unlike __get, this does not guard against $var missing
// from $_parameterMap — it assumes only mapped names are ever set.
public function __set($var, $value) { $this->{$this->_parameterMap[$var]} = $value; }
/**
 * Provided for getting non-php-standard named variables
 * @param $var Variable name to get.
 * @return mixed Variable value
 */
public function __get($var) {
if (!array_key_exists($var, $this->_parameterMap)) {
return NULL;
} else {
return $this->{$this->_parameterMap[$var]};
}
}
/**
 * Provided for getting non-php-standard named variables
 * @return array parameter map
 */
protected function getParameterMap() {
return $this->_parameterMap;
}
/**
 * Gets the namespace of this class
 * @return the namespace of this class
 */
public function getNamespace() {
return "https://www.google.com/apis/ads/publisher/v201211";
}
/**
 * Gets the xsi:type name of this class
 * @return the xsi:type name of this class
 */
public function getXsiTypeName() {
return "ApiError";
}
public function __construct($fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
$this->fieldPath = $fieldPath;
$this->trigger = $trigger;
$this->errorString = $errorString;
$this->ApiErrorType = $ApiErrorType;
}
}}
if (!class_exists("ApiVersionError", FALSE)) {
/**
 * Errors related to the usage of API versions.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ApiVersionError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsApiVersionErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "ApiVersionError";
  }
  /**
   * @param tnsApiVersionErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of calling it with no arguments and re-assigning them here; the
    // resulting object state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("ApplicationException", FALSE)) {
/**
 * Base class for exceptions.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ApplicationException {
/**
 * Error message of the exception.
 * @access public
 * @var string
 */
public $message;
/**
 * xsi:type discriminator, stored here because "ApplicationException.Type"
 * is not a valid PHP property name.
 * @access public
 * @var string
 */
public $ApplicationExceptionType;
// Maps SOAP field names that are not valid PHP identifiers to the
// property that actually stores them.
private $_parameterMap = array (
"ApplicationException.Type" => "ApplicationExceptionType",
);
/**
 * Provided for setting non-php-standard named variables
 * @param $var Variable name to set
 * @param $value Value to set
 */
// NOTE(review): assumes $var is a key of $_parameterMap; unknown names
// are not guarded against here (unlike __get).
public function __set($var, $value) { $this->{$this->_parameterMap[$var]} = $value; }
/**
 * Provided for getting non-php-standard named variables
 * @param $var Variable name to get.
 * @return mixed Variable value
 */
public function __get($var) {
if (!array_key_exists($var, $this->_parameterMap)) {
return NULL;
} else {
return $this->{$this->_parameterMap[$var]};
}
}
/**
 * Provided for getting non-php-standard named variables
 * @return array parameter map
 */
protected function getParameterMap() {
return $this->_parameterMap;
}
/**
 * Gets the namespace of this class
 * @return the namespace of this class
 */
public function getNamespace() {
return "https://www.google.com/apis/ads/publisher/v201211";
}
/**
 * Gets the xsi:type name of this class
 * @return the xsi:type name of this class
 */
public function getXsiTypeName() {
return "ApplicationException";
}
public function __construct($message = NULL, $ApplicationExceptionType = NULL) {
$this->message = $message;
$this->ApplicationExceptionType = $ApplicationExceptionType;
}
}}
if (!class_exists("AudienceSegmentPage", FALSE)) {
/**
 * Represents a page of {@link AudienceSegment} objects.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class AudienceSegmentPage {
/**
 * Total size of the full result set this page belongs to.
 * @access public
 * @var integer
 */
public $totalResultSetSize;
/**
 * Absolute index in the result set of this page's first result.
 * @access public
 * @var integer
 */
public $startIndex;
/**
 * The segments on this page.
 * @access public
 * @var AudienceSegment[]
 */
public $results;
/**
 * Gets the namespace of this class
 * @return the namespace of this class
 */
public function getNamespace() {
return "https://www.google.com/apis/ads/publisher/v201211";
}
/**
 * Gets the xsi:type name of this class
 * @return the xsi:type name of this class
 */
public function getXsiTypeName() {
return "AudienceSegmentPage";
}
public function __construct($totalResultSetSize = NULL, $startIndex = NULL, $results = NULL) {
$this->totalResultSetSize = $totalResultSetSize;
$this->startIndex = $startIndex;
$this->results = $results;
}
}}
if (!class_exists("Authentication", FALSE)) {
/**
 * A representation of the authentication protocols that can be used.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class Authentication {
/**
 * xsi:type discriminator, stored here because "Authentication.Type" is
 * not a valid PHP property name.
 * @access public
 * @var string
 */
public $AuthenticationType;
// Maps SOAP field names that are not valid PHP identifiers to the
// property that actually stores them.
private $_parameterMap = array (
"Authentication.Type" => "AuthenticationType",
);
/**
 * Provided for setting non-php-standard named variables
 * @param $var Variable name to set
 * @param $value Value to set
 */
// NOTE(review): assumes $var is a key of $_parameterMap; unknown names
// are not guarded against here (unlike __get).
public function __set($var, $value) { $this->{$this->_parameterMap[$var]} = $value; }
/**
 * Provided for getting non-php-standard named variables
 * @param $var Variable name to get.
 * @return mixed Variable value
 */
public function __get($var) {
if (!array_key_exists($var, $this->_parameterMap)) {
return NULL;
} else {
return $this->{$this->_parameterMap[$var]};
}
}
/**
 * Provided for getting non-php-standard named variables
 * @return array parameter map
 */
protected function getParameterMap() {
return $this->_parameterMap;
}
/**
 * Gets the namespace of this class
 * @return the namespace of this class
 */
public function getNamespace() {
return "https://www.google.com/apis/ads/publisher/v201211";
}
/**
 * Gets the xsi:type name of this class
 * @return the xsi:type name of this class
 */
public function getXsiTypeName() {
return "Authentication";
}
public function __construct($AuthenticationType = NULL) {
$this->AuthenticationType = $AuthenticationType;
}
}}
if (!class_exists("AuthenticationError", FALSE)) {
/**
 * An error for an exception that occurred when authenticating.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class AuthenticationError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsAuthenticationErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "AuthenticationError";
  }
  /**
   * @param tnsAuthenticationErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("ClientLogin", FALSE)) {
/**
 * The credentials for the {@code ClientLogin} API authentication protocol.
 *
 * See {@link http://code.google.com/apis/accounts/docs/AuthForInstalledApps.html}.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ClientLogin extends Authentication {
  /**
   * The ClientLogin authentication token.
   * @access public
   * @var string
   */
  public $token;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "ClientLogin";
  }
  /**
   * @param string $token the ClientLogin authentication token
   * @param string $AuthenticationType xsi:type discriminator
   */
  public function __construct($token = NULL, $AuthenticationType = NULL) {
    // Delegate $AuthenticationType to the Authentication constructor instead
    // of re-assigning it after a no-argument call; end state is identical.
    parent::__construct($AuthenticationType);
    $this->token = $token;
  }
}}
if (!class_exists("CommonError", FALSE)) {
/**
 * A place for common errors that can be used across services.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class CommonError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsCommonErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "CommonError";
  }
  /**
   * @param tnsCommonErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("Date", FALSE)) {
/**
 * Represents a date.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class Date {
/**
 * Year component of the date.
 * @access public
 * @var integer
 */
public $year;
/**
 * Month component of the date.
 * @access public
 * @var integer
 */
public $month;
/**
 * Day-of-month component of the date.
 * @access public
 * @var integer
 */
public $day;
/**
 * Gets the namespace of this class
 * @return the namespace of this class
 */
public function getNamespace() {
return "https://www.google.com/apis/ads/publisher/v201211";
}
/**
 * Gets the xsi:type name of this class
 * @return the xsi:type name of this class
 */
public function getXsiTypeName() {
return "Date";
}
public function __construct($year = NULL, $month = NULL, $day = NULL) {
$this->year = $year;
$this->month = $month;
$this->day = $day;
}
}}
if (!class_exists("DfpDateTime", FALSE)) {
/**
 * Represents a date combined with the time of day.
 *
 * The PHP class is named DfpDateTime (rather than the wire name "DateTime",
 * see getXsiTypeName()) — presumably to avoid clashing with PHP's built-in
 * DateTime class.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class DfpDateTime {
/**
 * The date portion.
 * @access public
 * @var Date
 */
public $date;
/**
 * Hour of day.
 * @access public
 * @var integer
 */
public $hour;
/**
 * Minute of the hour.
 * @access public
 * @var integer
 */
public $minute;
/**
 * Second of the minute.
 * @access public
 * @var integer
 */
public $second;
/**
 * Time zone identifier.
 * @access public
 * @var string
 */
public $timeZoneID;
/**
 * Gets the namespace of this class
 * @return the namespace of this class
 */
public function getNamespace() {
return "https://www.google.com/apis/ads/publisher/v201211";
}
/**
 * Gets the xsi:type name of this class
 * @return the xsi:type name of this class
 */
public function getXsiTypeName() {
return "DateTime";
}
public function __construct($date = NULL, $hour = NULL, $minute = NULL, $second = NULL, $timeZoneID = NULL) {
$this->date = $date;
$this->hour = $hour;
$this->minute = $minute;
$this->second = $second;
$this->timeZoneID = $timeZoneID;
}
}}
if (!class_exists("FeatureError", FALSE)) {
/**
 * Errors related to feature management. If you attempt using a feature that is not available to
 * the current network you'll receive a FeatureError with the missing feature as the trigger.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class FeatureError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsFeatureErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "FeatureError";
  }
  /**
   * @param tnsFeatureErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("InternalApiError", FALSE)) {
/**
 * Indicates that a server-side error has occurred. {@code InternalApiError}s
 * are generally not the result of an invalid request or message sent by the
 * client.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class InternalApiError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsInternalApiErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "InternalApiError";
  }
  /**
   * @param tnsInternalApiErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("NotNullError", FALSE)) {
/**
 * Caused by supplying a null value for an attribute that cannot be null.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class NotNullError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsNotNullErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "NotNullError";
  }
  /**
   * @param tnsNotNullErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("DfpOAuth", FALSE)) {
/**
 * The credentials for the {@code OAuth} authentication protocol.
 *
 * See {@link http://oauth.net/}.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class DfpOAuth extends Authentication {
  /**
   * The OAuth parameter string.
   * @access public
   * @var string
   */
  public $parameters;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "OAuth";
  }
  /**
   * @param string $parameters the OAuth parameter string
   * @param string $AuthenticationType xsi:type discriminator
   */
  public function __construct($parameters = NULL, $AuthenticationType = NULL) {
    // Delegate $AuthenticationType to the Authentication constructor instead
    // of re-assigning it after a no-argument call; end state is identical.
    parent::__construct($AuthenticationType);
    $this->parameters = $parameters;
  }
}}
if (!class_exists("PermissionError", FALSE)) {
/**
 * Errors related to incorrect permission.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class PermissionError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsPermissionErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "PermissionError";
  }
  /**
   * @param tnsPermissionErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("PublisherQueryLanguageContextError", FALSE)) {
/**
 * An error that occurs while executing a PQL query contained in
 * a {@link Statement} object.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class PublisherQueryLanguageContextError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsPublisherQueryLanguageContextErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "PublisherQueryLanguageContextError";
  }
  /**
   * @param tnsPublisherQueryLanguageContextErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("PublisherQueryLanguageSyntaxError", FALSE)) {
/**
 * An error that occurs while parsing a PQL query contained in a
 * {@link Statement} object.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class PublisherQueryLanguageSyntaxError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsPublisherQueryLanguageSyntaxErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "PublisherQueryLanguageSyntaxError";
  }
  /**
   * @param tnsPublisherQueryLanguageSyntaxErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("QuotaError", FALSE)) {
/**
 * Describes a client-side error on which a user is attempting
 * to perform an action to which they have no quota remaining.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class QuotaError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsQuotaErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "QuotaError";
  }
  /**
   * @param tnsQuotaErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("RequiredError", FALSE)) {
/**
 * Errors due to missing required field.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class RequiredError extends ApiError {
  /**
   * The reason for this error.
   * @access public
   * @var tnsRequiredErrorReason
   */
  public $reason;
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "RequiredError";
  }
  /**
   * @param tnsRequiredErrorReason $reason the reason for this error
   * @param string $fieldPath OGNL path of the offending request field
   * @param string $trigger data that triggered the error
   * @param string $errorString human-readable error description
   * @param string $ApiErrorType xsi:type discriminator
   */
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    // Delegate the shared ApiError fields to the parent constructor instead
    // of re-assigning them after a no-argument call; end state is identical.
    parent::__construct($fieldPath, $trigger, $errorString, $ApiErrorType);
    $this->reason = $reason;
  }
}}
if (!class_exists("AudienceSegment", FALSE)) {
/**
 * An {@link AudienceSegment} represents audience segment object.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class AudienceSegment {
  /**
   * @access public
   * @var integer
   */
  public $id;
  /**
   * @access public
   * @var string
   */
  public $name;
  /**
   * @access public
   * @var string
   */
  public $description;
  /**
   * @access public
   * @var tnsAudienceSegmentStatus
   */
  public $status;
  /**
   * @access public
   * @var integer
   */
  public $size;
  /**
   * @access public
   * @var string
   */
  public $AudienceSegmentType;
  private $_parameterMap = array (
    "AudienceSegment.Type" => "AudienceSegmentType",
  );
  /**
   * Provided for setting non-php-standard named variables
   * @param $var Variable name to set
   * @param $value Value to set
   */
  public function __set($var, $value) {
    // Guard the map lookup: writing through a missing entry would raise an
    // undefined-index notice and assign to a NULL-named property.
    if (array_key_exists($var, $this->_parameterMap)) {
      $this->{$this->_parameterMap[$var]} = $value;
    } else {
      // PHP's magic-method recursion guard makes this a plain dynamic
      // property write rather than re-invoking __set().
      $this->$var = $value;
    }
  }
  /**
   * Provided for getting non-php-standard named variables
   * @param $var Variable name to get.
   * @return mixed Variable value
   */
  public function __get($var) {
    if (!array_key_exists($var, $this->_parameterMap)) {
      return NULL;
    } else {
      return $this->{$this->_parameterMap[$var]};
    }
  }
  /**
   * Provided for getting non-php-standard named variables
   * @return array parameter map
   */
  protected function getParameterMap() {
    return $this->_parameterMap;
  }
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "AudienceSegment";
  }
  public function __construct($id = NULL, $name = NULL, $description = NULL, $status = NULL, $size = NULL, $AudienceSegmentType = NULL) {
    $this->id = $id;
    $this->name = $name;
    $this->description = $description;
    $this->status = $status;
    $this->size = $size;
    $this->AudienceSegmentType = $AudienceSegmentType;
  }
}}
if (!class_exists("ServerError", FALSE)) {
/**
 * Errors related to the server.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ServerError extends ApiError {
  /**
   * @access public
   * @var tnsServerErrorReason
   */
  public $reason;
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    parent::__construct();
    $this->ApiErrorType = $ApiErrorType;
    $this->errorString = $errorString;
    $this->trigger = $trigger;
    $this->fieldPath = $fieldPath;
    $this->reason = $reason;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "ServerError";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("SoapRequestHeader", FALSE)) {
/**
 * Represents the SOAP request header used by API requests.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class SoapRequestHeader {
  /**
   * @access public
   * @var string
   */
  public $networkCode;
  /**
   * @access public
   * @var string
   */
  public $applicationName;
  /**
   * @access public
   * @var Authentication
   */
  public $authentication;
  public function __construct($networkCode = NULL, $applicationName = NULL, $authentication = NULL) {
    $this->authentication = $authentication;
    $this->applicationName = $applicationName;
    $this->networkCode = $networkCode;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "SoapRequestHeader";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("SoapResponseHeader", FALSE)) {
/**
 * Represents the SOAP request header used by API responses.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class SoapResponseHeader {
  /**
   * @access public
   * @var string
   */
  public $requestId;
  /**
   * @access public
   * @var integer
   */
  public $responseTime;
  public function __construct($requestId = NULL, $responseTime = NULL) {
    $this->responseTime = $responseTime;
    $this->requestId = $requestId;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "SoapResponseHeader";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("Statement", FALSE)) {
/**
 * Captures the {@code WHERE}, {@code ORDER BY} and {@code LIMIT} clauses of a
 * PQL query. Statements are typically used to retrieve objects of a predefined
 * domain type, which makes a SELECT clause unnecessary.
 * <p>
 * Example query text: {@code "WHERE status = 'ACTIVE' ORDER BY id LIMIT 30"}.
 * </p>
 * <p>
 * Statements support bind variables — substitutes for literals that act as
 * input parameters to a PQL query, e.g. {@code "WHERE id = :idValue"}.
 * </p>
 * <p>
 * The LIKE keyword is also supported for partial and wildcard string
 * matching, e.g. {@code "WHERE name LIKE 'startswith%'"}.
 * </p>
 * For API versions newer than V201010, the value bound to {@code idValue}
 * must be an object of type {@link Value}: one of {@link NumberValue},
 * {@link TextValue} or {@link BooleanValue}.
 * <p>
 * For API versions up to and including V201010, the bound value must instead
 * be an object of type {@link Param}: one of {@link DoubleParam},
 * {@link LongParam} or {@link StringParam}.
 * </p>
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class Statement {
  /**
   * @access public
   * @var string
   */
  public $query;
  /**
   * @access public
   * @var String_ValueMapEntry[]
   */
  public $values;
  public function __construct($query = NULL, $values = NULL) {
    $this->values = $values;
    $this->query = $query;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "Statement";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("StatementError", FALSE)) {
/**
 * An error that occurs while parsing {@link Statement} objects.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class StatementError extends ApiError {
  /**
   * @access public
   * @var tnsStatementErrorReason
   */
  public $reason;
  public function __construct($reason = NULL, $fieldPath = NULL, $trigger = NULL, $errorString = NULL, $ApiErrorType = NULL) {
    parent::__construct();
    $this->ApiErrorType = $ApiErrorType;
    $this->errorString = $errorString;
    $this->trigger = $trigger;
    $this->fieldPath = $fieldPath;
    $this->reason = $reason;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "StatementError";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("String_ValueMapEntry", FALSE)) {
/**
 * This represents an entry in a map with a key of type String
 * and value of type Value.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class String_ValueMapEntry {
  /**
   * @access public
   * @var string
   */
  public $key;
  /**
   * @access public
   * @var Value
   */
  public $value;
  public function __construct($key = NULL, $value = NULL) {
    $this->value = $value;
    $this->key = $key;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "String_ValueMapEntry";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("Value", FALSE)) {
/**
 * {@code Value} represents a value.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class Value {
  /**
   * @access public
   * @var string
   */
  public $ValueType;
  private $_parameterMap = array (
    "Value.Type" => "ValueType",
  );
  /**
   * Provided for setting non-php-standard named variables
   * @param $var Variable name to set
   * @param $value Value to set
   */
  public function __set($var, $value) {
    // Guard the map lookup: writing through a missing entry would raise an
    // undefined-index notice and assign to a NULL-named property.
    if (array_key_exists($var, $this->_parameterMap)) {
      $this->{$this->_parameterMap[$var]} = $value;
    } else {
      // PHP's magic-method recursion guard makes this a plain dynamic
      // property write rather than re-invoking __set().
      $this->$var = $value;
    }
  }
  /**
   * Provided for getting non-php-standard named variables
   * @param $var Variable name to get.
   * @return mixed Variable value
   */
  public function __get($var) {
    if (!array_key_exists($var, $this->_parameterMap)) {
      return NULL;
    } else {
      return $this->{$this->_parameterMap[$var]};
    }
  }
  /**
   * Provided for getting non-php-standard named variables
   * @return array parameter map
   */
  protected function getParameterMap() {
    return $this->_parameterMap;
  }
  /**
   * Gets the namespace of this class
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
  /**
   * Gets the xsi:type name of this class
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "Value";
  }
  public function __construct($ValueType = NULL) {
    $this->ValueType = $ValueType;
  }
}}
if (!class_exists("ApiVersionErrorReason", FALSE)) {
/**
 * Indicates that the operation is not allowed in the version the request
 * was made in.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ApiVersionErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "ApiVersionError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("AuthenticationErrorReason", FALSE)) {
/**
 * The SOAP message contains a request header with an ambiguous definition
 * of the authentication header fields. This means either the {@code
 * authToken} and {@code oAuthToken} fields were both null or both were
 * specified. Exactly one value should be specified with each request.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class AuthenticationErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "AuthenticationError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("CommonErrorReason", FALSE)) {
/**
 * Describes reasons for common errors
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class CommonErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "CommonError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("FeatureErrorReason", FALSE)) {
/**
 * A feature is being used that is not enabled on the current network.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class FeatureErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "FeatureError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("InternalApiErrorReason", FALSE)) {
/**
 * The single reason for the internal API error.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class InternalApiErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "InternalApiError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("NotNullErrorReason", FALSE)) {
/**
 * The reasons for the target error.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class NotNullErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "NotNullError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("PermissionErrorReason", FALSE)) {
/**
 * Describes reasons for permission errors.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class PermissionErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "PermissionError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("PublisherQueryLanguageContextErrorReason", FALSE)) {
/**
 * The reasons for the target error.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class PublisherQueryLanguageContextErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "PublisherQueryLanguageContextError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("PublisherQueryLanguageSyntaxErrorReason", FALSE)) {
/**
 * The reasons for the target error.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class PublisherQueryLanguageSyntaxErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "PublisherQueryLanguageSyntaxError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("QuotaErrorReason", FALSE)) {
/**
 * The number of requests made per second is too high and has exceeded the
 * allowable limit. The recommended approach to handle this error is to wait
 * about 5 seconds and then retry the request. Note that this does not
 * guarantee the request will succeed. If it fails again, try increasing the
 * wait time.
 * <p>
 * Another way to mitigate this error is to limit requests to 2 per second.
 * Once again this does not guarantee that every request will succeed, but
 * may help reduce the number of times you receive this error.
 * </p>
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class QuotaErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "QuotaError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("RequiredErrorReason", FALSE)) {
/**
 * The reasons for the target error.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class RequiredErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "RequiredError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("AudienceSegmentStatus", FALSE)) {
/**
 * Specifies the statuses for {@link AudienceSegment} objects.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class AudienceSegmentStatus {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "AudienceSegment.Status";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("ServerErrorReason", FALSE)) {
/**
 * Describes reasons for server errors
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ServerErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "ServerError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("StatementErrorReason", FALSE)) {
/**
 * A bind variable has not been bound to a value.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class StatementErrorReason {
  public function __construct() {
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "StatementError.Reason";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("getAudienceSegmentsByStatement", FALSE)) {
/**
 * Request wrapper for the getAudienceSegmentsByStatement operation.
 *
 * Gets an {@link AudienceSegmentPage} of {@link AudienceSegment} objects that
 * satisfy the given {@link Statement#query}. Supported PQL filter properties:
 * {@code id} ({@link AudienceSegment#id}),
 * {@code name} ({@link AudienceSegment#name}),
 * {@code status} ({@link AudienceSegment#status}),
 * {@code type} ({@link AudienceSegment#type}),
 * {@code size} ({@link AudienceSegment#size}),
 * {@code dataProviderName} ({@link AudienceSegmentDataProvider#name}),
 * {@code approvalStatus} ({@link ThirdPartyAudienceSegment#approvalStatus}),
 * {@code cost} ({@link ThirdPartyAudienceSegment#cost}),
 * {@code startDateTime} ({@link ThirdPartyAudienceSegment#startDateTime}),
 * {@code endDateTime} ({@link ThirdPartyAudienceSegment#endDateTime}).
 *
 * @param filterStatement a Publisher Query Language statement used to filter a set of audience
 * segments
 * @return the audience segments that match the given filter
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class getAudienceSegmentsByStatement {
  /**
   * @access public
   * @var Statement
   */
  public $filterStatement;
  public function __construct($filterStatement = NULL) {
    $this->filterStatement = $filterStatement;
  }
  /**
   * Returns the xsi:type name of this class (none for operation wrappers).
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("getAudienceSegmentsByStatementResponse", FALSE)) {
/**
 * Response wrapper for the getAudienceSegmentsByStatement operation.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class getAudienceSegmentsByStatementResponse {
  /**
   * @access public
   * @var AudienceSegmentPage
   */
  public $rval;
  public function __construct($rval = NULL) {
    $this->rval = $rval;
  }
  /**
   * Returns the xsi:type name of this class (none for operation wrappers).
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("ApiException", FALSE)) {
/**
 * Exception class for holding a list of service errors.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class ApiException extends ApplicationException {
  /**
   * @access public
   * @var ApiError[]
   */
  public $errors;
  public function __construct($errors = NULL, $message = NULL, $ApplicationExceptionType = NULL) {
    parent::__construct();
    $this->ApplicationExceptionType = $ApplicationExceptionType;
    $this->message = $message;
    $this->errors = $errors;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "ApiException";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("BooleanValue", FALSE)) {
/**
 * Contains a boolean value.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class BooleanValue extends Value {
  /**
   * @access public
   * @var boolean
   */
  public $value;
  public function __construct($value = NULL, $ValueType = NULL) {
    parent::__construct();
    $this->ValueType = $ValueType;
    $this->value = $value;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "BooleanValue";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("DateTimeValue", FALSE)) {
/**
 * Contains a date-time value.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class DateTimeValue extends Value {
  /**
   * @access public
   * @var DateTime
   */
  public $value;
  public function __construct($value = NULL, $ValueType = NULL) {
    parent::__construct();
    $this->ValueType = $ValueType;
    $this->value = $value;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "DateTimeValue";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("NumberValue", FALSE)) {
/**
 * Contains a numeric value.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class NumberValue extends Value {
  /**
   * @access public
   * @var string
   */
  public $value;
  public function __construct($value = NULL, $ValueType = NULL) {
    parent::__construct();
    $this->ValueType = $ValueType;
    $this->value = $value;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "NumberValue";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("TextValue", FALSE)) {
/**
 * Contains a string value.
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 */
class TextValue extends Value {
  /**
   * @access public
   * @var string
   */
  public $value;
  public function __construct($value = NULL, $ValueType = NULL) {
    parent::__construct();
    $this->ValueType = $ValueType;
    $this->value = $value;
  }
  /**
   * Returns the xsi:type name of this class.
   * @return the xsi:type name of this class
   */
  public function getXsiTypeName() {
    return "TextValue";
  }
  /**
   * Returns the XML namespace of this class.
   * @return the namespace of this class
   */
  public function getNamespace() {
    return "https://www.google.com/apis/ads/publisher/v201211";
  }
}}
if (!class_exists("AudienceSegmentService", FALSE)) {
/**
 * AudienceSegmentService
 * @package GoogleApiAdsDfp
 * @subpackage v201211
 * @author WSDLInterpreter
 */
class AudienceSegmentService extends DfpSoapClient {
  /**
   * Default class map for wsdl=>php
   * @access private
   * @var array
   */
  public static $classmap = array(
    "DateTime" => "DfpDateTime",
    "Location" => "DfpLocation",
    "OAuth" => "DfpOAuth",
    "ApiError" => "ApiError",
    "ApiException" => "ApiException",
    "ApplicationException" => "ApplicationException",
    "ApiVersionError" => "ApiVersionError",
    "AudienceSegmentPage" => "AudienceSegmentPage",
    "Authentication" => "Authentication",
    "AuthenticationError" => "AuthenticationError",
    "BooleanValue" => "BooleanValue",
    "Value" => "Value",
    "ClientLogin" => "ClientLogin",
    "CommonError" => "CommonError",
    "Date" => "Date",
    "DateTimeValue" => "DateTimeValue",
    "FeatureError" => "FeatureError",
    "InternalApiError" => "InternalApiError",
    "NotNullError" => "NotNullError",
    "NumberValue" => "NumberValue",
    "PermissionError" => "PermissionError",
    "PublisherQueryLanguageContextError" => "PublisherQueryLanguageContextError",
    "PublisherQueryLanguageSyntaxError" => "PublisherQueryLanguageSyntaxError",
    "QuotaError" => "QuotaError",
    "RequiredError" => "RequiredError",
    "AudienceSegment" => "AudienceSegment",
    "ServerError" => "ServerError",
    "SoapRequestHeader" => "SoapRequestHeader",
    "SoapResponseHeader" => "SoapResponseHeader",
    "Statement" => "Statement",
    "StatementError" => "StatementError",
    "String_ValueMapEntry" => "String_ValueMapEntry",
    "TextValue" => "TextValue",
    "ApiVersionError.Reason" => "ApiVersionErrorReason",
    "AuthenticationError.Reason" => "AuthenticationErrorReason",
    "CommonError.Reason" => "CommonErrorReason",
    "FeatureError.Reason" => "FeatureErrorReason",
    "InternalApiError.Reason" => "InternalApiErrorReason",
    "NotNullError.Reason" => "NotNullErrorReason",
    "PermissionError.Reason" => "PermissionErrorReason",
    "PublisherQueryLanguageContextError.Reason" => "PublisherQueryLanguageContextErrorReason",
    "PublisherQueryLanguageSyntaxError.Reason" => "PublisherQueryLanguageSyntaxErrorReason",
    "QuotaError.Reason" => "QuotaErrorReason",
    "RequiredError.Reason" => "RequiredErrorReason",
    "AudienceSegment.Status" => "AudienceSegmentStatus",
    "ServerError.Reason" => "ServerErrorReason",
    "StatementError.Reason" => "StatementErrorReason",
    "getAudienceSegmentsByStatement" => "getAudienceSegmentsByStatement",
    "getAudienceSegmentsByStatementResponse" => "getAudienceSegmentsByStatementResponse",
  );
  /**
   * The endpoint of the service
   * @var string
   */
  public static $endpoint = "https://www.google.com/apis/ads/publisher/v201211/AudienceSegmentService";
  /**
   * Constructor using wsdl location and options array
   * @param string $wsdl WSDL location for this service
   * @param array $options Options for the SoapClient
   * @param mixed $user the user context passed through to DfpSoapClient
   */
  public function __construct($wsdl = null, $options = null, $user = null) {
    // All parameters default to null: declaring an optional parameter
    // ($wsdl) before required ones is deprecated as of PHP 8.0.
    $options["classmap"] = self::$classmap;
    parent::__construct($wsdl, $options, $user, 'AudienceSegmentService', 'https://www.google.com/apis/ads/publisher/v201211');
  }
  /**
   * Gets an {@link AudienceSegmentPage} of {@link AudienceSegment} objects that satisfy the given
   * {@link Statement#query}. The following fields are supported for filtering:
   *
   * <table>
   * <tr>
   * <th scope="col">PQL Property</th>
   * <th scope="col">Object Property</th>
   * </tr>
   * <tr>
   * <td>{@code id}</td>
   * <td>{@link AudienceSegment#id}</td>
   * </tr>
   * <tr>
   * <td>{@code name}</td>
   * <td>{@link AudienceSegment#name}</td>
   * </tr>
   * <tr>
   * <td>{@code status}</td>
   * <td>{@link AudienceSegment#status}</td>
   * </tr>
   * <tr>
   * <td>{@code type}</td>
   * <td>{@link AudienceSegment#type}</td>
   * </tr>
   * <tr>
   * <td>{@code size}</td>
   * <td>{@link AudienceSegment#size}</td>
   * </tr>
   * <tr>
   * <td>{@code dataProviderName}</td>
   * <td>{@link AudienceSegmentDataProvider#name}</td>
   * </tr>
   * <tr>
   * <td>{@code approvalStatus}</td>
   * <td>{@link ThirdPartyAudienceSegment#approvalStatus}</td>
   * </tr>
   * <tr>
   * <td>{@code cost}</td>
   * <td>{@link ThirdPartyAudienceSegment#cost}</td>
   * </tr>
   * <tr>
   * <td>{@code startDateTime}</td>
   * <td>{@link ThirdPartyAudienceSegment#startDateTime}</td>
   * </tr>
   * <tr>
   * <td>{@code endDateTime}</td>
   * <td>{@link ThirdPartyAudienceSegment#endDateTime}</td>
   * </tr>
   * </table>
   *
   * @param filterStatement a Publisher Query Language statement used to filter a set of audience
   * segments
   * @return the audience segments that match the given filter
   */
  public function getAudienceSegmentsByStatement($filterStatement) {
    $arg = new getAudienceSegmentsByStatement($filterStatement);
    $result = $this->__soapCall("getAudienceSegmentsByStatement", array($arg));
    return $result->rval;
  }
}}
|
venkyanthony/google-api-dfp-php
|
src/Google/Api/Ads/Dfp/v201211/AudienceSegmentService.php
|
PHP
|
apache-2.0
| 54,197
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<!-- Documenting T:NAnt.Compression.Tasks.UnZipTask-->
<head>
<meta http-equiv="Content-Language" content="en-ca" />
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<link rel="stylesheet" type="text/css" href="../style.css" />
<title><unzip> Task</title>
</head>
<body>
<table width="100%" border="0" cellspacing="0" cellpadding="2" class="NavBar">
<tr>
<td class="NavBar-Cell">
<a href="http://nant.sourceforge.net">
<b>NAnt</b>
</a>
<img alt="->" src="../images/arrow.gif" />
<a href="../index.html">Help</a>
<img alt="->" src="../images/arrow.gif" />
<a href="../tasks/index.html">Task Reference</a>
<img alt="->" src="../images/arrow.gif" /> <unzip></td>
<td class="NavBar-Cell" align="right">
v0.91-alpha2</td>
</tr>
</table>
<h1><unzip></h1>
<p class="topicstatus">[This is preliminary documentation and subject to change.]</p>
<p> Extracts files from a zip archive. </p>
<p> Uses <a href="http://www.icsharpcode.net/OpenSource/SharpZipLib/">#ziplib</a> (SharpZipLib), an open source Zip/GZip library written entirely in C#. </p>
<h3>Parameters</h3>
<div class="table">
<table>
<tr>
<th>Attribute</th>
<th style="text-align: center;">Type</th>
<th>Description</th>
<th style="text-align: center;">Required</th>
</tr>
<tr>
<td valign="top" class="required">zipfile</td>
<td style="text-align: center;">file</td>
<td> The archive file to expand. </td>
<td style="text-align: center;">True</td>
</tr>
<tr>
<td valign="top">encoding</td>
<td style="text-align: center;">
<a href="http://msdn.microsoft.com/library/default.asp?url=/library/en-us/cpref/html/frlrfSystemTextEncodingClassTopic.asp">Encoding</a>
</td>
<td> The character encoding that has been used for filenames inside the zip file. The default is the system's OEM code page. </td>
<td style="text-align: center;">False</td>
</tr>
<tr>
<td valign="top">todir</td>
<td style="text-align: center;">directory</td>
<td> The directory where the expanded files should be stored. The default is the project base directory. </td>
<td style="text-align: center;">False</td>
</tr>
<tr>
<td valign="top">failonerror</td>
<td style="text-align: center;">bool</td>
<td> Determines if task failure stops the build, or is just reported. The default is <b>true</b>. </td>
<td style="text-align: center;">False</td>
</tr>
<tr>
<td valign="top">if</td>
<td style="text-align: center;">bool</td>
<td> If <b>true</b> then the task will be executed; otherwise, skipped. The default is <b>true</b>. </td>
<td style="text-align: center;">False</td>
</tr>
<tr>
<td valign="top">overwrite</td>
<td style="text-align: center;">bool</td>
<td> Overwrite files, even if they are newer than the corresponding entries in the archive. The default is <b>true</b>. </td>
<td style="text-align: center;">False</td>
</tr>
<tr>
<td valign="top">unless</td>
<td style="text-align: center;">bool</td>
<td> Opposite of <code>if</code>. If <b>false</b> then the task will be executed; otherwise, skipped. The default is <b>false</b>. </td>
<td style="text-align: center;">False</td>
</tr>
<tr>
<td valign="top">verbose</td>
<td style="text-align: center;">bool</td>
<td> Determines whether the task should report detailed build log messages. The default is <b>false</b>. </td>
<td style="text-align: center;">False</td>
</tr>
</table>
</div>
<h3>Examples</h3>
<ul class="examples">
<li>
<p>Extracts all the files from the zip, preserving the directory structure.</p>
<pre class="code">
<unzip zipfile="backup.zip"/>
</pre>
</li>
</ul>
<h3>Requirements</h3>
<div style="margin-left: 20px;">
<b>Assembly:</b> NAnt.CompressionTasks (0.91.3881.0)
</div>
</body>
</html>
|
secdec/codepulse
|
dotnet-tracer/build/nant-0.91-alpha2/doc/help/tasks/unzip.html
|
HTML
|
apache-2.0
| 4,718
|
<!--
Off-screen preview of the next card in order to pre-determine the target
height for the card content transition animation.
-->
<div class="conversation-skin-future-tutor-card" aria-hidden="true">
<div class="conversation-skin-tutor-card-content">
<div class="conversation-skin-tutor-card-top-section">
<div class="conversation-skin-tutor-card-top-content"
angular-html-bind="upcomingContentHtml">
</div>
</div>
<div ng-if="upcomingInlineInteractionHtml"
class="conversation-skin-inline-interaction"
angular-html-bind="upcomingInlineInteractionHtml">
</div>
<div ng-if="!upcomingInlineInteractionHtml"
class="conversation-skin-inline-interaction">
<div ng-if="!isViewportNarrow()" style="padding: 6px 12px;">
<[upcomingInteractionInstructions]>
<i class="material-icons md-18" style="position: relative; top: 3px;"></i>
</div>
<div ng-if="isViewportNarrow()">
<md-button class="instructions-button">
<[upcomingInteractionInstructions]>
</md-button>
</div>
</div>
</div>
</div>
<div role="main" style="margin: 0 auto; position: relative;" ng-if="hasFullyLoaded">
<progress-dots num-dots="numProgressDots" class="conversation-skin-progress-dots">
</progress-dots>
<div class="conversation-skin-cards-container conversation-skin-animate-cards"
ng-class="{'animate-to-two-cards': isAnimatingToTwoCards, 'animate-to-one-card': isAnimatingToOneCard}">
<div class="conversation-skin-tutor-card-container"
ng-show="!isViewportNarrow() || isScreenNarrowAndShowingTutorCard()"
ng-class="{'conversation-skin-animate-tutor-card-on-narrow':
isViewportNarrow() && isCurrentSupplementalCardNonempty(),
'conversation-skin-tutor-card-alone':
isViewportNarrow() && !isCurrentSupplementalCardNonempty()}">
<tutor-card on-click-continue-button="showUpcomingCard()"
on-submit-answer="submitAnswer(answer, rulesService)"
on-dismiss="showSupplementalCardIfScreenIsNarrow()"
start-card-change-animation="startCardChangeAnimation">
</tutor-card>
</div>
<div ng-if="isCurrentSupplementalCardNonempty()"
class="conversation-skin-supplemental-card-container">
<img class="conversation-skin-oppia-avatar show-tutor-card"
ng-if="isScreenNarrowAndShowingSupplementalCard()"
ng-src="<[::OPPIA_AVATAR_IMAGE_URL]>"
ng-click="showTutorCardIfScreenIsNarrow()" alt="">
<div ng-if="isScreenNarrowAndShowingTutorCard()" class="supplemental-mask"></div>
<supplemental-card on-click-continue-button="showUpcomingCard()"
on-submit-answer="submitAnswer(answer, rulesService)">
</supplemental-card>
</div>
</div>
</div>
<div ng-if="isOnTerminalCard() && isCurrentCardAtEndOfTranscript()"
class="conversation-skin-final-summary">
<div ng-if="isLoggedIn && !isInPreviewMode" class="conversation-skin-final-ratings">
<div class="conversation-skin-final-ratings-header" translate="I18N_PLAYER_RATE_EXPLORATION">
</div>
<div popover-placement="bottom" popover-template="'popover/feedback'" popover-trigger="click">
<rating-display rating-value="userRating" is-editable="true" on-edit="submitUserRating"
class="conversation-skin-final-ratings-display">
</rating-display>
</div>
</div>
<div ng-if="recommendedExplorationSummaries !== null && (collectionSummary || recommendedExplorationSummaries.length > 0)"
class="conversation-skin-final-recommendations">
<div ng-if="recommendedExplorationSummaries.length > 0">
<span class="conversation-skin-final-recommendations-header"
translate="I18N_PLAYER_RECOMMEND_EXPLORATIONS">
</span>
<div class="oppia-exp-summary-tiles-container conversation-skin-recommended-activities-container">
<exploration-summary-tile ng-repeat="exp in (recommendedExplorationSummaries|limitTo:3) track by $index"
collection-id="collectionId"
exploration-id="exp.id"
exploration-title="exp.title"
last-updated-msec="exp.last_updated_msec"
objective="exp.objective"
category="exp.category"
ratings="exp.ratings"
thumbnail-icon-url="exp.thumbnail_icon_url"
thumbnail-bg-color="exp.thumbnail_bg_color"
num-views="exp.num_views"
is-community-owned="exp.community_owned"
style="margin-left: 10px;">
</exploration-summary-tile>
</div>
<div ng-if="collectionSummary" class="conversation-skin-back-to-collection-container">
<a ng-href="/collection/<[collectionId]>" class="conversation-skin-back-to-collection" translate="I18N_PLAYER_BACK_TO_COLLECTION"></a>
</div>
</div>
<div ng-if="collectionSummary && recommendedExplorationSummaries.length === 0">
<span class="conversation-skin-final-recommendations-header"
translate="I18N_PLAYER_RETURN_TO_COLLECTION"
translate-values="{collectionTitle: collectionSummary.title}">
</span>
<div class="oppia-exp-summary-tiles-container conversation-skin-recommended-activities-container">
<collection-summary-tile collection-id="collectionSummary.id"
collection-title="collectionSummary.title"
last-updated-msec="collectionSummary.last_updated_msec"
node-count="collectionSummary.node_count"
objective="collectionSummary.objective"
thumbnail-icon-url="collectionSummary.thumbnail_icon_url"
thumbnail-bg-color="collectionSummary.thumbnail_bg_color"
style="margin-left: 10px;">
</collection-summary-tile>
</div>
</div>
</div>
</div>
<!-- These styles must be embedded within the directive script tags, since
otherwise they will interfere with the iframed conversation skin directive.
-->
<style>
/*
Note that this affects both the learner mode and the 'editor preview'
mode.
*/
html, body {
background: #e8e7e3 no-repeat center center fixed;
background-size: cover;
color: rgba(0,0,0,0.87);
font-family: 'Roboto', Arial, sans-serif;
font-size: 1.0em;
}
/* All classes below should start with .conversation-skin */
.conversation-skin-back-to-collection-container {
margin-top: 25px;
margin-bottom: 92px;
text-align: center;
}
.conversation-skin-back-to-collection {
color: #064b46;
}
.conversation-skin-progress-dots {
left: 0;
margin: 0 auto;
position: fixed;
right: 0;
top: 56px;
z-index: 1;
}
.conversation-skin-oppia-avatar, .conversation-skin-user-avatar {
height: 36px;
position: absolute;
top: 0;
width: 36px;
z-index: 1;
}
.conversation-skin-oppia-feedback-container .conversation-skin-oppia-avatar {
height: 100px;
left: -87px;
top: -20px;
width: 100px;
}
.conversation-skin-cards-container {
align-items: flex-start;
display: -webkit-flex;
display: flex;
justify-content: center;
margin: 0 auto;
max-width: 1400px;
padding: 56px 14px;
width: 100%;
}
.conversation-skin-tutor-card-container {
flex-shrink: 10000;
max-width: 560px;
/* NOTE TO DEVELOPERS: If min-width is changed, max-width in media query
below should be changed to match. */
min-width: 360px;
width: 100%;
z-index: 1;
}
/* Some mobile devices have CSS width below 360px, use a responsive min-width
when under 360px to prevent pushing things offscreen. */
@media(max-width: 360px) {
.conversation-skin-tutor-card-container {
min-width: 100vw;
}
}
.conversation-skin-future-tutor-card {
left: -30000px;
max-width: 560px;
position: fixed;
top: -30000px;
}
.conversation-skin-supplemental-card-container {
box-shadow: 0 1px 2px rgba(0, 0, 0, 0.24), 0 1px 3px rgba(0, 0, 0, 0.12);
flex-shrink: 1;
margin-left: 12px;
max-width: 1000px;
min-width: 560px;
position: relative;
z-index: 2;
}
/* These rules must be kept in sync with corresponding rules in oppia.css
(those with '.oppia-state-content-display-html > p,
.form-control.oppia-rte-content > div > p' selectors specifying
the same line-height, margin-top and margin-bottom attributes)
*/
.conversation-skin-oppia-feedback-content > p,
.conversation-skin-learner-answer-content > p,
.conversation-skin-help-card-content > p {
line-height: 28px;
margin-bottom: 18px;
margin-top: 18px;
}
.conversation-skin-tutor-card-top-content > p:first-child,
.conversation-skin-oppia-feedback-content > p:first-child,
.conversation-skin-learner-answer-content > p:first-child,
.conversation-skin-help-card-content > p:first-child {
margin-top: 0px;
}
.conversation-skin-tutor-card-top-content > p:last-child,
.conversation-skin-oppia-feedback-content > p:last-child,
.conversation-skin-learner-answer-content > p:last-child,
.conversation-skin-help-card-content > p:last-child {
margin-bottom: 0px;
}
.conversation-skin-tutor-card-top-content,
.conversation-skin-oppia-feedback-content,
.conversation-skin-learner-answer-content,
.conversation-skin-help-card-content {
border-radius: 2px;
display: inline-block;
max-width: 100%;
position: relative;
text-align: left;
}
.conversation-skin-tutor-card-top-content,
.conversation-skin-oppia-feedback-content,
.conversation-skin-learner-answer-content {
margin-bottom: 12px;
}
.conversation-skin-tutor-card-top-content,
.conversation-skin-help-card-content {
padding: 12px;
}
.conversation-skin-oppia-feedback-content,
.conversation-skin-learner-answer-content {
padding: 8px 12px;
}
.conversation-skin-tutor-card-top-content,
.conversation-skin-oppia-feedback-content,
.conversation-skin-help-card-content {
background-color: rgba(224,242,241,1);
}
.conversation-skin-learner-answer-content {
background-color: rgba(236,239,241,1);
border-bottom-right-radius: 0;
}
.conversation-skin-tutor-card-top-content,
.conversation-skin-oppia-feedback-content,
.conversation-skin-help-card-content {
border-top-left-radius: 0;
}
.conversation-skin-learner-answer,
.conversation-skin-oppia-feedback {
word-wrap: break-word;
}
.conversation-skin-feedback-dot-one,
.conversation-skin-feedback-dot-two,
.conversation-skin-feedback-dot-three {
background-color: #666;
display: inline-block;
height: 2px;
width: 2px;
}
.conversation-skin-feedback-dot-one {
-moz-animation-delay: 0.0s;
-moz-animation: dot 1.0s infinite;
-webkit-animation-delay: 0.0s;
-webkit-animation: dot 1.0s infinite;
animation-delay: 0.0s;
animation: dot 1.0s infinite;
opacity: 0;
}
.conversation-skin-feedback-dot-two {
-moz-animation-delay: 0.2s;
-moz-animation: dot 1.0s infinite;
-webkit-animation-delay: 0.2s;
-webkit-animation: dot 1.0s infinite;
animation-delay: 0.2s;
animation: dot 1.0s infinite;
opacity: 0;
}
.conversation-skin-feedback-dot-three {
-moz-animation-delay: 0.4s;
-moz-animation: dot 1.0s infinite;
-webkit-animation-delay: 0.4s;
-webkit-animation: dot 1.0s infinite;
animation-delay: 0.4s;
animation: dot 1.0s infinite;
opacity: 0;
}
.conversation-skin-final-ratings {
text-align: center;
}
.conversation-skin-final-ratings-header {
color: #064b46;
margin: 20px 0;
}
.conversation-skin-final-ratings-display {
color: #064b46;
letter-spacing: 10px;
}
.conversation-skin-final-recommendations {
margin-top: 50px;
text-align: center;
}
.conversation-skin-final-recommendations-header {
color: #064b46;
font-size: 150%;
}
.conversation-skin-oppia-feedback.ng-enter {
opacity: 0;
-webkit-transition: all .5s;
transition: all .5s;
}
.conversation-skin-oppia-feedback.ng-enter.ng-enter-active {
opacity: 1;
}
.conversation-skin-final-summary {
margin-bottom: 25px;
}
.conversation-skin-recommended-activities-container {
padding-top: 10px;
}
@media screen and (max-width: 959px) {
.conversation-skin-cards-container {
display: block;
padding: 65px 0px;
position: relative;
width: 100%;
}
.conversation-skin-tutor-card-container {
position: absolute;
left: 0px;
margin: 0 auto;
right: 0px;
top: 40px;
width: 100%;
z-index: 15;
}
.conversation-skin-tutor-card-alone {
position: relative;
top: 0px;
}
.conversation-skin-supplemental-card-container {
left: 50%;
margin: 0 auto;
max-width: 959px;
min-width: 360px;
position: absolute;
transform: translateX(-50%);
}
.conversation-skin-supplemental-card-container .supplemental-mask {
background-color: #ccc;
height: 100%;
left: 0px;
opacity: 0.7;
position: absolute;
top: 0px;
width: 100%;
z-index: 14;
}
.conversation-skin-user-avatar,
.conversation-skin-oppia-avatar {
display: none;
}
.conversation-skin-oppia-avatar.show-tutor-card {
  border-radius: 50%;
  box-shadow: 0 3px 3px grey, 0 5px 3px grey;
  display: block;
  height: 40px;
  left: 100%;
  /* Fixed: 'position: absolute;' was declared twice in this rule. */
  position: absolute;
  transform: translate(-100%, -100%);
  width: 40px;
  z-index: 16;
}
.conversation-skin-oppia-avatar.show-tutor-card:hover {
opacity: 0.8;
}
}
</style>
|
MAKOSCAFEE/oppia
|
core/templates/dev/head/pages/exploration_player/conversation_skin_directive.html
|
HTML
|
apache-2.0
| 14,227
|
---
layout: default
description: Examples of audit event logging
title: Auditing Events
---
Audit Events
============
{% include hint-ee.md feature="Auditing" %}
Unless otherwise noted, all events are logged to their respective topics at the
`info` level. To suppress events from a given topic, set the topic to the `warn`
level or higher. By default, each topic will be set to the most verbose level
at which events are logged (either `debug` or `info`) so that all events are
logged.
Authentication
--------------
### Unknown authentication methods
```
2016-10-03 15:44:23 | server1 | audit-authentication | n/a | database1 | 127.0.0.1:61525 | n/a | unknown authentication method | /_api/version
```
This message will occur when a request contains an `Authorization` header with
an unknown authentication method. Typically, only `basic` and `bearer` are
accepted.
### Missing credentials
```
2016-10-03 15:39:49 | server1 | audit-authentication | n/a | database1 | 127.0.0.1:61498 | n/a | credentials missing | /_api/version
```
This message will occur when authentication is enabled and a request omits an
`Authorization` header. Note that this may naturally occur when making an
initial request to e.g. log in or load the web interface. For this reason, we
have logged these low-priority events at the `debug` level.
### Wrong credentials
```
2016-10-03 15:47:26 | server1 | audit-authentication | n/a | database1 | 127.0.0.1:61528 | http basic | credentials wrong | /_api/version
```
or
```
2016-10-03 17:21:22 | server1 | audit-authentication | root | database1 | 127.0.0.1:64214 | http jwt | user 'root' wrong credentials | /_open/auth
```
Please note that the user given in the fourth field is the user that requested
the login. In general it may be unavailable.
This message will occur when a user makes an attempt to log in with incorrect
credentials, or passes a JWT with invalid credentials.
### JWT login succeeded
```
2016-10-03 17:21:22 | server1 | audit-authentication | root | database1 | 127.0.0.1:64214 | http jwt | user 'root' authenticated | /_open/auth
```
Please note that the user given in the fourth field is the user that requested
the login.
The message will occur when a user successfully logs in and is given a JWT token
for further use.
Authorization
-------------
### User not authorized to access database
```
2016-10-03 16:20:52 | server1 | audit-authorization | user1 | database2 | 127.0.0.1:62262 | http basic | not authorized | /_api/version
```
This message will occur when a user attempts to access a database in a manner in
which they have not been granted access.
Databases
---------
### Create a database
```
2016-10-04 15:33:25 | server1 | audit-database | user1 | database1 | 127.0.0.1:56920 | http basic | create database 'database1' | ok | /_api/database
```
This message will occur whenever a user attempts to create a database. If
successful, the status will read `ok`, otherwise `failed`.
### Drop a database
```
2016-10-04 15:33:25 | server1 | audit-database | user1 | database1 | 127.0.0.1:56920 | http basic | delete database 'database1' | ok | /_api/database
```
This message will occur whenever a user attempts to drop a database. If
successful, the status will read `ok`, otherwise `failed`.
Collections
-----------
### Create a collection
```
2016-10-05 17:35:57 | server1 | audit-collection | user1 | database1 | 127.0.0.1:51294 | http basic | create collection 'collection1' | ok | /_api/collection
```
This message will occur whenever a user attempts to create a collection. If
successful, the status will read `ok`, otherwise `failed`.
### Truncate a collection
```
2016-10-05 17:36:08 | server1 | audit-collection | user1 | database1 | 127.0.0.1:51294 | http basic | truncate collection 'collection1' | ok | /_api/collection/collection1/truncate
```
This message will occur whenever a user attempts to truncate a collection. If
successful, the status will read `ok`, otherwise `failed`.
### Drop a collection
```
2016-10-05 17:36:30 | server1 | audit-collection | user1 | database1 | 127.0.0.1:51294 | http basic | delete collection 'collection1' | ok | /_api/collection/collection1
```
This message will occur whenever a user attempts to drop a collection. If
successful, the status will read `ok`, otherwise `failed`.
Indexes
-------
### Create an index
```
2016-10-05 18:19:40 | server1 | audit-collection | user1 | database1 | 127.0.0.1:52467 | http basic | create index in 'collection1' | ok | {"fields":["a"],"sparse":false,"type":"persistent","unique":false} | /_api/index?collection=collection1
```
This message will occur whenever a user attempts to create an index. If
successful, the status will read `ok`, otherwise `failed`.
### Drop an index
```
2016-10-05 18:18:28 | server1 | audit-collection | user1 | database1 | 127.0.0.1:52464 | http basic | drop index 'collection1/44051' | ok | /_api/index/collection1/44051
```
This message will occur whenever a user attempts to drop an index. If
successful, the status will read `ok`, otherwise `failed`.
Documents
---------
If statistics are enabled, the system will periodically perform several document
operations on a few system collections. These low-priority operations are logged
to the `audit-document` topic at the `debug` level.
### Reading a single document
```
2016-10-04 12:27:55 | server1 | audit-document | user1 | database1 | 127.0.0.1:53699 | http basic | read document in 'collection1' | ok | /_api/document/collection1
```
This message will occur whenever a user attempts to read a document. If
successful, the status will read `ok`, otherwise `failed`.
### Creating a single document
```
2016-10-04 12:27:55 | server1 | audit-document | user1 | database1 | 127.0.0.1:53699 | http basic | create document in 'collection1' | ok | /_api/document/collection1
```
This message will occur whenever a user attempts to create a document. If
successful, the status will read `ok`, otherwise `failed`.
### Replacing a single document
```
2016-10-04 12:28:08 | server1 | audit-document | user1 | database1 | 127.0.0.1:53699 | http basic | replace document 'collection1/21456' | ok | /_api/document/collection1/21456?ignoreRevs=false
```
This message will occur whenever a user attempts to replace a document. If
successful, the status will read `ok`, otherwise `failed`.
### Modifying a single document
```
2016-10-04 12:28:15 | server1 | audit-document | user1 | database1 | 127.0.0.1:53699 | http basic | modify document 'collection1/21456' | ok | /_api/document/collection1/21456?keepNull=true&ignoreRevs=false
```
This message will occur whenever a user attempts to update a document. If
successful, the status will read `ok`, otherwise `failed`.
### Deleting a single document
```
2016-10-04 12:28:23 | server1 | audit-document | user1 | database1 | 127.0.0.1:53699 | http basic | delete document 'collection1/21456' | ok | /_api/document/collection1/21456?ignoreRevs=false
```
This message will occur whenever a user attempts to delete a document. If
successful, the status will read `ok`, otherwise `failed`.
Queries
-------
```
2016-10-06 12:12:10 | server1 | audit-document | user1 | database1 | 127.0.0.1:54232 | http basic | query document | ok | for i in collection1 return i | /_api/cursor
```
This message will occur whenever a user attempts to execute a query. If
successful, the status will read `ok`, otherwise `failed`.
Hot Backups
-----------
There are three operations which are put into the audit log with respect
to Hot Backups.
### Creating a Hot Backup
```
2020-01-21 15:29:06 | tux | audit-hotbackup | root | n/a | (internal) | n/a | Hotbackup taken with ID 2020-01-21T15:29:06Z_a98422de-03ab-4b94-8ed9-e084bfd4bae1, result: 0
```
This message will occur whenever a user attempts to create a Hot Backup.
If successful, the status will read `0`, otherwise some numerical error code.
### Restoring a Hot Backup
```
2020-01-21 15:29:42 | tux | audit-hotbackup | root | n/a | (internal) | n/a | Hotbackup restored with ID 2020-01-21T15.29.06Z_a98422de-03ab-4b94-8ed9-e084bfd4bae1, result: 0
```
This message will occur whenever a user attempts to restore from a Hot Backup.
If successful, the status will read `0`, otherwise some numerical error code.
### Deleting a Hot Backup
```
2020-01-21 15:32:37 | tux | audit-hotbackup | root | n/a | (internal) | n/a | Hotbackup deleted with ID 2020-01-21T15.32.27Z_cf1e3cb1-32c0-41d2-9a3f-528c9b43cbf9, result: 0
```
This message will occur whenever a user attempts to delete a Hot Backup.
If successful, the status will read `0`, otherwise some numerical error code.
|
arangodb/docs
|
3.9/security-auditing-audit-events.md
|
Markdown
|
apache-2.0
| 8,631
|
/*
* Copyright 2011 Intel Corporation.
*
* This program is licensed under the terms and conditions of the
* Apache License, version 2.0. The full text of the Apache License is at
* http://www.apache.org/licenses/LICENSE-2.0
*/
/*
* This file was generated by qdbusxml2cpp version 0.7
* Command line was: qdbusxml2cpp -c CameraIfAdaptor -a cameraifadaptor.h:cameraifadaptor.cpp com.meego.app.camera.xml
*
* qdbusxml2cpp is Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
*
* This is an auto-generated file.
* Do not edit! All changes made to it will be lost.
*/
#include "cameraifadaptor.h"
#include <QtCore/QMetaObject>
#include <QtCore/QByteArray>
#include <QtCore/QList>
#include <QtCore/QMap>
#include <QtCore/QString>
#include <QtCore/QStringList>
#include <QtCore/QVariant>
/*
* Implementation of adaptor class CameraIfAdaptor
*/
// Constructs the D-Bus adaptor wrapping the given parent object.
// setAutoRelaySignals(true) makes Qt automatically re-emit the parent's
// signals as D-Bus signals, so no explicit connect() calls are needed.
// (Generated by qdbusxml2cpp; see the file header.)
CameraIfAdaptor::CameraIfAdaptor(QObject *parent)
    : QDBusAbstractAdaptor(parent)
{
    // constructor
    setAutoRelaySignals(true);
}
// Destroys the adaptor. No explicit cleanup is needed: the adaptor is a
// child QObject of the object it adapts, so Qt parent ownership releases
// the underlying resources.
CameraIfAdaptor::~CameraIfAdaptor()
{
    // destructor
}
|
dudochkin-victor/gogoo-app-camera
|
plugin/cameraifadaptor.cpp
|
C++
|
apache-2.0
| 1,075
|
# OneTwoThree
Repository for OneTwoThree Project
|
hkh412/OneTwoThree_Deprecated
|
README.md
|
Markdown
|
apache-2.0
| 49
|
/* This C file is generated by NIT to compile module syntax. */
#include "syntax._sep.h"
static const char LOCATE_syntax___SrcModuleLoader___file_type[] = "syntax::SrcModuleLoader::(mmloader::ModuleLoader::file_type)";
/* Compiled form of syntax::SrcModuleLoader::file_type (syntax/syntax.nit:33).
 * Returns the source-file extension handled by this loader: the string
 * "nit", built once and cached in a static "once" slot.
 * NOTE: generated code — do not hand-edit the logic. */
val_t syntax___SrcModuleLoader___file_type(val_t p0){
struct {struct stack_frame_t me;} fra;
val_t REGB0;
val_t tmp;
static val_t once_value_1; /* Once value */
/* Push a frame onto the interpreter's shadow stack (for traces/GC). */
fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
fra.me.file = LOCATE_syntax;
fra.me.line = 33;
fra.me.meth = LOCATE_syntax___SrcModuleLoader___file_type;
fra.me.has_broke = 0;
fra.me.REG_size = 1;
fra.me.nitni_local_ref_head = NULL;
fra.me.REG[0] = NIT_NULL;
fra.me.REG[0] = p0;
/* syntax/syntax.nit:33 */
/* Lazily build the "nit" String the first time and memoize it. */
if (!once_value_1) {
fra.me.REG[0] = BOX_NativeString("nit");
REGB0 = TAG_Int(3);
fra.me.REG[0] = CALL_standard___string___NativeString___to_s_with_length(fra.me.REG[0])(fra.me.REG[0], REGB0);
once_value_1 = fra.me.REG[0];
register_static_object(&once_value_1);
} else fra.me.REG[0] = once_value_1;
fra.me.REG[0] = fra.me.REG[0];
goto label2;
label2: while(0);
/* Pop the shadow-stack frame and return the cached string. */
stack_frame_head = fra.me.prev;
return fra.me.REG[0];
}
static const char LOCATE_syntax___SrcModuleLoader___parse_file[] = "syntax::SrcModuleLoader::(mmloader::ModuleLoader::parse_file)";
/* Compiled form of syntax::SrcModuleLoader::parse_file (syntax/syntax.nit:35).
 * Parses one Nit source file into an MMSrcModule:
 *   1. validates the module name (p4): must start with a lower-case letter
 *      and contain only letters, digits and '_' (checked char-by-char via
 *      the OC_..._parse_file_1 closure below);
 *   2. lexes and parses the file, aborting with a fatal error on a parse
 *      error (AError node);
 *   3. wraps the resulting AST in a new MMSrcModule and returns it.
 * Parameters p0..p5 are, presumably, self, the ToolContext, the input
 * stream, the file name, the module name and the directory — TODO confirm
 * against the Nit source (only their uses are visible here).
 * NOTE: generated code — do not hand-edit the logic. */
val_t syntax___SrcModuleLoader___parse_file(val_t p0, val_t p1, val_t p2, val_t p3, val_t p4, val_t p5){
struct {struct stack_frame_t me; val_t MORE_REG[7];} fra;
val_t REGB0;
val_t REGB1;
val_t REGB2;
val_t tmp;
static val_t once_value_4; /* Once value */
static val_t once_value_5; /* Once value */
static val_t once_value_6; /* Once value */
/* Push a frame onto the interpreter's shadow stack (for traces/GC). */
fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
fra.me.file = LOCATE_syntax;
fra.me.line = 35;
fra.me.meth = LOCATE_syntax___SrcModuleLoader___parse_file;
fra.me.has_broke = 0;
fra.me.REG_size = 8;
fra.me.nitni_local_ref_head = NULL;
fra.me.REG[0] = NIT_NULL;
fra.me.REG[1] = NIT_NULL;
fra.me.REG[2] = NIT_NULL;
fra.me.REG[3] = NIT_NULL;
fra.me.REG[4] = NIT_NULL;
fra.me.REG[5] = NIT_NULL;
fra.me.REG[6] = NIT_NULL;
fra.me.REG[7] = NIT_NULL;
fra.me.REG[0] = p0;
fra.me.REG[1] = p1;
fra.me.REG[2] = p2;
fra.me.REG[3] = p3;
fra.me.REG[4] = p4;
fra.me.REG[5] = p5;
/* syntax/syntax.nit:37 */
/* Module-name validation: name must be non-empty and start lower-case. */
fra.me.REG[6] = CALL_standard___string___Object___to_s(fra.me.REG[4])(fra.me.REG[4]);
/* ../lib/standard/collection/array.nit:24 */
REGB0 = TAG_Bool(ATTR_standard___collection___array___AbstractArrayRead____length(fra.me.REG[6])!=NIT_NULL);
if (UNTAG_Bool(REGB0)) {
} else {
nit_abort("Uninitialized attribute %s", "_length", LOCATE_standard___collection___array, 24);
}
REGB0 = ATTR_standard___collection___array___AbstractArrayRead____length(fra.me.REG[6]);
/* syntax/syntax.nit:37 */
REGB1 = TAG_Int(0);
REGB2 = TAG_Bool(VAL_ISA(REGB1, VTCOLOR_standard___kernel___Comparable___OTHER(REGB0), VTID_standard___kernel___Comparable___OTHER(REGB0))) /*cast OTHER*/;
if (UNTAG_Bool(REGB2)) {
} else {
nit_abort("Cast failed", NULL, LOCATE_standard___kernel, 0);
}
/* ../lib/standard/kernel.nit:244 */
REGB1 = TAG_Bool(UNTAG_Int(REGB0)>UNTAG_Int(REGB1));
/* syntax/syntax.nit:37 */
if (UNTAG_Bool(REGB1)) {
fra.me.REG[6] = CALL_standard___string___Object___to_s(fra.me.REG[4])(fra.me.REG[4]);
REGB1 = TAG_Int(0);
REGB1 = CALL_standard___collection___abstract_collection___SequenceRead_____bra(fra.me.REG[6])(fra.me.REG[6], REGB1);
REGB1 = CALL_standard___kernel___Char___is_lower(REGB1)(REGB1);
} else {
REGB0 = TAG_Bool(0);
REGB1 = REGB0;
}
fra.me.REG[6] = REGB1;
/* syntax/syntax.nit:38 */
/* Iterate over every character of the name; the closure clears REG[6]
 * (the validity flag) and breaks on the first invalid character. */
fra.me.REG[7] = CALL_standard___string___Object___to_s(fra.me.REG[4])(fra.me.REG[4]);
CALL_standard___collection___abstract_collection___Collection___iterate(fra.me.REG[7])(fra.me.REG[7], (&(fra.me)), ((fun_t)OC_syntax___SrcModuleLoader___parse_file_1));
switch ((&(fra.me))->has_broke) {
case 0: break;
case 1: (&(fra.me))->has_broke = 0; goto label3;
}
label3: while(0);
/* syntax/syntax.nit:43 */
/* If validation failed, report an error through the ToolContext. */
REGB1 = TAG_Bool(!UNTAG_Bool(fra.me.REG[6]));
if (UNTAG_Bool(REGB1)) {
/* syntax/syntax.nit:44 */
/* Build the error message by concatenating string fragments. */
REGB1 = TAG_Int(5);
fra.me.REG[6] = NEW_Array_standard___collection___array___Array___with_capacity(REGB1);
if (!once_value_4) {
fra.me.REG[7] = BOX_NativeString("");
REGB1 = TAG_Int(0);
fra.me.REG[7] = CALL_standard___string___NativeString___to_s_with_length(fra.me.REG[7])(fra.me.REG[7], REGB1);
once_value_4 = fra.me.REG[7];
register_static_object(&once_value_4);
} else fra.me.REG[7] = once_value_4;
fra.me.REG[7] = fra.me.REG[7];
CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[6])(fra.me.REG[6], fra.me.REG[7]);
CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[6])(fra.me.REG[6], fra.me.REG[3]);
if (!once_value_5) {
fra.me.REG[7] = BOX_NativeString(": Error module name \"");
REGB1 = TAG_Int(21);
fra.me.REG[7] = CALL_standard___string___NativeString___to_s_with_length(fra.me.REG[7])(fra.me.REG[7], REGB1);
once_value_5 = fra.me.REG[7];
register_static_object(&once_value_5);
} else fra.me.REG[7] = once_value_5;
fra.me.REG[7] = fra.me.REG[7];
CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[6])(fra.me.REG[6], fra.me.REG[7]);
fra.me.REG[7] = CALL_standard___string___Object___to_s(fra.me.REG[4])(fra.me.REG[4]);
CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[6])(fra.me.REG[6], fra.me.REG[7]);
if (!once_value_6) {
fra.me.REG[7] = BOX_NativeString("\", must start with a lower case letter and contain only letters, digits and '_'.");
REGB1 = TAG_Int(80);
fra.me.REG[7] = CALL_standard___string___NativeString___to_s_with_length(fra.me.REG[7])(fra.me.REG[7], REGB1);
once_value_6 = fra.me.REG[7];
register_static_object(&once_value_6);
} else fra.me.REG[7] = once_value_6;
fra.me.REG[7] = fra.me.REG[7];
CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[6])(fra.me.REG[6], fra.me.REG[7]);
fra.me.REG[6] = CALL_standard___string___Object___to_s(fra.me.REG[6])(fra.me.REG[6]);
CALL_toolcontext___ToolContext___error(fra.me.REG[1])(fra.me.REG[1], NIT_NULL, fra.me.REG[6]);
}
/* syntax/syntax.nit:47 */
/* Lex and parse the source file. */
fra.me.REG[2] = NEW_SourceFile_location___SourceFile___init(fra.me.REG[3], fra.me.REG[2]);
/* syntax/syntax.nit:48 */
fra.me.REG[3] = NEW_Lexer_parser___lexer___Lexer___init(fra.me.REG[2]);
/* syntax/syntax.nit:49 */
fra.me.REG[3] = NEW_Parser_parser___Parser___init(fra.me.REG[3]);
/* syntax/syntax.nit:50 */
fra.me.REG[3] = CALL_parser___Parser___parse(fra.me.REG[3])(fra.me.REG[3]);
/* syntax/syntax.nit:51 */
/* A null n_base means the parse failed: the EOF node carries the error. */
fra.me.REG[6] = CALL_parser___parser_nodes___Start___n_base(fra.me.REG[3])(fra.me.REG[3]);
REGB1 = TAG_Bool(fra.me.REG[6]==NIT_NULL);
if (UNTAG_Bool(REGB1)) {
} else {
REGB0 = TAG_Bool(fra.me.REG[6]==NIT_NULL);
if (UNTAG_Bool(REGB0)) {
REGB0 = TAG_Bool(0);
REGB1 = REGB0;
} else {
REGB0 = CALL_standard___kernel___Object_____eqeq(fra.me.REG[6])(fra.me.REG[6], NIT_NULL);
REGB1 = REGB0;
}
}
if (UNTAG_Bool(REGB1)) {
/* syntax/syntax.nit:52 */
fra.me.REG[6] = CALL_parser___parser_nodes___Start___n_eof(fra.me.REG[3])(fra.me.REG[3]);
/* syntax/syntax.nit:53 */
REGB1 = TAG_Bool(VAL_ISA(fra.me.REG[6], COLOR_parser___parser_nodes___AError, ID_parser___parser_nodes___AError)) /*cast AError*/;
if (UNTAG_Bool(REGB1)) {
} else {
nit_abort("Assert failed", NULL, LOCATE_syntax, 53);
}
/* syntax/syntax.nit:54 */
/* fatal_error does not return: compilation stops here on parse errors. */
fra.me.REG[0] = CALL_parser___parser_nodes___ANode___location(fra.me.REG[6])(fra.me.REG[6]);
fra.me.REG[6] = CALL_parser___lexer___AError___message(fra.me.REG[6])(fra.me.REG[6]);
CALL_toolcontext___ToolContext___fatal_error(fra.me.REG[1])(fra.me.REG[1], fra.me.REG[0], fra.me.REG[6]);
}
/* syntax/syntax.nit:56 */
fra.me.REG[3] = CALL_parser___parser_nodes___Start___n_base(fra.me.REG[3])(fra.me.REG[3]);
/* syntax/syntax.nit:57 */
REGB1 = TAG_Bool(fra.me.REG[3]==NIT_NULL);
if (UNTAG_Bool(REGB1)) {
} else {
REGB0 = TAG_Bool(fra.me.REG[3]==NIT_NULL);
if (UNTAG_Bool(REGB0)) {
REGB0 = TAG_Bool(0);
REGB1 = REGB0;
} else {
REGB0 = CALL_standard___kernel___Object_____eqeq(fra.me.REG[3])(fra.me.REG[3], NIT_NULL);
REGB1 = REGB0;
}
}
REGB1 = TAG_Bool(!UNTAG_Bool(REGB1));
if (UNTAG_Bool(REGB1)) {
} else {
nit_abort("Assert failed", NULL, LOCATE_syntax, 57);
}
/* syntax/syntax.nit:58 */
/* Wrap the AST into a new source module and return it. */
fra.me.REG[2] = NEW_Location_location___Location___with_file(fra.me.REG[2]);
/* syntax/syntax.nit:59 */
fra.me.REG[2] = NEW_MMSrcModule_syntax___syntax_base___MMSrcModule___init(fra.me.REG[1], fra.me.REG[3], fra.me.REG[5], fra.me.REG[4], fra.me.REG[2]);
/* syntax/syntax.nit:60 */
goto label7;
label7: while(0);
stack_frame_head = fra.me.prev;
return fra.me.REG[2];
}
/* Compiler-generated closure body (Nit compiler output) used by
 * SrcModuleLoader::parse_file; corresponds to syntax/syntax.nit lines ~38-41.
 * p0 is a tagged Char. The closure tests whether the character is a valid
 * module-name character (digit, letter, or '_'); if it is not, it stores
 * `false` into the enclosing frame's REG[6] and signals a closure break
 * (has_broke = 1) so the caller can abort the name scan. */
void OC_syntax___SrcModuleLoader___parse_file_1(struct stack_frame_t *closctx, val_t p0, struct stack_frame_t *closctx_param, fun_t clos_fun0){
  struct {struct stack_frame_t me;} fra;
  val_t REGB0;
  val_t REGB1;
  val_t REGB2;
  fun_t CREG[1];
  val_t tmp;
  /* Push a new stack frame for this closure invocation. */
  fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
  fra.me.file = LOCATE_syntax;
  fra.me.line = 0;
  fra.me.meth = LOCATE_syntax___SrcModuleLoader___parse_file;
  fra.me.has_broke = 0;
  fra.me.REG_size = 1;
  fra.me.nitni_local_ref_head = NULL;
  fra.me.REG[0] = NIT_NULL;
  fra.me.closure_ctx = closctx_param;
  fra.me.closure_funs = CREG;
  fra.me.REG[0] = p0;
  CREG[0] = clos_fun0;
  REGB0 = fra.me.REG[0];
  /* REGB1 := not c.is_digit */
  REGB1 = CALL_standard___kernel___Char___is_digit(REGB0)(REGB0);
  REGB1 = TAG_Bool(!UNTAG_Bool(REGB1));
  if (UNTAG_Bool(REGB1)) {
    /* Not a digit: REGB1 := not c.is_letter */
    REGB1 = CALL_standard___kernel___Char___is_letter(REGB0)(REGB0);
    REGB1 = TAG_Bool(!UNTAG_Bool(REGB1));
  } else {
    /* Digit: short-circuit, the character is acceptable. */
    REGB2 = TAG_Bool(0);
    REGB1 = REGB2;
  }
  if (UNTAG_Bool(REGB1)) {
    /* Neither digit nor letter: REGB2 := (c != '_') */
    REGB1 = TAG_Char('_');
    REGB2 = TAG_Bool(IS_EQUAL_OO(REGB0,REGB1));
    if (UNTAG_Bool(REGB2)) {
    } else {
      /* ../lib/standard/kernel.nit:434 */
      REGB1 = TAG_Bool((REGB0)==(REGB1));
      /* syntax/syntax.nit:38 */
      REGB2 = REGB1;
    }
    REGB2 = TAG_Bool(!UNTAG_Bool(REGB2));
  } else {
    REGB1 = TAG_Bool(0);
    REGB2 = REGB1;
  }
  if (UNTAG_Bool(REGB2)) {
    /* Invalid character: record failure and break out of the closure. */
    /* syntax/syntax.nit:40 */
    REGB2 = TAG_Bool(0);
    closctx->REG[6] = REGB2;
    /* syntax/syntax.nit:41 */
    closctx->has_broke = 1;
    goto label2;
  }
  label2: while(0);
  /* Pop the frame and return to the enclosing iteration. */
  stack_frame_head = fra.me.prev;
  return;
}
static const char LOCATE_syntax___SrcModuleLoader___process_metamodel[] = "syntax::SrcModuleLoader::(mmloader::ModuleLoader::process_metamodel)";
/* Compiler-generated body of SrcModuleLoader::process_metamodel
 * (syntax/syntax.nit line 63).  p0 = receiver, p1 = ToolContext,
 * p2 = module (must be castable to the loader's MODULE virtual type).
 * Imports super-modules, logs "Syntax analysis for module: <name>" at
 * verbosity level 2, then runs the full syntax pass on the module. */
void syntax___SrcModuleLoader___process_metamodel(val_t p0, val_t p1, val_t p2){
  struct {struct stack_frame_t me; val_t MORE_REG[3];} fra;
  val_t REGB0;
  val_t tmp;
  static val_t once_value_1; /* Once value */
  static val_t once_value_2; /* Once value */
  /* Push a new stack frame. */
  fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
  fra.me.file = LOCATE_syntax;
  fra.me.line = 63;
  fra.me.meth = LOCATE_syntax___SrcModuleLoader___process_metamodel;
  fra.me.has_broke = 0;
  fra.me.REG_size = 4;
  fra.me.nitni_local_ref_head = NULL;
  fra.me.REG[0] = NIT_NULL;
  fra.me.REG[1] = NIT_NULL;
  fra.me.REG[2] = NIT_NULL;
  fra.me.REG[3] = NIT_NULL;
  fra.me.REG[0] = p0;
  fra.me.REG[1] = p1;
  fra.me.REG[2] = p2;
  /* Runtime cast check: p2 must be an instance of the MODULE virtual type. */
  REGB0 = TAG_Bool(VAL_ISA(fra.me.REG[2], VTCOLOR_mmloader___ModuleLoader___MODULE(fra.me.REG[0]), VTID_mmloader___ModuleLoader___MODULE(fra.me.REG[0]))) /*cast MODULE*/;
  if (UNTAG_Bool(REGB0)) {
  } else {
    nit_abort("Cast failed", NULL, LOCATE_syntax, 0);
  }
  /* syntax/syntax.nit:65 */
  CALL_syntax___MMSrcModule___process_supermodules(fra.me.REG[2])(fra.me.REG[2], fra.me.REG[1]);
  /* syntax/syntax.nit:66 -- build the info message "Syntax analysis for module: <name>" */
  REGB0 = TAG_Int(3);
  fra.me.REG[0] = NEW_Array_standard___collection___array___Array___with_capacity(REGB0);
  /* Lazily-initialized ("once") string literal for the message prefix. */
  if (!once_value_1) {
    fra.me.REG[3] = BOX_NativeString("Syntax analysis for module: ");
    REGB0 = TAG_Int(28);
    fra.me.REG[3] = CALL_standard___string___NativeString___to_s_with_length(fra.me.REG[3])(fra.me.REG[3], REGB0);
    once_value_1 = fra.me.REG[3];
    register_static_object(&once_value_1);
  } else fra.me.REG[3] = once_value_1;
  fra.me.REG[3] = fra.me.REG[3];
  CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[0])(fra.me.REG[0], fra.me.REG[3]);
  /* Append the module's name. */
  fra.me.REG[3] = CALL_metamodel___abstractmetamodel___MMModule___name(fra.me.REG[2])(fra.me.REG[2]);
  fra.me.REG[3] = CALL_standard___string___Object___to_s(fra.me.REG[3])(fra.me.REG[3]);
  CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[0])(fra.me.REG[0], fra.me.REG[3]);
  /* Lazily-initialized empty-string suffix literal. */
  if (!once_value_2) {
    fra.me.REG[3] = BOX_NativeString("");
    REGB0 = TAG_Int(0);
    fra.me.REG[3] = CALL_standard___string___NativeString___to_s_with_length(fra.me.REG[3])(fra.me.REG[3], REGB0);
    once_value_2 = fra.me.REG[3];
    register_static_object(&once_value_2);
  } else fra.me.REG[3] = once_value_2;
  fra.me.REG[3] = fra.me.REG[3];
  CALL_standard___collection___abstract_collection___SimpleCollection___add(fra.me.REG[0])(fra.me.REG[0], fra.me.REG[3]);
  /* Join the parts and emit at info level 2. */
  fra.me.REG[0] = CALL_standard___string___Object___to_s(fra.me.REG[0])(fra.me.REG[0]);
  REGB0 = TAG_Int(2);
  CALL_toolcontext___ToolContext___info(fra.me.REG[1])(fra.me.REG[1], fra.me.REG[0], REGB0);
  /* syntax/syntax.nit:67 */
  CALL_syntax___MMSrcModule___process_syntax(fra.me.REG[2])(fra.me.REG[2], fra.me.REG[1]);
  /* Pop the frame. */
  stack_frame_head = fra.me.prev;
  return;
}
static const char LOCATE_syntax___SrcModuleLoader___init[] = "syntax::SrcModuleLoader::init";
/* Compiler-generated default constructor for SrcModuleLoader
 * (syntax/syntax.nit line 70).  The init_table guards against running the
 * same class initializer twice along a multiple-inheritance path. */
void syntax___SrcModuleLoader___init(val_t p0, int* init_table){
  int itpos0 = VAL2OBJ(p0)->vft[INIT_TABLE_POS_syntax___SrcModuleLoader].i;
  struct {struct stack_frame_t me;} fra;
  val_t tmp;
  /* Already initialized for this class: nothing to do. */
  if (init_table[itpos0]) return;
  fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
  fra.me.file = LOCATE_syntax;
  fra.me.line = 70;
  fra.me.meth = LOCATE_syntax___SrcModuleLoader___init;
  fra.me.has_broke = 0;
  fra.me.REG_size = 1;
  fra.me.nitni_local_ref_head = NULL;
  fra.me.REG[0] = NIT_NULL;
  fra.me.REG[0] = p0;
  stack_frame_head = fra.me.prev;
  /* Mark this class as initialized. */
  init_table[itpos0] = 1;
  return;
}
static const char LOCATE_syntax___MMSrcModule___process_supermodules[] = "syntax::MMSrcModule::process_supermodules";
/* Compiler-generated body of MMSrcModule::process_supermodules
 * (syntax/syntax.nit line 74).  p0 = module, p1 = ToolContext.
 * Delegates to the module's AST node to import its super-modules. */
void syntax___MMSrcModule___process_supermodules(val_t p0, val_t p1){
  struct {struct stack_frame_t me; val_t MORE_REG[2];} fra;
  val_t tmp;
  fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
  fra.me.file = LOCATE_syntax;
  fra.me.line = 74;
  fra.me.meth = LOCATE_syntax___MMSrcModule___process_supermodules;
  fra.me.has_broke = 0;
  fra.me.REG_size = 3;
  fra.me.nitni_local_ref_head = NULL;
  fra.me.REG[0] = NIT_NULL;
  fra.me.REG[1] = NIT_NULL;
  fra.me.REG[2] = NIT_NULL;
  fra.me.REG[0] = p0;
  fra.me.REG[1] = p1;
  /* syntax/syntax.nit:77 -- node.import_super_modules(tc, self) */
  fra.me.REG[2] = CALL_syntax___syntax_base___MMSrcModule___node(fra.me.REG[0])(fra.me.REG[0]);
  CALL_syntax___mmbuilder___AModule___import_super_modules(fra.me.REG[2])(fra.me.REG[2], fra.me.REG[1], fra.me.REG[0]);
  stack_frame_head = fra.me.prev;
  return;
}
static const char LOCATE_syntax___MMSrcModule___process_syntax[] = "syntax::MMSrcModule::process_syntax";
/* Compiler-generated body of MMSrcModule::process_syntax
 * (syntax/syntax.nit line 80).  Runs the three front-end passes on the
 * module -- model building, typing, icode generation -- checking for
 * accumulated errors after each pass, then optionally frees the AST. */
void syntax___MMSrcModule___process_syntax(val_t p0, val_t p1){
  struct {struct stack_frame_t me; val_t MORE_REG[1];} fra;
  val_t REGB0;
  val_t tmp;
  fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
  fra.me.file = LOCATE_syntax;
  fra.me.line = 80;
  fra.me.meth = LOCATE_syntax___MMSrcModule___process_syntax;
  fra.me.has_broke = 0;
  fra.me.REG_size = 2;
  fra.me.nitni_local_ref_head = NULL;
  fra.me.REG[0] = NIT_NULL;
  fra.me.REG[1] = NIT_NULL;
  fra.me.REG[0] = p0;
  fra.me.REG[1] = p1;
  /* syntax/syntax.nit:84 -- model-building pass */
  CALL_syntax___mmbuilder___MMSrcModule___do_mmbuilder(fra.me.REG[0])(fra.me.REG[0], fra.me.REG[1]);
  /* syntax/syntax.nit:85 */
  CALL_toolcontext___ToolContext___check_errors(fra.me.REG[1])(fra.me.REG[1]);
  /* syntax/syntax.nit:87 -- typing pass */
  CALL_syntax___typing___MMSrcModule___do_typing(fra.me.REG[0])(fra.me.REG[0], fra.me.REG[1]);
  /* syntax/syntax.nit:88 */
  CALL_toolcontext___ToolContext___check_errors(fra.me.REG[1])(fra.me.REG[1]);
  /* syntax/syntax.nit:90 -- intermediate-code generation pass */
  CALL_syntax___icode_generation___MMSrcModule___generate_icode(fra.me.REG[0])(fra.me.REG[0], fra.me.REG[1]);
  /* syntax/syntax.nit:91 */
  CALL_toolcontext___ToolContext___check_errors(fra.me.REG[1])(fra.me.REG[1]);
  /* syntax/syntax.nit:93 -- drop the AST unless the tool asked to keep it */
  REGB0 = CALL_syntax___ToolContext___keep_ast(fra.me.REG[1])(fra.me.REG[1]);
  REGB0 = TAG_Bool(!UNTAG_Bool(REGB0));
  if (UNTAG_Bool(REGB0)) {
    CALL_syntax___syntax_base___MMSrcModule___clear_ast(fra.me.REG[0])(fra.me.REG[0]);
  }
  stack_frame_head = fra.me.prev;
  return;
}
static const char LOCATE_syntax___ToolContext___keep_ast[] = "syntax::ToolContext::keep_ast";
/* Compiler-generated attribute getter for ToolContext::keep_ast
 * (syntax/syntax.nit line 98).  Aborts if the attribute was never set. */
val_t syntax___ToolContext___keep_ast(val_t p0){
  struct {struct stack_frame_t me;} fra;
  val_t REGB0;
  val_t tmp;
  fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
  fra.me.file = LOCATE_syntax;
  fra.me.line = 98;
  fra.me.meth = LOCATE_syntax___ToolContext___keep_ast;
  fra.me.has_broke = 0;
  fra.me.REG_size = 1;
  fra.me.nitni_local_ref_head = NULL;
  fra.me.REG[0] = NIT_NULL;
  fra.me.REG[0] = p0;
  /* syntax/syntax.nit:98 -- guard against reading an uninitialized attribute */
  REGB0 = TAG_Bool(ATTR_syntax___ToolContext____keep_ast(fra.me.REG[0])!=NIT_NULL);
  if (UNTAG_Bool(REGB0)) {
  } else {
    nit_abort("Uninitialized attribute %s", "_keep_ast", LOCATE_syntax, 98);
  }
  REGB0 = ATTR_syntax___ToolContext____keep_ast(fra.me.REG[0]);
  stack_frame_head = fra.me.prev;
  return REGB0;
}
static const char LOCATE_syntax___ToolContext___keep_ast__eq[] = "syntax::ToolContext::keep_ast=";
/* Compiler-generated attribute setter for ToolContext::keep_ast=
 * (syntax/syntax.nit line 98).  p1 is the tagged Bool value to store. */
void syntax___ToolContext___keep_ast__eq(val_t p0, val_t p1){
  struct {struct stack_frame_t me;} fra;
  val_t REGB0;
  val_t tmp;
  fra.me.prev = stack_frame_head; stack_frame_head = &fra.me;
  fra.me.file = LOCATE_syntax;
  fra.me.line = 98;
  fra.me.meth = LOCATE_syntax___ToolContext___keep_ast__eq;
  fra.me.has_broke = 0;
  fra.me.REG_size = 1;
  fra.me.nitni_local_ref_head = NULL;
  fra.me.REG[0] = NIT_NULL;
  fra.me.REG[0] = p0;
  REGB0 = p1;
  /* syntax/syntax.nit:98 -- store the new attribute value */
  ATTR_syntax___ToolContext____keep_ast(fra.me.REG[0]) = REGB0;
  stack_frame_head = fra.me.prev;
  return;
}
|
michauds/nit
|
c_src/syntax._sep.c
|
C
|
apache-2.0
| 18,784
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.elasticsearch.converter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.camel.Converter;
import org.apache.camel.Exchange;
import org.apache.camel.component.elasticsearch.ElasticsearchConstants;
import org.apache.camel.util.ObjectHelper;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Converter(loader = true)
public final class ElasticsearchActionRequestConverter {

    private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchActionRequestConverter.class);

    private static final String ES_QUERY_DSL_PREFIX = "query";

    /** Utility class: no instances. */
    private ElasticsearchActionRequestConverter() {
    }

    /**
     * Builds an {@link UpdateRequest} from the message body.
     *
     * <p>Supported body types: {@code UpdateRequest} (returned as-is),
     * {@code byte[]}, {@code Map}, {@code String} and {@code XContentBuilder}.
     *
     * @param document the message body carrying the document
     * @param exchange used to read index name, id and wait-for-active-shards headers
     * @return the populated request, or {@code null} if the body type is unsupported
     */
    @SuppressWarnings("unchecked")
    private static UpdateRequest createUpdateRequest(Object document, Exchange exchange) {
        if (document instanceof UpdateRequest) {
            return (UpdateRequest) document;
        }
        UpdateRequest updateRequest = new UpdateRequest();
        if (document instanceof byte[]) {
            updateRequest.doc((byte[]) document);
        } else if (document instanceof Map) {
            updateRequest.doc((Map<String, Object>) document);
        } else if (document instanceof String) {
            updateRequest.doc((String) document, XContentFactory.xContentType((String) document));
        } else if (document instanceof XContentBuilder) {
            updateRequest.doc((XContentBuilder) document);
        } else {
            // Unsupported payload type: signal "no conversion" to the caller.
            return null;
        }
        return updateRequest
                .waitForActiveShards(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_WAIT_FOR_ACTIVE_SHARDS, Integer.class))
                .index(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_NAME, String.class))
                .id(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_ID, String.class));
    }

    /**
     * Builds an {@link IndexRequest} from the message body.
     *
     * <p>Supported body types: {@code IndexRequest} (returned as-is),
     * {@code byte[]}, {@code Map}, {@code String} and {@code XContentBuilder}.
     *
     * @param document the message body carrying the document
     * @param exchange used to read index name and wait-for-active-shards headers
     * @return the populated request, or {@code null} if the body type is unsupported
     */
    @SuppressWarnings("unchecked")
    private static IndexRequest createIndexRequest(Object document, Exchange exchange) {
        if (document instanceof IndexRequest) {
            return (IndexRequest) document;
        }
        IndexRequest indexRequest = new IndexRequest();
        if (document instanceof byte[]) {
            indexRequest.source((byte[]) document, XContentFactory.xContentType((byte[]) document));
        } else if (document instanceof Map) {
            indexRequest.source((Map<String, Object>) document);
        } else if (document instanceof String) {
            indexRequest.source((String) document, XContentFactory.xContentType((String) document));
        } else if (document instanceof XContentBuilder) {
            indexRequest.source((XContentBuilder) document);
        } else {
            // Unsupported payload type: signal "no conversion" to the caller.
            return null;
        }
        return indexRequest
                .waitForActiveShards(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_WAIT_FOR_ACTIVE_SHARDS, Integer.class))
                .index(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_NAME, String.class));
    }

    /**
     * Converts the message body into an {@link IndexRequest}, applying the
     * document id from the {@code indexId} header.
     *
     * @return the request, or {@code null} if the body type is unsupported
     */
    @Converter
    public static IndexRequest toIndexRequest(Object document, Exchange exchange) {
        IndexRequest indexRequest = createIndexRequest(document, exchange);
        if (indexRequest == null) {
            // FIX: the original dereferenced the null result and threw an
            // opaque NullPointerException for unsupported body types.
            return null;
        }
        return indexRequest.id(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_ID, String.class));
    }

    /**
     * Converts the message body into an {@link UpdateRequest}, applying the
     * document id from the {@code indexId} header.
     *
     * @return the request, or {@code null} if the body type is unsupported
     */
    @Converter
    public static UpdateRequest toUpdateRequest(Object document, Exchange exchange) {
        UpdateRequest updateRequest = createUpdateRequest(document, exchange);
        if (updateRequest == null) {
            // FIX: the original dereferenced the null result and threw an
            // opaque NullPointerException for unsupported body types.
            return null;
        }
        return updateRequest.id(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_ID, String.class));
    }

    /**
     * Converts the message body into a {@link GetRequest}. A {@code GetRequest}
     * body is returned unchanged; otherwise the body is used as the document id
     * and the index name is taken from the {@code indexName} header.
     */
    @Converter
    public static GetRequest toGetRequest(Object document, Exchange exchange) {
        if (document instanceof GetRequest) {
            return (GetRequest) document;
        }
        return new GetRequest(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_NAME, String.class))
                .id((String) document);
    }

    /**
     * Converts the message body into a {@link DeleteRequest}.
     *
     * @throws IllegalArgumentException if the body is neither a
     *         {@code DeleteRequest} nor a {@code String} document id
     */
    @Converter
    public static DeleteRequest toDeleteRequest(Object document, Exchange exchange) {
        if (document instanceof DeleteRequest) {
            return (DeleteRequest) document;
        }
        if (document instanceof String) {
            return new DeleteRequest()
                    .index(exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_NAME, String.class))
                    .id((String) document);
        } else {
            throw new IllegalArgumentException("Wrong body type. Only DeleteRequest or String is allowed as a type");
        }
    }

    /**
     * Converts the message body into a {@link DeleteIndexRequest}. The index
     * name is taken from the {@code indexName} header, not from the body.
     *
     * @throws IllegalArgumentException if the body is neither a
     *         {@code DeleteIndexRequest} nor a {@code String}
     */
    @Converter
    public static DeleteIndexRequest toDeleteIndexRequest(Object document, Exchange exchange) {
        if (document instanceof DeleteIndexRequest) {
            return (DeleteIndexRequest) document;
        }
        if (document instanceof String) {
            String index = exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_NAME, String.class);
            return new DeleteIndexRequest(index);
        } else {
            throw new IllegalArgumentException("Wrong body type. Only DeleteIndexRequest or String is allowed as a type");
        }
    }

    /**
     * Converts a query (Map or JSON String, optionally wrapped in a top-level
     * {@code "query"} element for backward compatibility) into a
     * {@link SearchRequest}.
     *
     * @return the search request, or {@code null} if no query text could be derived
     * @throws IOException if the String body is not valid JSON
     */
    @Converter
    @SuppressWarnings("unchecked")
    public static SearchRequest toSearchRequest(Object queryObject, Exchange exchange) throws IOException {
        if (queryObject instanceof SearchRequest) {
            return (SearchRequest) queryObject;
        }
        SearchRequest searchRequest = new SearchRequest();
        // Only setup the indexName if the message header has the setting
        String indexName = exchange.getIn().getHeader(ElasticsearchConstants.PARAM_INDEX_NAME, String.class);
        if (ObjectHelper.isNotEmpty(indexName)) {
            searchRequest.indices(indexName);
        }
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        String queryText = null;
        if (queryObject instanceof Map<?, ?>) {
            Map<String, Object> mapQuery = (Map<String, Object>) queryObject;
            // Remove 'query' prefix from the query object for backward compatibility
            if (mapQuery.containsKey(ES_QUERY_DSL_PREFIX)) {
                mapQuery = (Map<String, Object>) mapQuery.get(ES_QUERY_DSL_PREFIX);
            }
            try {
                XContentBuilder contentBuilder = XContentFactory.contentBuilder(XContentType.JSON);
                queryText = Strings.toString(contentBuilder.map(mapQuery));
            } catch (IOException e) {
                LOG.error("Cannot build the QueryText from the map.", e);
            }
        } else if (queryObject instanceof String) {
            queryText = (String) queryObject;
            ObjectMapper mapper = new ObjectMapper();
            JsonNode jsonTextObject = mapper.readValue(queryText, JsonNode.class);
            JsonNode parentJsonNode = jsonTextObject.get(ES_QUERY_DSL_PREFIX);
            if (parentJsonNode != null) {
                queryText = parentJsonNode.toString();
            }
        } else {
            // Cannot convert the queryObject into SearchRequest
            LOG.info("Cannot convert queryObject into SearchRequest object");
            return null;
        }
        if (queryText == null) {
            // FIX: the Map branch can fail with an IOException and leave
            // queryText null; the original then passed null to wrapperQuery.
            LOG.info("Cannot convert queryObject into SearchRequest object");
            return null;
        }
        searchSourceBuilder.query(QueryBuilders.wrapperQuery(queryText));
        searchRequest.source(searchSourceBuilder);
        return searchRequest;
    }

    /**
     * Converts the message body into a {@link BulkRequest}. A {@code List}
     * body is turned into one {@link IndexRequest} per element.
     *
     * @throws IllegalArgumentException if the body is neither a
     *         {@code BulkRequest} nor a {@code List}, or if a list element
     *         cannot be converted into an {@code IndexRequest}
     */
    @Converter
    @SuppressWarnings("unchecked")
    public static BulkRequest toBulkRequest(Object documents, Exchange exchange) {
        if (documents instanceof BulkRequest) {
            return (BulkRequest) documents;
        }
        if (documents instanceof List) {
            BulkRequest request = new BulkRequest();
            for (Object document : (List<Object>) documents) {
                IndexRequest indexRequest = createIndexRequest(document, exchange);
                if (indexRequest == null) {
                    // FIX: the original added null to the bulk request and
                    // failed later with a NullPointerException.
                    throw new IllegalArgumentException(
                            "Wrong body type. List element cannot be converted into an IndexRequest");
                }
                request.add(indexRequest);
            }
            return request;
        } else {
            throw new IllegalArgumentException("Wrong body type. Only BulkRequest or List is allowed as a type");
        }
    }
}
|
Fabryprog/camel
|
components/camel-elasticsearch-rest/src/main/java/org/apache/camel/component/elasticsearch/converter/ElasticsearchActionRequestConverter.java
|
Java
|
apache-2.0
| 9,397
|
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
//go:build go1.16 && integration
// +build go1.16,integration
package codebuild_test
import (
"context"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/awstesting/integration"
"github.com/aws/aws-sdk-go/service/codebuild"
)
// Blank assignments keep the generated imports referenced even when a
// particular test body does not use them directly.
var _ aws.Config
var _ awserr.Error
var _ request.Request

// TestInteg_00_ListBuilds is a generated smoke test: it calls
// CodeBuild ListBuilds in us-west-2 with an empty input (parameter
// validation removed) and fails only if the call returns an error.
func TestInteg_00_ListBuilds(t *testing.T) {
	// Bound the API call to 5 seconds so a hung request fails fast.
	ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancelFn()
	sess := integration.SessionWithDefaultRegion("us-west-2")
	svc := codebuild.New(sess)
	params := &codebuild.ListBuildsInput{}
	_, err := svc.ListBuildsWithContext(ctx, params, func(r *request.Request) {
		// The empty input would fail client-side validation; drop it so the
		// request actually reaches the service.
		r.Handlers.Validate.RemoveByName("core.ValidateParametersHandler")
	})
	if err != nil {
		t.Errorf("expect no error, got %v", err)
	}
}
|
aws/aws-sdk-go
|
service/codebuild/integ_test.go
|
GO
|
apache-2.0
| 953
|
using System.Diagnostics.Contracts;
using Bari.Core.UI;
using System;
namespace Bari.Core.Build.Dependencies
{
/// <summary>
/// Represents dependency on another builder (<see cref="IBuilder"/>)
/// </summary>
public class SubtaskDependency: DependenciesBase
{
private readonly IBuilder subtask;
/// <summary>
/// Constructs the dependency object
/// </summary>
/// <param name="subtask"></param>
public SubtaskDependency(IBuilder subtask)
{
Contract.Requires(subtask != null);
this.subtask = subtask;
}
/// <summary>
/// Creates fingerprint of the dependencies represented by this object, which can later be compared
/// to other fingerprints.
/// </summary>
/// <returns>Returns the fingerprint of the dependent item's current state.</returns>
protected override IDependencyFingerprint CreateFingerprint()
{
return subtask.Dependencies.Fingerprint;
}
public override void Dump(IUserOutput output)
{
output.Message(String.Format("Subtask {0}", subtask));
output.Indent();
try
{
subtask.Dependencies.Dump(output);
}
finally
{
output.Unindent();
}
}
}
}
|
Psychobilly87/bari
|
src/core/Bari.Core/cs/Build/Dependencies/SubtaskDependency.cs
|
C#
|
apache-2.0
| 1,450
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.spi.security.authentication.external.impl.jmx;
import org.apache.jackrabbit.commons.json.JsonUtil;
import org.apache.jackrabbit.oak.spi.security.authentication.external.ExternalIdentityRef;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncResult;
import org.apache.jackrabbit.oak.spi.security.authentication.external.SyncedIdentity;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
 * Accumulates JSON-formatted, per-identity result messages for external
 * identity synchronization, for exposure through the JMX facade.
 */
final class ResultMessages {

    private final List<String> messages = new ArrayList<>();

    ResultMessages() {}

    /** Returns the accumulated messages in insertion order. */
    String[] getMessages() {
        return messages.toArray(new String[0]);
    }

    /** Records one message per sync result. */
    void append(@NotNull List<SyncResult> results) {
        for (SyncResult syncResult : results) {
            append(syncResult);
        }
    }

    /**
     * Records one message per sync result, attributing the given exception to
     * every result whose status indicates a write operation (add, delete,
     * update, enable, disable).
     */
    void append(@NotNull List<SyncResult> results, @NotNull Exception e) {
        for (SyncResult syncResult : results) {
            if (syncResult instanceof ErrorSyncResult) {
                append(syncResult.getIdentity(), ((ErrorSyncResult) syncResult).getException());
            } else if (isWriteStatus(syncResult.getStatus())) {
                append(syncResult.getIdentity(), e);
            } else {
                append(syncResult);
            }
        }
    }

    /** True for statuses that represent a (possibly failed) write operation. */
    private static boolean isWriteStatus(@NotNull SyncResult.Status status) {
        switch (status) {
            case ADD:
            case DELETE:
            case UPDATE:
            case ENABLE:
            case DISABLE:
                return true;
            default:
                return false;
        }
    }

    /** Records a single result, unwrapping error results into exception form. */
    private void append(@NotNull SyncResult r) {
        if (r instanceof ErrorSyncResult) {
            append(r.getIdentity(), ((ErrorSyncResult) r).getException());
        } else {
            append(r.getIdentity(), getOperationFromStatus(r.getStatus()), null);
        }
    }

    /** Records an error entry carrying the exception's string form. */
    private void append(@Nullable SyncedIdentity syncedIdentity, @NotNull Exception e) {
        append(syncedIdentity, "ERR", e.toString());
    }

    /**
     * Formats and stores one message as a JSON-ish object literal with the
     * operation code, the (JSON-escaped) user id and external id, and an
     * optional message.
     */
    private void append(@Nullable SyncedIdentity syncedIdentity, @NotNull String op, @Nullable String msg) {
        String uid = JsonUtil.getJsonString((syncedIdentity == null ? null : syncedIdentity.getId()));
        ExternalIdentityRef ref = (syncedIdentity == null) ? null : syncedIdentity.getExternalIdRef();
        String eid = (ref == null) ? "\"\"" : JsonUtil.getJsonString(ref.getString());
        String entry;
        if (msg == null) {
            entry = String.format("{op:\"%s\",uid:%s,eid:%s}", op, uid, eid);
        } else {
            entry = String.format("{op:\"%s\",uid:%s,eid:%s,msg:%s}", op, uid, eid, JsonUtil.getJsonString(msg));
        }
        messages.add(entry);
    }

    /** Maps a sync status to its short three-letter operation code. */
    private static String getOperationFromStatus(@NotNull SyncResult.Status syncStatus) {
        switch (syncStatus) {
            case ADD:
                return "add";
            case UPDATE:
                return "upd";
            case DELETE:
                return "del";
            case ENABLE:
                return "ena";
            case DISABLE:
                return "dis";
            case NO_SUCH_AUTHORIZABLE:
                return "nsa";
            case NO_SUCH_IDENTITY:
                return "nsi";
            case MISSING:
                return "mis";
            case FOREIGN:
                return "for";
            default:
                // Status.NOP
                return "nop";
        }
    }
}
|
apache/jackrabbit-oak
|
oak-auth-external/src/main/java/org/apache/jackrabbit/oak/spi/security/authentication/external/impl/jmx/ResultMessages.java
|
Java
|
apache-2.0
| 4,532
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test conversion of graphs involving INT32 tensors and operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class ExcludeUnsupportedInt32Test(trt_test.TfTrtIntegrationTestBase):
  """Test exclusion of ops which are not supported in INT32 mode by TF-TRT."""

  def _ConstOp(self, shape, dtype):
    # Random-normal constant used as the weight/bias tensors of the graph.
    return constant_op.constant(np.random.randn(*shape), dtype=dtype)

  def GraphFn(self, x):
    """Builds matmul + bias_add + identity in the dtype of `x` (INT32 here)."""
    dtype = x.dtype
    b = self._ConstOp((4, 10), dtype)
    x = math_ops.matmul(x, b)
    b = self._ConstOp((10,), dtype)
    x = nn.bias_add(x, b)
    return array_ops.identity(x, name='output_0')

  def GetParams(self):
    # Input: int32 tensor of shape [100, 4]; expected output shape: [100, 10].
    return self.BuildParams(self.GraphFn, dtypes.int32, [[100, 4]], [[100, 10]])

  def GetConversionParams(self, run_params):
    """Return a ConversionParams for test."""
    conversion_params = super(ExcludeUnsupportedInt32Test,
                              self).GetConversionParams(run_params)
    return conversion_params._replace(
        max_batch_size=100,
        maximum_cached_engines=1,
        # Disable layout optimizer, since it will convert BiasAdd with NHWC
        # format to NCHW format under four dimensional input.
        rewriter_config_template=trt_test.OptimizerDisabledRewriterConfig())

  def ExpectedEnginesToBuild(self, run_params):
    """Return the expected engines to build."""
    # Empty: no TRT engine should be built, since the INT32 matmul/bias_add
    # ops are expected to be excluded from conversion.
    return []
# Run the test suite when invoked directly.
if __name__ == '__main__':
  test.main()
|
alsrgv/tensorflow
|
tensorflow/python/compiler/tensorrt/test/int32_test.py
|
Python
|
apache-2.0
| 2,529
|
# lokki
This is the main repository for the Lokki project.
Lokki is an Open Source Project that develops a secure location sharing service. The code is based on a service developed by F-Secure Corporation and open sourced at the end of 2014. This repository is used for high-level tasks, general documentation, and the project web site (in the gh-pages branch).
# Code
[lokki](https://github.com/TheSoftwareFactory/lokki) -- This is the main repository for the project, and includes things like high-level issues, GitHub pages, user documentation, and general developer documentation.
[lokki-android](https://github.com/TheSoftwareFactory/lokki-android) -- This contains the Android client code as well as developer documentation and issues related to the Android client.
[lokki-server](https://github.com/TheSoftwareFactory/lokki-server) -- This contains the Node.js server code as well as developer documentation and issues related to the backend.
[lokki-ios](https://github.com/TheSoftwareFactory/lokki-ios) -- This contains the iOS client code as well as developer documentation and issues related to the iOS client. Not maintained at the moment.
[lokki-wp8](https://github.com/TheSoftwareFactory/lokki-wp8) -- This contains the Windows Phone client code as well as developer documentation and issues related to the Windows Phone client. Not maintained at the moment.
## Workflow
Project roughly follows the process described in [this guide](https://guides.github.com/activities/contributing-to-open-source/), with the following additions:
- Trivial things can be fixed by doing a pull request without having a related issue for them.
- When you start working on an issue, assign yourself to it. If you don't have permission to do that, comment on the issue to say that you're working on it.
- When creating a pull request, please add words "Connects to #0", replacing #0 with the relevant issue ID, to the pull request description. This [connects the pull request](https://github.com/waffleio/waffle.io/wiki/FAQs#prs-connect-keywords) to the relevant issue in our [waffle board](https://waffle.io/thesoftwarefactory/lokki).
- After making a pull request, you might want to be proactive and try to get someone to review and possibly merge it.
- We're using Continuous Integration; make sure the PR passes on Travis before merging.
# Communication and Collaboration
Join #lokki on IRCNet (for example, irc.stealth.net).
[waffle.io task board](https://waffle.io/thesoftwarefactory/lokki)
[](http://waffle.io/thesoftwarefactory/lokki)
|
TheSoftwareFactory/lokki
|
README.md
|
Markdown
|
apache-2.0
| 2,640
|
/**
* Copyright 2017 TerraMeta Software, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.plasma.provisioning.xsd;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xerces.dom.ElementNSImpl;
import org.plasma.common.provisioning.NameUtils;
import org.plasma.metamodel.Body;
import org.plasma.metamodel.Documentation;
import org.plasma.metamodel.DocumentationType;
import org.plasma.xml.schema.Annotated;
import org.plasma.xml.schema.Appinfo;
public abstract class AbstractAssembler {
private static Log log = LogFactory.getLog(AbstractAssembler.class);
protected String destNamespaceURI;
protected String destNamespacePrefix;
protected ConverterSupport support;
@SuppressWarnings("unused")
private AbstractAssembler() {
}
public AbstractAssembler(String destNamespaceURI, String destNamespacePrefix,
ConverterSupport converterSupport) {
super();
this.destNamespaceURI = destNamespaceURI;
this.destNamespacePrefix = destNamespacePrefix;
this.support = converterSupport;
}
protected Documentation createDocumentation(DocumentationType type, String content) {
Documentation documentation = new Documentation();
documentation.setType(type);
Body body = new Body();
body.setValue(content);
documentation.setBody(body);
return documentation;
}
protected String formatLocalClassName(String localName) {
if (localName == null || localName.trim().length() == 0)
throw new IllegalArgumentException("expected localName argument");
String result = localName;
result = NameUtils.firstToUpperCase(result);
return result;
}
protected String formatLocalPropertyName(String localName) {
if (localName == null || localName.trim().length() == 0)
throw new IllegalArgumentException("expected localName argument");
String result = localName;
result = NameUtils.firstToLowerCase(result);
return result;
}
protected String getDocumentationContent(Annotated annotated) {
StringBuilder buf = new StringBuilder();
if (annotated != null && annotated.getAnnotation() != null)
for (Object annotationObj : annotated.getAnnotation().getAppinfosAndDocumentations()) {
if (annotationObj instanceof org.plasma.xml.schema.Documentation) {
org.plasma.xml.schema.Documentation doc = (org.plasma.xml.schema.Documentation) annotationObj;
for (Object content : doc.getContent())
if (content instanceof String) {
buf.append(content);
} else if (content instanceof ElementNSImpl) {
ElementNSImpl nsElem = (ElementNSImpl) content;
buf.append(serializeElement(nsElem));
} else
throw new IllegalStateException("unexpected content class, "
+ annotationObj.getClass().getName());
} else if (annotationObj instanceof Appinfo) {
log.warn("ignoring app-info: " + String.valueOf(annotationObj));
}
}
return buf.toString();
}
/**
 * Extracts the first app-info value attached to a schema enumeration facet.
 * <p>
 * Only the first {@link Appinfo} annotation is consulted; its first content
 * item is returned trimmed, with blank values normalized to null.
 *
 * @param schemaEnum the enumeration facet to inspect
 * @return the trimmed app-info text, or null if absent or blank
 */
protected String findAppInfoValue(org.plasma.xml.schema.Enumeration schemaEnum) {
    String result = null;
    if (schemaEnum.getAnnotation() != null) {
        for (Object o2 : schemaEnum.getAnnotation().getAppinfosAndDocumentations()) {
            if (o2 instanceof Appinfo) {
                Appinfo appinfo = (Appinfo) o2;
                // Guard against an app-info element with no content at all.
                if (!appinfo.getContent().isEmpty()) {
                    result = (String) appinfo.getContent().get(0);
                }
                if (result != null) {
                    // FIX: String.trim() returns a new string; the original
                    // discarded its result, so padded values were never trimmed.
                    result = result.trim();
                    if (result.length() == 0) {
                        result = null;
                    }
                }
                break;
            }
        }
    }
    return result;
}
/**
 * Serializes the document containing the given DOM element to indented XML
 * text (no XML declaration).
 * <p>
 * NOTE(review): this transforms {@code nsElem.getOwnerDocument()}, i.e. the
 * whole owning document rather than just the element — appears intentional
 * for annotation content, but confirm.
 *
 * @param nsElem a DOM element whose owner document is serialized
 * @return the serialized XML, or an empty string if transformation fails
 */
protected String serializeElement(ElementNSImpl nsElem) {
    String result = "";
    TransformerFactory transFactory = TransformerFactory.newInstance();
    log.debug("transformer factory: " + transFactory.getClass().getName());
    try {
        Transformer idTransform = transFactory.newTransformer();
        idTransform.setOutputProperty(OutputKeys.METHOD, "xml");
        idTransform.setOutputProperty(OutputKeys.INDENT, "yes");
        idTransform.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        Source input = new DOMSource(nsElem.getOwnerDocument());
        // FIX: write to a character stream instead of a byte stream. The old
        // code decoded the transformer's (UTF-8 by default) bytes with
        // new String(byte[]) using the platform default charset, corrupting
        // non-ASCII text on non-UTF-8 platforms. StringWriter needs no
        // closing and removes the IOException path entirely.
        java.io.StringWriter writer = new java.io.StringWriter();
        idTransform.transform(input, new StreamResult(writer));
        result = writer.toString();
    } catch (TransformerConfigurationException e1) {
        log.error(e1.getMessage(), e1);
    } catch (TransformerException e) {
        log.error(e.getMessage(), e);
    }
    return result;
}
}
|
plasma-framework/plasma
|
plasma-core/src/main/java/org/plasma/provisioning/xsd/AbstractAssembler.java
|
Java
|
apache-2.0
| 5,925
|
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.mapdemo;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Polygon;
import com.google.android.gms.maps.model.PolygonOptions;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.CheckBox;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import java.util.Arrays;
import java.util.List;
/**
 * Demonstrates drawing polygons (including ones with holes) on a map, with
 * seek bars that mutate one polygon's hue, alpha and stroke width at runtime.
 */
public class PolygonDemoActivity extends AppCompatActivity
        implements OnSeekBarChangeListener, OnMapReadyCallback {

    private static final LatLng SYDNEY = new LatLng(-33.87365, 151.20689);
    private static final int WIDTH_MAX = 50;
    private static final int HUE_MAX = 360;
    private static final int ALPHA_MAX = 255;

    // Polygon whose fill/stroke the seek bars manipulate.
    private Polygon mutablePolygon;
    // Clickable rectangle containing two rectangular holes.
    private Polygon polygonWithHoles;
    private SeekBar hueBar;
    private SeekBar alphaBar;
    private SeekBar widthBar;
    private CheckBox clickabilityCheckbox;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.polygon_demo);

        hueBar = configureSeekBar(R.id.hueSeekBar, HUE_MAX, 0);
        alphaBar = configureSeekBar(R.id.alphaSeekBar, ALPHA_MAX, 127);
        widthBar = configureSeekBar(R.id.widthSeekBar, WIDTH_MAX, 10);
        clickabilityCheckbox = (CheckBox) findViewById(R.id.toggleClickability);

        SupportMapFragment mapFragment =
                (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.map);
        mapFragment.getMapAsync(this);
    }

    /** Looks up a seek bar by id and initializes its range and position. */
    private SeekBar configureSeekBar(int id, int max, int progress) {
        SeekBar bar = (SeekBar) findViewById(id);
        bar.setMax(max);
        bar.setProgress(progress);
        return bar;
    }

    @Override
    public void onMapReady(GoogleMap map) {
        // Override the default content description on the view, for accessibility mode.
        // Ideally this string would be localised.
        map.setContentDescription("Google Map with polygons.");

        // A rectangle punctured by two rectangular holes; clickable per the checkbox.
        polygonWithHoles = map.addPolygon(new PolygonOptions()
                .addAll(createRectangle(new LatLng(-20, 130), 5, 5))
                .addHole(createRectangle(new LatLng(-22, 128), 1, 1))
                .addHole(createRectangle(new LatLng(-18, 133), 0.5, 1.5))
                .fillColor(Color.CYAN)
                .strokeColor(Color.BLUE)
                .strokeWidth(5)
                .clickable(clickabilityCheckbox.isChecked()));

        // A mutable rectangle centered at Sydney; initial fill derives from the bars.
        int initialFill = Color.HSVToColor(
                alphaBar.getProgress(), new float[]{hueBar.getProgress(), 1, 1});
        mutablePolygon = map.addPolygon(new PolygonOptions()
                .addAll(createRectangle(SYDNEY, 5, 8))
                .clickable(clickabilityCheckbox.isChecked())
                .strokeWidth(widthBar.getProgress())
                .strokeColor(Color.BLACK)
                .fillColor(initialFill));

        // A third polygon overlapping the previous two.
        // Clickability defaults to false, so this one won't accept clicks.
        map.addPolygon(new PolygonOptions()
                .addAll(createRectangle(new LatLng(-27, 140), 10, 7))
                .fillColor(Color.WHITE)
                .strokeColor(Color.BLACK));

        hueBar.setOnSeekBarChangeListener(this);
        alphaBar.setOnSeekBarChangeListener(this);
        widthBar.setOnSeekBarChangeListener(this);

        // Center the camera on the mutable polygon.
        map.moveCamera(CameraUpdateFactory.newLatLng(SYDNEY));

        // Clicking a polygon flips the r, g and b components of its stroke color.
        map.setOnPolygonClickListener(new GoogleMap.OnPolygonClickListener() {
            @Override
            public void onPolygonClick(Polygon polygon) {
                polygon.setStrokeColor(polygon.getStrokeColor() ^ 0x00ffffff);
            }
        });
    }

    /**
     * Creates the closed outline of an axis-aligned rectangle around a center
     * point, expressed as half-extents in degrees.
     */
    private List<LatLng> createRectangle(LatLng center, double halfWidth, double halfHeight) {
        double south = center.latitude - halfHeight;
        double north = center.latitude + halfHeight;
        double west = center.longitude - halfWidth;
        double east = center.longitude + halfWidth;
        return Arrays.asList(
                new LatLng(south, west),
                new LatLng(south, east),
                new LatLng(north, east),
                new LatLng(north, west),
                new LatLng(south, west));
    }

    @Override
    public void onStopTrackingTouch(SeekBar seekBar) {
        // No-op.
    }

    @Override
    public void onStartTrackingTouch(SeekBar seekBar) {
        // No-op.
    }

    @Override
    public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
        if (mutablePolygon == null) {
            return;
        }
        if (seekBar == hueBar) {
            // Rotate the hue while preserving the current alpha.
            mutablePolygon.setFillColor(Color.HSVToColor(
                    Color.alpha(mutablePolygon.getFillColor()), new float[]{progress, 1, 1}));
        } else if (seekBar == alphaBar) {
            // Change only the alpha channel, preserving the current rgb.
            int prevColor = mutablePolygon.getFillColor();
            mutablePolygon.setFillColor(Color.argb(
                    progress, Color.red(prevColor), Color.green(prevColor),
                    Color.blue(prevColor)));
        } else if (seekBar == widthBar) {
            mutablePolygon.setStrokeWidth(progress);
        }
    }

    /**
     * Toggles the clickability of two polygons based on the state of the View that
     * triggered this call.
     * This callback is defined on the CheckBox in the layout for this Activity.
     */
    public void toggleClickability(View view) {
        boolean clickable = ((CheckBox) view).isChecked();
        if (polygonWithHoles != null) {
            polygonWithHoles.setClickable(clickable);
        }
        if (mutablePolygon != null) {
            mutablePolygon.setClickable(clickable);
        }
    }
}
|
viktorkh/ApiDemos
|
app/src/main/java/com/example/mapdemo/PolygonDemoActivity.java
|
Java
|
apache-2.0
| 7,375
|
package net.ros.common.machine.module;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraftforge.common.capabilities.Capability;
import javax.annotation.Nullable;
/**
 * Machine-module extension point that answers Forge capability queries on
 * behalf of its owning machine.
 */
public interface ICapabilityModule
{
/**
 * @param capability the capability being queried
 * @param from       position the query originates from
 * @param facing     side being queried, or null for a side-less query
 * @return true if this module can supply the capability
 */
boolean hasCapability(Capability<?> capability, BlockPos from, @Nullable EnumFacing facing);
/**
 * @param capability the capability being requested
 * @param from       position the request originates from
 * @param facing     side being queried, or null for a side-less query
 * @return the capability instance, or null if unsupported
 */
@Nullable
<T> T getCapability(Capability<T> capability, BlockPos from, @Nullable EnumFacing facing);
}
|
mantal/Qbar
|
common/src/main/java/net/ros/common/machine/module/ICapabilityModule.java
|
Java
|
apache-2.0
| 459
|
<html dir="LTR">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=Windows-1252" />
<meta name="vs_targetSchema" content="http://schemas.microsoft.com/intellisense/ie5" />
<title>BufferedIndexOutput Constructor</title>
<xml>
</xml>
<link rel="stylesheet" type="text/css" href="MSDN.css" />
</head>
<body id="bodyID" class="dtBODY">
<div id="nsbanner">
<div id="bannerrow1">
<table class="bannerparthead" cellspacing="0">
<tr id="hdr">
<td class="runninghead">Apache Lucene.Net 2.4.0 Class Library API</td>
<td class="product">
</td>
</tr>
</table>
</div>
<div id="TitleRow">
<h1 class="dtH1">BufferedIndexOutput Constructor </h1>
</div>
</div>
<div id="nstext">
<p>Initializes a new instance of the <a href="Lucene.Net.Store.BufferedIndexOutput.html">BufferedIndexOutput</a> class.</p>
<div class="syntax">protected BufferedIndexOutput();</div>
<h4 class="dtH4">See Also</h4>
<p>
<a href="Lucene.Net.Store.BufferedIndexOutput.html">BufferedIndexOutput Class</a> | <a href="Lucene.Net.Store.html">Lucene.Net.Store Namespace</a></p>
<object type="application/x-oleobject" classid="clsid:1e2a7bd0-dab9-11d0-b93a-00c04fc99f9e" viewastext="true" style="display: none;">
<param name="Keyword" value="BufferedIndexOutput class, constructor">
</param>
</object>
<hr />
<div id="footer">
<p>
</p>
<p>Generated from assembly Lucene.Net [2.4.0.2]</p>
</div>
</div>
</body>
</html>
|
Mpdreamz/lucene.net
|
doc/core/Lucene.Net.Store.BufferedIndexOutputConstructor.html
|
HTML
|
apache-2.0
| 1,682
|
/*
* Copyright 2016 Robin Engel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package commons
import "net/http"
// HttpNoContent replies with 204 No Content.
// NOTE(review): http.Error also writes the status text as a body, but a 204
// response is defined as body-less — confirm clients tolerate this.
func HttpNoContent(w http.ResponseWriter) {
	HttpError(w, http.StatusNoContent)
}
// HttpUnauthorized replies with 401 Unauthorized and its standard status text.
func HttpUnauthorized(w http.ResponseWriter) {
	HttpError(w, http.StatusUnauthorized)
}
// HttpBadRequest replies with 400 Bad Request and its standard status text.
func HttpBadRequest(w http.ResponseWriter) {
	HttpError(w, http.StatusBadRequest)
}
func HttpError(w http.ResponseWriter, code int) {
http.Error(w, http.StatusText(code), code)
}
// HttpCheckError writes an error response with the given status when err is
// non-nil and is a no-op otherwise. It does not abort the caller's flow; the
// caller must still return after a detected error.
func HttpCheckError(err error, status int, w http.ResponseWriter) {
	if err == nil {
		return
	}
	HttpError(w, status)
}
|
bluedevel/mosel
|
commons/http.go
|
GO
|
apache-2.0
| 1,097
|
/**
* Copyright 2016 StreamSets Inc.
*
* Licensed under the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.pipeline.stage.destination.kudu;
import com.streamsets.pipeline.api.GenerateResourceBundle;
import com.streamsets.pipeline.api.Label;
/**
 * Kudu client consistency modes offered in the destination configuration.
 * Each constant carries the human-readable label shown in the UI.
 */
@GenerateResourceBundle
public enum ConsistencyMode implements Label {
  CLIENT_PROPAGATED("Client Propagated"), COMMIT_WAIT("Commit Wait");

  // FIX: enum-constant state never changes after construction — mark final.
  private final String label;

  ConsistencyMode(String label) {
    this.label = label;
  }

  @Override
  public String getLabel() {
    return label;
  }
}
|
studanshu/datacollector
|
kudu-protolib/src/main/java/com/streamsets/pipeline/stage/destination/kudu/ConsistencyMode.java
|
Java
|
apache-2.0
| 1,303
|
require 'devise/strategies/http_header_authenticatable'
module Devise
module Models
# Model mixin paired with the :http_header_authenticatable strategy required
# above — users are authenticated from trusted HTTP headers rather than a
# local password.
module HttpHeaderAuthenticatable
extend ActiveSupport::Concern
# Devise hook invoked after the record is authenticated; intentionally a
# no-op since header-based auth needs no post-authentication bookkeeping.
def after_database_authentication
end
protected
end
end
end
|
zhang4952/scholarsphere
|
lib/devise/models/http_header_authenticatable.rb
|
Ruby
|
apache-2.0
| 245
|
//** Smooth Navigational Menu- By Dynamic Drive DHTML code library: http://www.dynamicdrive.com
//** Script Download/ instructions page: http://www.dynamicdrive.com/dynamicindex1/ddlevelsmenu/
//** Menu created: Nov 12, 2008
//** Dec 12th, 08" (v1.01): Fixed Shadow issue when multiple LIs within the same UL (level) contain sub menus: http://www.dynamicdrive.com/forums/showthread.php?t=39177&highlight=smooth
//** Feb 11th, 09" (v1.02): The currently active main menu item (LI A) now gets a CSS class of ".selected", including sub menu items.
//** May 1st, 09" (v1.3):
//** 1) Now supports vertical (side bar) menu mode- set "orientation" to 'v'
//** 2) In IE6, shadows are now always disabled
//** July 27th, 09" (v1.31): Fixed bug so shadows can be disabled if desired.
//** Feb 2nd, 10" (v1.4): Adds ability to specify delay before sub menus appear and disappear, respectively. See showhidedelay variable below
//** Dec 17th, 10" (v1.5): Updated menu shadow to use CSS3 box shadows when the browser is FF3.5+, IE9+, Opera9.5+, or Safari3+/Chrome. Only .js file changed.
// Menu engine singleton. The properties above "Stop configuring beyond here"
// are user configuration; the methods below optionally fetch an AJAX menu
// source, build the DOM/hover behavior, and bootstrap everything via init().
var ddsmoothmenu={
//Specify full URL to down and right arrow images (23 is padding-right added to top level LIs with drop downs):
arrowimages: {down:['downarrowclass', 'scripts/menu/down.png', 2], right:['rightarrowclass', 'scripts/menu/right.gif']},
transition: {overtime:300, outtime:300}, //duration of slide in/ out animation, in milliseconds
shadow: {enable:true, offsetx:5, offsety:5}, //enable shadow?
showhidedelay: {showdelay: 100, hidedelay: 200}, //set delay in milliseconds before sub menus appear and disappear, respectively
///////Stop configuring beyond here///////////////////////////
detectwebkit: navigator.userAgent.toLowerCase().indexOf("applewebkit")!=-1, //detect WebKit browsers (Safari, Chrome etc)
detectie6: document.all && !window.XMLHttpRequest,
css3support: window.msPerformance || (!document.all && document.querySelector), //detect browsers that support CSS3 box shadows (ie9+ or FF3.5+, Safari3+, Chrome etc)
getajaxmenu:function($, setting){ //function to fetch external page containing the panel DIVs
var $menucontainer=$('#'+setting.contentsource[0]) //reference empty div on page that will hold menu
$menucontainer.html("Loading Menu...")
$.ajax({
url: setting.contentsource[1], //path to external menu file
async: true,
error:function(ajaxrequest){
$menucontainer.html('Error fetching content. Server Response: '+ajaxrequest.responseText)
},
success:function(content){
//inject fetched markup, then run the normal build over it
$menucontainer.html(content)
ddsmoothmenu.buildmenu($, setting)
}
})
},
buildmenu:function($, setting){
var smoothmenu=ddsmoothmenu
var $mainmenu=$("#"+setting.mainmenuid+">ul") //reference main menu UL
$mainmenu.parent().get(0).className=setting.classname || "ddsmoothmenu"
var $headers=$mainmenu.find("ul").parent()
$headers.hover(
function(e){
$(this).children('a:eq(0)').addClass('selected')
},
function(e){
$(this).children('a:eq(0)').removeClass('selected')
}
)
$headers.each(function(i){ //loop through each LI header
var $curobj=$(this).css({zIndex: 100-i}) //reference current LI header
var $subul=$(this).find('ul:eq(0)').css({display:'block'})
$subul.data('timers', {})
//cache header and sub-menu dimensions directly on the DOM node
this._dimensions={w:this.offsetWidth, h:this.offsetHeight, subulw:$subul.outerWidth(), subulh:$subul.outerHeight()}
this.istopheader=$curobj.parents("ul").length==1? true : false //is top level header?
$subul.css({top:this.istopheader && setting.orientation!='v'? this._dimensions.h+"px" : 0})
$curobj.children("a:eq(0)").css(this.istopheader? {paddingRight: smoothmenu.arrowimages.down[2]} : {}).append( //add arrow images
'<img src="'+ (this.istopheader && setting.orientation!='v'? smoothmenu.arrowimages.down[1] : smoothmenu.arrowimages.right[1])
+'" class="' + (this.istopheader && setting.orientation!='v'? smoothmenu.arrowimages.down[0] : smoothmenu.arrowimages.right[0])
+ '" style="border:0;" />'
)
if (smoothmenu.shadow.enable && !smoothmenu.css3support){ //if shadows enabled and browser doesn't support CSS3 box shadows
this._shadowoffset={x:(this.istopheader?$subul.offset().left+smoothmenu.shadow.offsetx : this._dimensions.w), y:(this.istopheader? $subul.offset().top+smoothmenu.shadow.offsety : $curobj.position().top)} //store this shadow's offsets
//NOTE(review): $parentshadow below has no "var" declaration and leaks to
//the global scope; it appears to be relied on across iterations — confirm
//before scoping it locally.
if (this.istopheader)
$parentshadow=$(document.body)
else{
var $parentLi=$curobj.parents("li:eq(0)")
$parentshadow=$parentLi.get(0).$shadow
}
this.$shadow=$('<div class="ddshadow'+(this.istopheader? ' toplevelshadow' : '')+'"></div>').prependTo($parentshadow).css({left:this._shadowoffset.x+'px', top:this._shadowoffset.y+'px'}) //insert shadow DIV and set it to parent node for the next shadow div
}
$curobj.hover(
function(e){
var $targetul=$subul //reference UL to reveal
var header=$curobj.get(0) //reference header LI as DOM object
clearTimeout($targetul.data('timers').hidetimer)
$targetul.data('timers').showtimer=setTimeout(function(){
header._offsets={left:$curobj.offset().left, top:$curobj.offset().top}
var menuleft=header.istopheader && setting.orientation!='v'? 0 : header._dimensions.w
menuleft=(header._offsets.left+menuleft+header._dimensions.subulw>$(window).width())? (header.istopheader && setting.orientation!='v'? -header._dimensions.subulw+header._dimensions.w : -header._dimensions.w) : menuleft //calculate this sub menu's offsets from its parent
if ($targetul.queue().length<=1){ //if 1 or less queued animations
$targetul.css({left:menuleft+"px", width:header._dimensions.subulw+'px'}).animate({height:'show',opacity:'show'}, ddsmoothmenu.transition.overtime)
if (smoothmenu.shadow.enable && !smoothmenu.css3support){
var shadowleft=header.istopheader? $targetul.offset().left+ddsmoothmenu.shadow.offsetx : menuleft
var shadowtop=header.istopheader?$targetul.offset().top+smoothmenu.shadow.offsety : header._shadowoffset.y
if (!header.istopheader && ddsmoothmenu.detectwebkit){ //in WebKit browsers, restore shadow's opacity to full
header.$shadow.css({opacity:1})
}
header.$shadow.css({overflow:'', width:header._dimensions.subulw+'px', left:shadowleft+'px', top:shadowtop+'px'}).animate({height:header._dimensions.subulh+'px'}, ddsmoothmenu.transition.overtime)
}
}
}, ddsmoothmenu.showhidedelay.showdelay)
},
function(e){
var $targetul=$subul
var header=$curobj.get(0)
clearTimeout($targetul.data('timers').showtimer)
$targetul.data('timers').hidetimer=setTimeout(function(){
$targetul.animate({height:'hide', opacity:'hide'}, ddsmoothmenu.transition.outtime)
if (smoothmenu.shadow.enable && !smoothmenu.css3support){
if (ddsmoothmenu.detectwebkit){ //in WebKit browsers, set first child shadow's opacity to 0, as "overflow:hidden" doesn't work in them
header.$shadow.children('div:eq(0)').css({opacity:0})
}
header.$shadow.css({overflow:'hidden'}).animate({height:0}, ddsmoothmenu.transition.outtime)
}
}, ddsmoothmenu.showhidedelay.hidedelay)
}
) //end hover
}) //end $headers.each()
if (smoothmenu.shadow.enable && smoothmenu.css3support){ //if shadows enabled and browser supports CSS3 shadows
var $toplevelul=$('#'+setting.mainmenuid+' ul li ul')
var css3shadow=parseInt(smoothmenu.shadow.offsetx)+"px "+parseInt(smoothmenu.shadow.offsety)+"px 5px #aaa" //construct CSS3 box-shadow value
var shadowprop=["boxShadow", "MozBoxShadow", "WebkitBoxShadow", "MsBoxShadow"] //possible vendor specific CSS3 shadow properties
for (var i=0; i<shadowprop.length; i++){
$toplevelul.css(shadowprop[i], css3shadow)
}
}
$mainmenu.find("ul").css({display:'none', visibility:'visible'})
},
init:function(setting){
if (typeof setting.customtheme=="object" && setting.customtheme.length==2){ //override default menu colors (default/hover) with custom set?
var mainmenuid='#'+setting.mainmenuid
var mainselector=(setting.orientation=="v")? mainmenuid : mainmenuid+', '+mainmenuid
//NOTE(review): document.write only works while the page is still parsing;
//init() must be called inline in the document, not after load.
document.write('<style type="text/css">\n'
+mainselector+' ul li a {background:'+setting.customtheme[0]+';}\n'
+mainmenuid+' ul li a:hover {background:'+setting.customtheme[1]+';}\n'
+'</style>')
}
this.shadow.enable=(document.all && !window.XMLHttpRequest)? false : this.shadow.enable //in IE6, always disable shadow
jQuery(document).ready(function($){ //ajax menu?
if (typeof setting.contentsource=="object"){ //if external ajax menu
ddsmoothmenu.getajaxmenu($, setting)
}
else{ //else if markup menu
ddsmoothmenu.buildmenu($, setting)
}
})
}
} //end ddsmoothmenu variable
|
vikigroup/cbuilk
|
web/skin/temp12/scripts/menu/ddsmoothmenu.js
|
JavaScript
|
apache-2.0
| 8,801
|
<?php
if (!defined('BASEPATH'))
exit('No direct script access allowed');
/**
 * Per-user public profile (DataMapper ORM model).
 *
 * Belongs to exactly one User. The $validation array doubles as field
 * metadata (labels, widget types) consumed by DataMapper's form and
 * validation helpers.
 */
class Profile extends DataMapper {
var $has_one = array('user');
var $has_many = array();
var $validation = array(
'user_id' => array(
'rules' => array(),
'label' => 'User ID'
),
'group_id' => array(
'rules' => array(),
'label' => 'Group ID',
),
'display_name' => array(
'rules' => array('max_length' => 20),
'label' => 'Publicly displayed username',
'type' => 'input'
),
'twitter' => array(
'rules' => array('max_length' => 30),
'label' => 'Twitter username',
'type' => 'input'
),
'bio' => array(
'rules' => array('max_length' => 140),
'label' => 'Bio',
'type' => 'textarea'
)
);
function __construct($id = NULL) {
parent::__construct($id);
}
// DataMapper lifecycle hook; intentionally empty.
function post_model_init($from_cache = FALSE) {
}
/**
 * Moves the profile belonging to $user_id into group $group_id.
 * Admin-only (checked via tank_auth). Returns false when the caller is
 * not an admin or the save fails; true otherwise.
 *
 * @param int $user_id  id of the user whose profile is updated
 * @param int $group_id target group id
 * @return bool
 */
function change_group($user_id, $group_id)
{
$CI = & get_instance();
if(!$CI->tank_auth->is_admin()) return false;
// Load this model instance with the profile row for the given user.
$this->where('user_id', $user_id)->get();
$this->group_id = $group_id;
if(!$this->save())
{
log_message('error', 'change_group(): Could not change group.');
return false;
}
return true;
}
}
|
FoolCode/FoOlSlide
|
application/models/profile.php
|
PHP
|
apache-2.0
| 1,187
|
#
# Copyright 2012-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition for the Mozilla CA certificate bundle used by
# the embedded SSL stack.
name "cacerts"
license "MPL-2.0"
# NOTE(review): this points at a GitHub HTML page rather than a raw license
# text — confirm the license collector renders it as intended.
license_file "https://github.com/bagder/ca-bundle/blob/master/README.md"
skip_transitive_dependency_licensing true
default_version "2017-01-18"
# Default source URL interpolates the version date; older versions below pin
# their own checksums (and, for the dotted 2016.01.20 tag, an explicit URL).
source url: "https://curl.haxx.se/ca/cacert-#{version}.pem"
version("2017-01-18") { source sha256: "e62a07e61e5870effa81b430e1900778943c228bd7da1259dd6a955ee2262b47" }
version "2016-04-20" do
source md5: "782dcde8f5d53b1b9e888fdf113c42b9"
end
version "2016.01.20" do
source md5: "06629db7f712ff3a75630eccaecc1fe4"
source url: "https://curl.haxx.se/ca/cacert-2016-01-20.pem"
end
relative_path "cacerts-#{version}"
build do
# Install the bundle under embedded/ssl/certs and, on non-Windows
# platforms, expose it at the conventional embedded/ssl/cert.pem path.
mkdir "#{install_dir}/embedded/ssl/certs"
copy "#{project_dir}/cacert*.pem", "#{install_dir}/embedded/ssl/certs/cacert.pem"
# Windows does not support symlinks
unless windows?
link "#{install_dir}/embedded/ssl/certs/cacert.pem", "#{install_dir}/embedded/ssl/cert.pem"
block { File.chmod(0644, "#{install_dir}/embedded/ssl/certs/cacert.pem") }
end
end
# Legacy Verisign root certificates kept for reference; not referenced by the
# build block above — presumably appended elsewhere or retained historically.
VERISIGN_CERTS = <<-EOH
Verisign Class 3 Public Primary Certification Authority
=======================================================
-----BEGIN CERTIFICATE-----
MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkGA1UEBhMCVVMx
FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5
IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVow
XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz
IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA
A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94
f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol
hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBAgUAA4GBALtMEivPLCYA
TxQT3ab7/AoRhIzzKBxnki98tsX63/Dolbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59Ah
WM1pF+NEHJwZRDmJXNycAA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2Omuf
Tqj/ZA1k
-----END CERTIFICATE-----
Verisign Class 3 Public Primary Certification Authority
=======================================================
-----BEGIN CERTIFICATE-----
MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkGA1UEBhMCVVMx
FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5
IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVow
XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz
IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA
A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94
f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol
hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBABByUqkFFBky
CEHwxWsKzH4PIRnN5GfcX6kb5sroc50i2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWX
bj9T/UWZYB2oK0z5XqcJ2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/
D/xwzoiQ
-----END CERTIFICATE-----
EOH
|
KiiCorp/omnibus-software
|
config/software/cacerts.rb
|
Ruby
|
apache-2.0
| 3,499
|
<!doctype html>
<HTML>
<?php
//celkovy zaciatok
do
{
$sys = 'UCT';
$urov = 3000;
$copern = $_REQUEST['copern'];
$uziv = include("../uziv.php");
if ( !$uziv ) exit;
require_once("../pswd/password.php");
@$spojeni = mysql_connect($mysqlhost, $mysqluser, $mysqlpasswd);
if (!$spojeni):
echo "Spojenie so serverom nedostupne.";
exit;
endif;
mysql_select_db($mysqldb);
//datumove funkcie
$sDat = include("../funkcie/dat_sk_us.php");
//ramcek fpdf 1=zap,0=vyp
$rmc=0;
$rmc1=0;
//.jpg podklad
if ( $kli_vrok < 2018 ) {
$jpg_cesta="../dokumenty/statistika2016/fin304/fin3-04_v16";
}
if ( $kli_vrok >= 2018 ) {
$jpg_cesta="../dokumenty/tlacivo2018/fin3-04/fin3-04_v18";
}
$jpg_popis="Finanèný výkaz o finanèných aktívach pod¾a sektorov subjektu verejnej správy FIN 3-04 za rok ".$kli_vrok;
$pole = explode(".", $kli_vume);
$kli_vmes=$pole[0];
$kli_vrok=$pole[1];
$citfir = include("../cis/citaj_fir.php");
$cislo_oc = 1*$_REQUEST['cislo_oc'];
$subor = $_REQUEST['subor'];
$strana = 1*$_REQUEST['strana'];
if ( $strana == 0 ) $strana=9999;
if ( $cislo_oc == 0 ) $cislo_oc=1;
if ( $cislo_oc == 1 ) { $datum="31.03.".$kli_vrok; $mesiac="03"; $kli_vume="3.".$kli_vrok; }
if ( $cislo_oc == 2 ) { $datum="30.06.".$kli_vrok; $mesiac="06"; $kli_vume="6.".$kli_vrok; }
if ( $cislo_oc == 3 ) { $datum="30.09.".$kli_vrok; $mesiac="09"; $kli_vume="9.".$kli_vrok; }
if ( $cislo_oc == 4 ) { $datum="31.12.".$kli_vrok; $mesiac="12"; $kli_vume="12.".$kli_vrok; }
$vsetkyprepocty=0;
//ak nie je generovanie daj standardne
$niejegen=0;
$sql = "SELECT * FROM F".$kli_vxcf."_genfin304 ";
$vysledok = mysql_query($sql);
if (!$vysledok)
{
$copern=1001;
$niejegen=1;
}
//koniec ak nie je generovanie daj standardne
//Tabulka generovania
if ( $copern == 1001 )
{
$sql = "DROP TABLE F$kli_vxcf"."_genfin304";
$vysledok = mysql_query("$sql");
$sqlt = <<<crf204nuj_no
(
cpl int not null auto_increment,
uce VARCHAR(10),
crs INT,
cpl01 INT,
PRIMARY KEY(cpl)
);
crf204nuj_no;
$sql = 'CREATE TABLE F'.$kli_vxcf.'_genfin304'.$sqlt;
$vysledek = mysql_query("$sql");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '251', '2' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '257', '2' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '291', '2' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '061', '2' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '062', '2' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '253', '6' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '255', '6' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '256', '6' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '257', '6' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '259', '6' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '065', '6' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '066', '8' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '067', '8' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '069', '8' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '051', '10' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '052', '10' ); "; $ulozene = mysql_query("$sqult");
$sqult = "INSERT INTO F$kli_vxcf"."_genfin304 ( uce,crs ) VALUES ( '053', '10' ); "; $ulozene = mysql_query("$sqult");
$nacitajgen = 1*$_REQUEST['nacitajgen'];
if ( $nacitajgen == 1 ) {
?>
<script type="text/javascript">
window.open('../ucto/fin_cis.php?copern=308&drupoh=93&page=1&sysx=UCT', '_self');
</script>
<?php
exit;
}
$copern=20;
}
//koniec Tabulka generovania
//znovu nacitaj
if ( $copern == 26 )
{
//echo "citam";
$nasielvyplnene=0;
$sqtoz = "DELETE FROM F$kli_vxcf"."_uctvykaz_fin304 WHERE oc = $cislo_oc";
$oznac = mysql_query("$sqtoz");
$copern=20;
if( $zupravy == 1 ) $copern=20;
$subor=1;
$vsetkyprepocty=1;
}
//end of reload
//save edited data (mode 23): build an UPDATE for the page that was being
//edited ($strana 1..3) and apply it to the stored FIN 3-04 record.
if ( $copern == 23 )
{
// Page 1: only the report date (daz), converted to SQL format.
if ( $strana == 1 ) {
//$okres = strip_tags($_REQUEST['okres']);
//$obec = strip_tags($_REQUEST['obec']);
$daz = $_REQUEST['daz'];
$daz_sql = SqlDatum($daz);
$uprtxt = "UPDATE F$kli_vxcf"."_uctvykaz_fin304 SET ".
" daz='$daz_sql' ".
" WHERE oc = $cislo_oc";
}
// Page 2: opening states (pocs01..10) and increases (zvys01..10).
// "1*" coerces each request value to a number before interpolation.
if ( $strana == 2 ) {
$pocs01 = 1*$_REQUEST['pocs01'];
$pocs02 = 1*$_REQUEST['pocs02'];
$pocs03 = 1*$_REQUEST['pocs03'];
$pocs04 = 1*$_REQUEST['pocs04'];
$pocs05 = 1*$_REQUEST['pocs05'];
$pocs06 = 1*$_REQUEST['pocs06'];
$pocs07 = 1*$_REQUEST['pocs07'];
$pocs08 = 1*$_REQUEST['pocs08'];
$pocs09 = 1*$_REQUEST['pocs09'];
$pocs10 = 1*$_REQUEST['pocs10'];
$zvys01 = 1*$_REQUEST['zvys01'];
$zvys02 = 1*$_REQUEST['zvys02'];
$zvys03 = 1*$_REQUEST['zvys03'];
$zvys04 = 1*$_REQUEST['zvys04'];
$zvys05 = 1*$_REQUEST['zvys05'];
$zvys06 = 1*$_REQUEST['zvys06'];
$zvys07 = 1*$_REQUEST['zvys07'];
$zvys08 = 1*$_REQUEST['zvys08'];
$zvys09 = 1*$_REQUEST['zvys09'];
$zvys10 = 1*$_REQUEST['zvys10'];
$uprtxt = "UPDATE F$kli_vxcf"."_uctvykaz_fin304 SET ".
" pocs01='$pocs01', pocs02='$pocs02', pocs03='$pocs03', pocs04='$pocs04', pocs05='$pocs05',
pocs06='$pocs06', pocs07='$pocs07', pocs08='$pocs08', pocs09='$pocs09', pocs10='$pocs10',
zvys01='$zvys01', zvys02='$zvys02', zvys03='$zvys03', zvys04='$zvys04', zvys05='$zvys05',
zvys06='$zvys06', zvys07='$zvys07', zvys08='$zvys08', zvys09='$zvys09', zvys10='$zvys10' ".
" WHERE oc = $cislo_oc";
}
// Page 3: decreases (znis*), revaluations (oces*), other changes (osts*)
// and closing states (zoss*) for columns 01..10.
// NOTE(review): column-group meanings inferred from the recalculation at
// mode 10/20 (zoss = pocs + zvys - znis + oces + osts) - confirm naming.
if ( $strana == 3 ) {
$znis01 = 1*$_REQUEST['znis01'];
$znis02 = 1*$_REQUEST['znis02'];
$znis03 = 1*$_REQUEST['znis03'];
$znis04 = 1*$_REQUEST['znis04'];
$znis05 = 1*$_REQUEST['znis05'];
$znis06 = 1*$_REQUEST['znis06'];
$znis07 = 1*$_REQUEST['znis07'];
$znis08 = 1*$_REQUEST['znis08'];
$znis09 = 1*$_REQUEST['znis09'];
$znis10 = 1*$_REQUEST['znis10'];
$oces01 = 1*$_REQUEST['oces01'];
$oces02 = 1*$_REQUEST['oces02'];
$oces03 = 1*$_REQUEST['oces03'];
$oces04 = 1*$_REQUEST['oces04'];
$oces05 = 1*$_REQUEST['oces05'];
$oces06 = 1*$_REQUEST['oces06'];
$oces07 = 1*$_REQUEST['oces07'];
$oces08 = 1*$_REQUEST['oces08'];
$oces09 = 1*$_REQUEST['oces09'];
$oces10 = 1*$_REQUEST['oces10'];
$osts01 = 1*$_REQUEST['osts01'];
$osts02 = 1*$_REQUEST['osts02'];
$osts03 = 1*$_REQUEST['osts03'];
$osts04 = 1*$_REQUEST['osts04'];
$osts05 = 1*$_REQUEST['osts05'];
$osts06 = 1*$_REQUEST['osts06'];
$osts07 = 1*$_REQUEST['osts07'];
$osts08 = 1*$_REQUEST['osts08'];
$osts09 = 1*$_REQUEST['osts09'];
$osts10 = 1*$_REQUEST['osts10'];
$zoss01 = 1*$_REQUEST['zoss01'];
$zoss02 = 1*$_REQUEST['zoss02'];
$zoss03 = 1*$_REQUEST['zoss03'];
$zoss04 = 1*$_REQUEST['zoss04'];
$zoss05 = 1*$_REQUEST['zoss05'];
$zoss06 = 1*$_REQUEST['zoss06'];
$zoss07 = 1*$_REQUEST['zoss07'];
$zoss08 = 1*$_REQUEST['zoss08'];
$zoss09 = 1*$_REQUEST['zoss09'];
$zoss10 = 1*$_REQUEST['zoss10'];
$uprtxt = "UPDATE F$kli_vxcf"."_uctvykaz_fin304 SET ".
" znis01='$znis01', znis02='$znis02', znis03='$znis03', znis04='$znis04', znis05='$znis05',
znis06='$znis06', znis07='$znis07', znis08='$znis08', znis09='$znis09', znis10='$znis10',
oces01='$oces01', oces02='$oces02', oces03='$oces03', oces04='$oces04', oces05='$oces05',
oces06='$oces06', oces07='$oces07', oces08='$oces08', oces09='$oces09', oces10='$oces10',
osts01='$osts01', osts02='$osts02', osts03='$osts03', osts04='$osts04', osts05='$osts05',
osts06='$osts06', osts07='$osts07', osts08='$osts08', osts09='$osts09', osts10='$osts10',
zoss01='$zoss01', zoss02='$zoss02', zoss03='$zoss03', zoss04='$zoss04', zoss05='$zoss05',
zoss06='$zoss06', zoss07='$zoss07', zoss08='$zoss08', zoss09='$zoss09', zoss10='$zoss10' ".
" WHERE oc = $cislo_oc";
}
//echo $uprtxt;
// Run whichever UPDATE the page-specific branch built.
$upravene = mysql_query("$uprtxt");
// nepoc=1 suppresses the derived-column recalculation that follows later.
$nepoc = 1*$_REQUEST['nepoc'];
$vsetkyprepocty=1;
if ( $nepoc == 1 ) $vsetkyprepocty=0;
$copern=20;
// Alert the user when the UPDATE failed; otherwise record success.
if (!$upravene):
?>
<script type="text/javascript"> alert( "ÚDAJE NEBOLI UPRAVENÉ" ) </script>
<?php
endif;
if ($upravene):
$uprav="OK";
endif;
}
//end of saving edited data
//working tables and persistent table
// Drop the per-user scratch tables from any previous run. Failures are
// ignored on purpose: the tables may not exist yet.
$sqlt = 'DROP TABLE F'.$kli_vxcf.'_uctprcvykaz'.$kli_uzid;
$vysledok = mysql_query("$sqlt");
$sqlt = 'DROP TABLE F'.$kli_vxcf.'_uctprcvykazx'.$kli_uzid;
$vysledok = mysql_query("$sqlt");
$sqlt = 'DROP TABLE F'.$kli_vxcf.'_uctprcvykazz'.$kli_uzid;
$vysledok = mysql_query("$sqlt");
// Probe the persistent report table; when the SELECT fails the table is
// assumed missing/outdated and is (re)created with the layout below.
$sql = "SELECT pocs01 FROM F".$kli_vxcf."_uctvykaz_fin304";
$vysledok = mysql_query($sql);
if (!$vysledok)
{
$sqlt = 'DROP TABLE F'.$kli_vxcf.'_uctvykaz_fin304';
$vysledok = mysql_query("$sqlt");
// Precision/scale for every DECIMAL column in the table.
$pocdes="10,2";
// Heredoc with the column list; nothing may be added inside it - every
// character up to the closing marker becomes part of the SQL string.
$sqlt = <<<mzdprc
(
 px08 DECIMAL($pocdes) DEFAULT 0,
 oc INT(7) DEFAULT 0,
 druh DECIMAL(10,0) DEFAULT 0,
 okres VARCHAR(11),
 obec VARCHAR(11),
 daz DATE,
 kor INT,
 prx INT,
 uce VARCHAR(11),
 ucm VARCHAR(11),
 ucd VARCHAR(11),
 rdk INT,
 prv INT,
 hod DECIMAL($pocdes),
 mdt DECIMAL($pocdes),
 dal DECIMAL($pocdes),
 pocs01 DECIMAL($pocdes),
 pocs02 DECIMAL($pocdes),
 pocs03 DECIMAL($pocdes),
 pocs04 DECIMAL($pocdes),
 pocs05 DECIMAL($pocdes),
 pocs06 DECIMAL($pocdes),
 pocs07 DECIMAL($pocdes),
 pocs08 DECIMAL($pocdes),
 pocs09 DECIMAL($pocdes),
 pocs10 DECIMAL($pocdes),
 zvys01 DECIMAL($pocdes),
 zvys02 DECIMAL($pocdes),
 zvys03 DECIMAL($pocdes),
 zvys04 DECIMAL($pocdes),
 zvys05 DECIMAL($pocdes),
 zvys06 DECIMAL($pocdes),
 zvys07 DECIMAL($pocdes),
 zvys08 DECIMAL($pocdes),
 zvys09 DECIMAL($pocdes),
 zvys10 DECIMAL($pocdes),
 znis01 DECIMAL($pocdes),
 znis02 DECIMAL($pocdes),
 znis03 DECIMAL($pocdes),
 znis04 DECIMAL($pocdes),
 znis05 DECIMAL($pocdes),
 znis06 DECIMAL($pocdes),
 znis07 DECIMAL($pocdes),
 znis08 DECIMAL($pocdes),
 znis09 DECIMAL($pocdes),
 znis10 DECIMAL($pocdes),
 oces01 DECIMAL($pocdes),
 oces02 DECIMAL($pocdes),
 oces03 DECIMAL($pocdes),
 oces04 DECIMAL($pocdes),
 oces05 DECIMAL($pocdes),
 oces06 DECIMAL($pocdes),
 oces07 DECIMAL($pocdes),
 oces08 DECIMAL($pocdes),
 oces09 DECIMAL($pocdes),
 oces10 DECIMAL($pocdes),
 osts01 DECIMAL($pocdes),
 osts02 DECIMAL($pocdes),
 osts03 DECIMAL($pocdes),
 osts04 DECIMAL($pocdes),
 osts05 DECIMAL($pocdes),
 osts06 DECIMAL($pocdes),
 osts07 DECIMAL($pocdes),
 osts08 DECIMAL($pocdes),
 osts09 DECIMAL($pocdes),
 osts10 DECIMAL($pocdes),
 zoss01 DECIMAL($pocdes),
 zoss02 DECIMAL($pocdes),
 zoss03 DECIMAL($pocdes),
 zoss04 DECIMAL($pocdes),
 zoss05 DECIMAL($pocdes),
 zoss06 DECIMAL($pocdes),
 zoss07 DECIMAL($pocdes),
 zoss08 DECIMAL($pocdes),
 zoss09 DECIMAL($pocdes),
 zoss10 DECIMAL($pocdes),
 ico INT
);
mzdprc;
// CREATE TABLE <name>(<columns>); - the heredoc supplies the parentheses.
$vsql = 'CREATE TABLE F'.$kli_vxcf.'_uctvykaz_fin304'.$sqlt;
$vytvor = mysql_query("$vsql");
}
//end of table creation
// Clone the persistent table's structure into two per-user scratch tables
// (uctprcvykaz<uid> = detail rows, uctprcvykazx<uid> = summed rows) and
// empty them; CREATE TABLE ... SELECT copies both columns and data.
$vsql = 'CREATE TABLE F'.$kli_vxcf.'_uctprcvykaz'.$kli_uzid." SELECT * FROM F$kli_vxcf"."_uctvykaz_fin304";
$vytvor = mysql_query("$vsql");
$vsql = 'CREATE TABLE F'.$kli_vxcf.'_uctprcvykazx'.$kli_uzid." SELECT * FROM F$kli_vxcf"."_uctvykaz_fin304";
$vytvor = mysql_query("$vsql");
$vsql = 'TRUNCATE TABLE F'.$kli_vxcf.'_uctprcvykaz'.$kli_uzid." ";
$vytvor = mysql_query("$vsql");
$vsql = 'TRUNCATE TABLE F'.$kli_vxcf.'_uctprcvykazx'.$kli_uzid." ";
$vytvor = mysql_query("$vsql");
//exit;
// If no saved record exists for this period (data_seek on an empty result
// fails, hence the @), force a rebuild of the working file below.
$jepotvrd=0;
$sql = "SELECT * FROM F$kli_vxcf"."_uctvykaz_fin304 WHERE oc = $cislo_oc";
$sqldok = mysql_query("$sql");
if (@$zaznam=mysql_data_seek($sqldok,0))
{
$jepotvrd=1;
}
if ( $jepotvrd == 0 ) $subor=1;
$nacitavamhodnoty=0;
//build the working file: collect values from the ledgers into the
//per-user scratch table, classify them into report rows, sum them and
//store the result as the period's FIN 3-04 record.
if ( $subor == 1 )
{
//fetch the record from _mzdkun for this period (only meno is used)
$sql = "SELECT * FROM F$kli_vxcf"."_mzdkun WHERE oc = $cislo_oc";
$sqldok = mysql_query("$sql");
if (@$zaznam=mysql_data_seek($sqldok,0))
{
$riaddok=mysql_fetch_object($sqldok);
$meno=$riaddok->meno;
}
$ttvv = "INSERT INTO F$kli_vxcf"."_uctprcvykaz$kli_uzid ".
" ( oc ) VALUES ".
" ( '$cislo_oc' )";
//$ttqq = mysql_query("$ttvv");
/////////////////////////////////load values from the ledger into the file
$nacitavamhodnoty=1;
//opening balances of accounts: one detail row per account from the chart
//of accounts (_uctosnova); kor=1 marks "opening state" rows.
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctprcvykaz$kli_uzid"." SELECT".
" pmd,$cislo_oc,0,'','','0000-00-00',".
" 1,0,uce,uce,0,0,0,0,pmd,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"$fir_fico FROM F$kli_vxcf"."_uctosnova".
" WHERE F$kli_vxcf"."_uctosnova.pmd != 0";
//echo $dsqlt;
$dsql = mysql_query("$dsqlt");
// Same for the credit-side opening balances (pda), negated.
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctprcvykaz$kli_uzid"." SELECT".
" -pda,$cislo_oc,0,'','','0000-00-00',".
" 1,0,uce,uce,0,0,0,0,0,pda,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"$fir_fico FROM F$kli_vxcf"."_uctosnova".
" WHERE F$kli_vxcf"."_uctosnova.pda != 0";
//echo $dsqlt;
$dsql = mysql_query("$dsqlt");
//exit;
// Walk all nine source subsystems; each iteration picks the posting table
// ($uctovanie) and, for 1-6, the matching document-header table ($doklad).
$psys=1;
while ($psys <= 9 )
{
//cash receipts
if( $psys == 1 ) { $uctovanie="uctpokuct"; $doklad="pokpri"; }
//cash disbursements
if( $psys == 2 ) { $uctovanie="uctpokuct"; $doklad="pokvyd"; }
//bank statements
if( $psys == 3 ) { $uctovanie="uctban"; $doklad="banvyp"; }
//general journal
if( $psys == 4 ) { $uctovanie="uctvsdp"; $doklad="uctvsdh"; }
//customer invoices
if( $psys == 5 ) { $uctovanie="uctodb"; $doklad="fakodb"; }
//supplier invoices
if( $psys == 6 ) { $uctovanie="uctdod"; $doklad="fakdod"; }
//fixed assets
if( $psys == 7 ) { $uctovanie="uctmaj"; }
//inventory
if( $psys == 8 ) { $uctovanie="uctskl"; }
//payroll
if( $psys == 9 ) { $uctovanie="uctmzd"; }
if( $psys <= 6 )
{
// Document-based subsystems: join postings to their document headers and
// insert one detail row per debit posting (kor=2, account ucm)...
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctprcvykaz$kli_uzid"." SELECT".
" 0,$cislo_oc,0,'','','0000-00-00',".
"2,0,ucm,ucm,0,0,0,0,F$kli_vxcf"."_$uctovanie.hod,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
" $fir_fico FROM F$kli_vxcf"."_$uctovanie,F$kli_vxcf"."_$doklad".
" WHERE F$kli_vxcf"."_$uctovanie.dok=F$kli_vxcf"."_$doklad.dok AND ucm > 0 AND ume <= $kli_vume";
//echo $dsqlt;
$dsql = mysql_query("$dsqlt");
// ...and one per credit posting (kor=3, account ucd).
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctprcvykaz$kli_uzid"." SELECT".
" 0,$cislo_oc,0,'','','0000-00-00',".
"3,0,ucd,0,ucd,0,0,0,0,F$kli_vxcf"."_$uctovanie.hod,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
" $fir_fico FROM F$kli_vxcf"."_$uctovanie,F$kli_vxcf"."_$doklad".
" WHERE F$kli_vxcf"."_$uctovanie.dok=F$kli_vxcf"."_$doklad.dok AND ucd > 0 AND ume <= $kli_vume";
$dsql = mysql_query("$dsqlt");
}
else
{
//subsystems without document headers: aggregate directly, grouped by account
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctprcvykaz$kli_uzid"." SELECT".
" 0,$cislo_oc,0,'','','0000-00-00',".
"2,0,ucm,ucm,0,0,0,0,SUM(hod),0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
" $fir_fico FROM F$kli_vxcf"."_$uctovanie".
" WHERE ( ucm > 0 AND ume <= $kli_vume ) GROUP BY F$kli_vxcf"."_$uctovanie.ucm";
//echo $dsqlt;
$dsql = mysql_query("$dsqlt");
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctprcvykaz$kli_uzid"." SELECT".
" 0,$cislo_oc,0,'','','0000-00-00',".
"3,0,ucd,0,ucd,0,0,0,0,SUM(hod),".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
"0,0,0,0,0,0,0,0,0,0,".
" $fir_fico FROM F$kli_vxcf"."_$uctovanie".
" WHERE ( ucd > 0 AND ume <= $kli_vume ) GROUP BY F$kli_vxcf"."_$uctovanie.ucd";
$dsql = mysql_query("$dsqlt");
}
$psys=$psys+1;
}
// Classify each detail row into a report row (rdk) using the generation
// table: first by 3-character account prefix, then exact matches override.
$sqtoz = "UPDATE F$kli_vxcf"."_uctprcvykaz$kli_uzid,F$kli_vxcf"."_genfin304".
" SET rdk=F$kli_vxcf"."_genfin304.crs".
" WHERE LEFT(F$kli_vxcf"."_uctprcvykaz$kli_uzid.uce,3) = LEFT(F$kli_vxcf"."_genfin304.uce,3) ";
//echo $sqtoz;
$oznac = mysql_query("$sqtoz");
$sqtoz = "UPDATE F$kli_vxcf"."_uctprcvykaz$kli_uzid,F$kli_vxcf"."_genfin304".
" SET rdk=F$kli_vxcf"."_genfin304.crs".
" WHERE F$kli_vxcf"."_uctprcvykaz$kli_uzid.uce = F$kli_vxcf"."_genfin304.uce ";
//echo $sqtoz;
$oznac = mysql_query("$sqtoz");
//exit;
//distribute amounts into the per-row columns by movement kind (kor):
//1 = opening state (pocs), 2 = increase (zvys), 3 = decrease (znis)
$rdk=1;
while ($rdk <= 10 )
{
// Column suffix is zero-padded: 1 -> "01".
$crdk=$rdk;
if( $rdk < 10 ) $crdk="0".$rdk;
$sqtoz = "UPDATE F$kli_vxcf"."_uctprcvykaz$kli_uzid SET pocs$crdk=mdt-dal WHERE rdk = $rdk AND kor = 1 ";
$oznac = mysql_query("$sqtoz");
$sqtoz = "UPDATE F$kli_vxcf"."_uctprcvykaz$kli_uzid SET zvys$crdk=mdt-dal WHERE rdk = $rdk AND kor = 2 ";
$oznac = mysql_query("$sqtoz");
$sqtoz = "UPDATE F$kli_vxcf"."_uctprcvykaz$kli_uzid SET znis$crdk=-(mdt-dal) WHERE rdk = $rdk AND kor = 3 ";
$oznac = mysql_query("$sqtoz");
$rdk=$rdk+1;
}
//sanity check: rows classified into generated/summary columns (1,3,7,9)
//should not exist - list the offending accounts and abort.
$vsql = "DROP TABLE F".$kli_vxcf."_prcfinneg".$kli_uzid." ";
$vytvor = mysql_query("$vsql");
$vsql = "CREATE TABLE F".$kli_vxcf."_prcfinneg".$kli_uzid." SELECT * FROM F$kli_vxcf"."_uctprcvykaz".$kli_uzid." WHERE rdk < 0 ";
$vytvor = mysql_query("$vsql");
//echo $vsql;
$vsql = "INSERT INTO F".$kli_vxcf."_prcfinneg".$kli_uzid." SELECT * FROM F$kli_vxcf"."_uctprcvykaz$kli_uzid ".
" WHERE ( rdk = 1 OR rdk = 3 OR rdk = 7 OR rdk = 9 ) ";
$vytvor = mysql_query("$vsql");
$sqltt = "SELECT * FROM F$kli_vxcf"."_prcfinneg$kli_uzid WHERE rdk >= 0 GROUP BY uce ";
$sql = mysql_query("$sqltt");
$pol = mysql_num_rows($sql);
if( $pol > 0 )
{
$i=0;
// NOTE(review): "<= $pol" seeks one past the last row; the failed seek is
// silenced by @, so no harm, but "<" looks like the intended bound.
while ($i <= $pol )
{
if (@$zaznam=mysql_data_seek($sql,$i))
{
$hlavicka=mysql_fetch_object($sql);
if( $hlavicka->rdk != 0 ) { echo "Pravdepodobne generovanie v sumárnom alebo (z toho) ståpci, úèet ".$hlavicka->uce." / èíslo ståpca ".$hlavicka->rdk."<br />"; }
}
$i = $i + 1;
}
$sqtoz = "DROP TABLE F$kli_vxcf"."_prcfinneg$kli_uzid ";
//$oznac = mysql_query("$sqtoz");
exit;
}
//end of listing
//sum over the detail rows into a single record (GROUP BY prx)
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctprcvykazx$kli_uzid "." SELECT".
" 0,$cislo_oc,0,'','','0000-00-00',".
" 0,1,uce,ucm,ucd,rdk,prv,hod,mdt,dal,".
"SUM(pocs01),SUM(pocs02),SUM(pocs03),SUM(pocs04),SUM(pocs05),SUM(pocs06),SUM(pocs07),SUM(pocs08),SUM(pocs09),SUM(pocs10),".
"SUM(zvys01),SUM(zvys02),SUM(zvys03),SUM(zvys04),SUM(zvys05),SUM(zvys06),SUM(zvys07),SUM(zvys08),SUM(zvys09),SUM(zvys10),".
"SUM(znis01),SUM(znis02),SUM(znis03),SUM(znis04),SUM(znis05),SUM(znis06),SUM(znis07),SUM(znis08),SUM(znis09),SUM(znis10),".
"SUM(oces01),SUM(oces02),SUM(oces03),SUM(oces04),SUM(oces05),SUM(oces06),SUM(oces07),SUM(oces08),SUM(oces09),SUM(oces10),".
"SUM(osts01),SUM(osts02),SUM(osts03),SUM(osts04),SUM(osts05),SUM(osts06),SUM(osts07),SUM(osts08),SUM(osts09),SUM(osts10),".
"SUM(zoss01),SUM(zoss02),SUM(zoss03),SUM(zoss04),SUM(zoss05),SUM(zoss06),SUM(zoss07),SUM(zoss08),SUM(zoss09),SUM(zoss10),".
"$fir_fico".
" FROM F$kli_vxcf"."_uctprcvykaz$kli_uzid".
" WHERE rdk >= 0".
" GROUP BY prx".
"";
//echo $dsqlt;
$dsql = mysql_query("$dsqlt");
/////////////////////////////////end of loading values
//store: replace the period's persistent record with the summed row
$sqtoz = "DELETE FROM F$kli_vxcf"."_uctvykaz_fin304 WHERE oc = $cislo_oc";
$oznac = mysql_query("$sqtoz");
$dsqlt = "INSERT INTO F$kli_vxcf"."_uctvykaz_fin304".
" SELECT * FROM F$kli_vxcf"."_uctprcvykazx".$kli_uzid." WHERE oc = $cislo_oc AND prx = 1 ".
" GROUP BY oc".
"";
//echo $dsqlt;
$dsql = mysql_query("$dsqlt");
}
//end of the working file for the yearly report
//calculations: derive the summary column 01 and the closing states (zoss)
if ( $copern == 10 OR $copern == 20 )
{
// Column 01 is the sum of columns 02, 04 and 05 within each row group.
$sqtoz = "UPDATE F$kli_vxcf"."_uctvykaz_fin304 SET ".
" pocs01=pocs02+pocs04+pocs05, ".
" zvys01=zvys02+zvys04+zvys05, ".
" znis01=znis02+znis04+znis05, ".
" oces01=oces02+oces04+oces05, ".
" osts01=osts02+osts04+osts05 ".
" WHERE oc = $cislo_oc ";
$oznac = mysql_query("$sqtoz");
// Closing state = opening + increases - decreases + revaluations + other.
$sqtoz = "UPDATE F$kli_vxcf"."_uctvykaz_fin304 SET ".
" zoss01=pocs01+zvys01-znis01+oces01+osts01, ".
" zoss02=pocs02+zvys02-znis02+oces02+osts02, ".
" zoss03=pocs03+zvys03-znis03+oces03+osts03, ".
" zoss04=pocs04+zvys04-znis04+oces04+osts04, ".
" zoss05=pocs05+zvys05-znis05+oces05+osts05, ".
" zoss06=pocs06+zvys06-znis06+oces06+osts06, ".
" zoss07=pocs07+zvys07-znis07+oces07+osts07, ".
" zoss08=pocs08+zvys08-znis08+oces08+osts08, ".
" zoss09=pocs09+zvys09-znis09+oces09+osts09, ".
" zoss10=pocs10+zvys10-znis10+oces10+osts10 ".
" WHERE oc = $cislo_oc ";
$oznac = mysql_query("$sqtoz");
}
//end of calculations
//load the stored record into page variables for the edit form; the rNNsMM
//variables feed the read-only display rows of the printed report.
if ( $copern == 10 OR $copern == 20 )
{
$sqlfir = "SELECT * FROM F$kli_vxcf"."_uctvykaz_fin304 WHERE oc = $cislo_oc ";
$fir_vysledok = mysql_query($sqlfir);
$fir_riadok=mysql_fetch_object($fir_vysledok);
// Page 1 (or full print, strana=9999): report date in Slovak format.
if ( $strana == 1 OR $strana == 9999 )
{
$daz = $fir_riadok->daz;
$daz_sk=SkDatum($daz);
}
// Page 2: opening states and increases, plus their display copies.
if ( $strana == 2 )
{
$pocs01 = $fir_riadok->pocs01;
$pocs02 = $fir_riadok->pocs02;
$pocs03 = $fir_riadok->pocs03;
$pocs04 = $fir_riadok->pocs04;
$pocs05 = $fir_riadok->pocs05;
$pocs06 = $fir_riadok->pocs06;
$pocs07 = $fir_riadok->pocs07;
$pocs08 = $fir_riadok->pocs08;
$pocs09 = $fir_riadok->pocs09;
$pocs10 = $fir_riadok->pocs10;
$zvys01 = $fir_riadok->zvys01;
$zvys02 = $fir_riadok->zvys02;
$zvys03 = $fir_riadok->zvys03;
$zvys04 = $fir_riadok->zvys04;
$zvys05 = $fir_riadok->zvys05;
$zvys06 = $fir_riadok->zvys06;
$zvys07 = $fir_riadok->zvys07;
$zvys08 = $fir_riadok->zvys08;
$zvys09 = $fir_riadok->zvys09;
$zvys10 = $fir_riadok->zvys10;
// NOTE(review): r01 and r06 both mirror pocs*, and r13/r18 both mirror
// zvys* - presumably the same values appear in two report rows; confirm
// against the official FIN 3-04 form layout.
$r01s01 = $fir_riadok->pocs01;
$r01s02 = $fir_riadok->pocs02;
$r01s03 = $fir_riadok->pocs03;
$r01s04 = $fir_riadok->pocs04;
$r01s05 = $fir_riadok->pocs05;
$r01s06 = $fir_riadok->pocs06;
$r01s07 = $fir_riadok->pocs07;
$r01s08 = $fir_riadok->pocs08;
$r01s09 = $fir_riadok->pocs09;
$r01s10 = $fir_riadok->pocs10;
$r06s01 = $fir_riadok->pocs01;
$r06s02 = $fir_riadok->pocs02;
$r06s03 = $fir_riadok->pocs03;
$r06s04 = $fir_riadok->pocs04;
$r06s05 = $fir_riadok->pocs05;
$r06s06 = $fir_riadok->pocs06;
$r06s07 = $fir_riadok->pocs07;
$r06s08 = $fir_riadok->pocs08;
$r06s09 = $fir_riadok->pocs09;
$r06s10 = $fir_riadok->pocs10;
$r13s01 = $fir_riadok->zvys01;
$r13s02 = $fir_riadok->zvys02;
$r13s03 = $fir_riadok->zvys03;
$r13s04 = $fir_riadok->zvys04;
$r13s05 = $fir_riadok->zvys05;
$r13s06 = $fir_riadok->zvys06;
$r13s07 = $fir_riadok->zvys07;
$r13s08 = $fir_riadok->zvys08;
$r13s09 = $fir_riadok->zvys09;
$r13s10 = $fir_riadok->zvys10;
$r18s01 = $fir_riadok->zvys01;
$r18s02 = $fir_riadok->zvys02;
$r18s03 = $fir_riadok->zvys03;
$r18s04 = $fir_riadok->zvys04;
$r18s05 = $fir_riadok->zvys05;
$r18s06 = $fir_riadok->zvys06;
$r18s07 = $fir_riadok->zvys07;
$r18s08 = $fir_riadok->zvys08;
$r18s09 = $fir_riadok->zvys09;
$r18s10 = $fir_riadok->zvys10;
$r25s01 = $fir_riadok->znis01;
$r25s02 = $fir_riadok->znis02;
$r25s03 = $fir_riadok->znis03;
$r25s04 = $fir_riadok->znis04;
$r25s05 = $fir_riadok->znis05;
$r25s06 = $fir_riadok->znis06;
$r25s07 = $fir_riadok->znis07;
$r25s08 = $fir_riadok->znis08;
$r25s09 = $fir_riadok->znis09;
$r25s10 = $fir_riadok->znis10;
}
// Page 3: decreases, revaluations, other changes and closing states.
if ( $strana == 3 )
{
$znis01 = $fir_riadok->znis01;
$znis02 = $fir_riadok->znis02;
$znis03 = $fir_riadok->znis03;
$znis04 = $fir_riadok->znis04;
$znis05 = $fir_riadok->znis05;
$znis06 = $fir_riadok->znis06;
$znis07 = $fir_riadok->znis07;
$znis08 = $fir_riadok->znis08;
$znis09 = $fir_riadok->znis09;
$znis10 = $fir_riadok->znis10;
$oces01 = $fir_riadok->oces01;
$oces02 = $fir_riadok->oces02;
$oces03 = $fir_riadok->oces03;
$oces04 = $fir_riadok->oces04;
$oces05 = $fir_riadok->oces05;
$oces06 = $fir_riadok->oces06;
$oces07 = $fir_riadok->oces07;
$oces08 = $fir_riadok->oces08;
$oces09 = $fir_riadok->oces09;
$oces10 = $fir_riadok->oces10;
$osts01 = $fir_riadok->osts01;
$osts02 = $fir_riadok->osts02;
$osts03 = $fir_riadok->osts03;
$osts04 = $fir_riadok->osts04;
$osts05 = $fir_riadok->osts05;
$osts06 = $fir_riadok->osts06;
$osts07 = $fir_riadok->osts07;
$osts08 = $fir_riadok->osts08;
$osts09 = $fir_riadok->osts09;
$osts10 = $fir_riadok->osts10;
$zoss01 = $fir_riadok->zoss01;
$zoss02 = $fir_riadok->zoss02;
$zoss03 = $fir_riadok->zoss03;
$zoss04 = $fir_riadok->zoss04;
$zoss05 = $fir_riadok->zoss05;
$zoss06 = $fir_riadok->zoss06;
$zoss07 = $fir_riadok->zoss07;
$zoss08 = $fir_riadok->zoss08;
$zoss09 = $fir_riadok->zoss09;
$zoss10 = $fir_riadok->zoss10;
$r30s01 = $fir_riadok->znis01;
$r30s02 = $fir_riadok->znis02;
$r30s03 = $fir_riadok->znis03;
$r30s04 = $fir_riadok->znis04;
$r30s05 = $fir_riadok->znis05;
$r30s06 = $fir_riadok->znis06;
$r30s07 = $fir_riadok->znis07;
$r30s08 = $fir_riadok->znis08;
$r30s09 = $fir_riadok->znis09;
$r30s10 = $fir_riadok->znis10;
// NOTE(review): r39 and r44 both mirror zoss* - confirm intent.
$r39s01 = $fir_riadok->zoss01;
$r39s02 = $fir_riadok->zoss02;
$r39s03 = $fir_riadok->zoss03;
$r39s04 = $fir_riadok->zoss04;
$r39s05 = $fir_riadok->zoss05;
$r39s06 = $fir_riadok->zoss06;
$r39s07 = $fir_riadok->zoss07;
$r39s08 = $fir_riadok->zoss08;
$r39s09 = $fir_riadok->zoss09;
$r39s10 = $fir_riadok->zoss10;
$r44s01 = $fir_riadok->zoss01;
$r44s02 = $fir_riadok->zoss02;
$r44s03 = $fir_riadok->zoss03;
$r44s04 = $fir_riadok->zoss04;
$r44s05 = $fir_riadok->zoss05;
$r44s06 = $fir_riadok->zoss06;
$r44s07 = $fir_riadok->zoss07;
$r44s08 = $fir_riadok->zoss08;
$r44s09 = $fir_riadok->zoss09;
$r44s10 = $fir_riadok->zoss10;
}
mysql_free_result($fir_vysledok);
}
//end of loading
//6-digit company ID (ico): left-pad with "00" for the 8-character export
// NOTE(review): the "< 999999" test skips padding for exactly 999999 and
// pads by string length only indirectly - confirm the boundary is intended.
$fir_ficox=$fir_fico; if ( $fir_ficox < 999999 ) { $fir_ficox="00".$fir_ficox; }
//shortened "as of" date
//$skutku=substr($datum,0,6);
?>
<!-- Page head: cp1250 charset (legacy Slovak encoding) plus local styles
     for the FIN 3-04 edit form controls and toolbar buttons. -->
<HEAD>
<META http-equiv="Content-Type" content="text/html; charset=cp1250">
<link rel="stylesheet" href="../css/reset.css">
<link rel="stylesheet" href="../css/tlaciva.css">
<title>Výkaz FIN 3-04</title>
<style type="text/css">
img.btn-row-tool {
width: 20px;
height: 20px;
}
/* compact text inputs used for the report cells */
form input[type=text] {
height: 16px;
line-height: 16px;
padding-left: 2px;
border: 1px solid #39f;
font-size: 12px;
}
/* floating echo of the focused input's value */
div.input-echo {
position: absolute;
font-size: 16px;
background-color: #fff;
font-weight: bold;
}
img.btn-form-tool {
margin: 0 8px;
}
/* flat toolbar buttons (DBF/CSV export) */
.btn-text {
border: 0;
box-sizing: border-box;
color: #39f;
cursor: pointer;
display: inline-block;
font-size: 14px;
font-weight: 500;
height: 28px;
line-height: 28px;
padding: 0 6px;
text-align: center;
text-transform: uppercase;
/*vertical-align: middle;*/
background-color: transparent;
border-radius: 2px;
}
.btn-text:hover {
background-color: rgba(158,158,158,.2);
}
</style>
<script type="text/javascript">
<?php
//edit mode (20): emit an ObnovUI() that refills the form fields from the
//PHP variables loaded above, for whichever page ($strana) is shown.
if ( $copern == 20 )
{
?>
function ObnovUI()
{
<?php if ( $strana == 1 ) { ?>
document.formv1.daz.value = '<?php echo $daz_sk;?>';
<?php } ?>
<?php if ( $strana == 2 ) { ?>
document.formv1.pocs01.value = '<?php echo $pocs01; ?>';
document.formv1.pocs02.value = '<?php echo $pocs02; ?>';
document.formv1.pocs03.value = '<?php echo $pocs03; ?>';
document.formv1.pocs04.value = '<?php echo $pocs04; ?>';
document.formv1.pocs05.value = '<?php echo $pocs05; ?>';
document.formv1.pocs06.value = '<?php echo $pocs06; ?>';
document.formv1.pocs07.value = '<?php echo $pocs07; ?>';
document.formv1.pocs08.value = '<?php echo $pocs08; ?>';
document.formv1.pocs09.value = '<?php echo $pocs09; ?>';
document.formv1.pocs10.value = '<?php echo $pocs10; ?>';
document.formv1.zvys01.value = '<?php echo $zvys01; ?>';
document.formv1.zvys02.value = '<?php echo $zvys02; ?>';
document.formv1.zvys03.value = '<?php echo $zvys03; ?>';
document.formv1.zvys04.value = '<?php echo $zvys04; ?>';
document.formv1.zvys05.value = '<?php echo $zvys05; ?>';
document.formv1.zvys06.value = '<?php echo $zvys06; ?>';
document.formv1.zvys07.value = '<?php echo $zvys07; ?>';
document.formv1.zvys08.value = '<?php echo $zvys08; ?>';
document.formv1.zvys09.value = '<?php echo $zvys09; ?>';
document.formv1.zvys10.value = '<?php echo $zvys10; ?>';
<?php } ?>
<?php if ( $strana == 3 ) { ?>
document.formv1.znis01.value = '<?php echo $znis01; ?>';
document.formv1.znis02.value = '<?php echo $znis02; ?>';
document.formv1.znis03.value = '<?php echo $znis03; ?>';
document.formv1.znis04.value = '<?php echo $znis04; ?>';
document.formv1.znis05.value = '<?php echo $znis05; ?>';
document.formv1.znis06.value = '<?php echo $znis06; ?>';
document.formv1.znis07.value = '<?php echo $znis07; ?>';
document.formv1.znis08.value = '<?php echo $znis08; ?>';
document.formv1.znis09.value = '<?php echo $znis09; ?>';
document.formv1.znis10.value = '<?php echo $znis10; ?>';
document.formv1.oces01.value = '<?php echo $oces01; ?>';
document.formv1.oces02.value = '<?php echo $oces02; ?>';
document.formv1.oces03.value = '<?php echo $oces03; ?>';
document.formv1.oces04.value = '<?php echo $oces04; ?>';
document.formv1.oces05.value = '<?php echo $oces05; ?>';
document.formv1.oces06.value = '<?php echo $oces06; ?>';
document.formv1.oces07.value = '<?php echo $oces07; ?>';
document.formv1.oces08.value = '<?php echo $oces08; ?>';
document.formv1.oces09.value = '<?php echo $oces09; ?>';
document.formv1.oces10.value = '<?php echo $oces10; ?>';
document.formv1.osts01.value = '<?php echo $osts01; ?>';
document.formv1.osts02.value = '<?php echo $osts02; ?>';
document.formv1.osts03.value = '<?php echo $osts03; ?>';
document.formv1.osts04.value = '<?php echo $osts04; ?>';
document.formv1.osts05.value = '<?php echo $osts05; ?>';
document.formv1.osts06.value = '<?php echo $osts06; ?>';
document.formv1.osts07.value = '<?php echo $osts07; ?>';
document.formv1.osts08.value = '<?php echo $osts08; ?>';
document.formv1.osts09.value = '<?php echo $osts09; ?>';
document.formv1.osts10.value = '<?php echo $osts10; ?>';
document.formv1.zoss01.value = '<?php echo $zoss01; ?>';
document.formv1.zoss02.value = '<?php echo $zoss02; ?>';
document.formv1.zoss03.value = '<?php echo $zoss03; ?>';
document.formv1.zoss04.value = '<?php echo $zoss04; ?>';
document.formv1.zoss05.value = '<?php echo $zoss05; ?>';
document.formv1.zoss06.value = '<?php echo $zoss06; ?>';
document.formv1.zoss07.value = '<?php echo $zoss07; ?>';
document.formv1.zoss08.value = '<?php echo $zoss08; ?>';
document.formv1.zoss09.value = '<?php echo $zoss09; ?>';
document.formv1.zoss10.value = '<?php echo $zoss10; ?>';
<?php } ?>
}
<?php
//end of edit mode
}
?>
<?php
//any other mode: the BODY onload handler still needs ObnovUI to exist,
//so emit a no-op version.
if ( $copern != 20 )
{
?>
function ObnovUI()
{
}
<?php
}
?>
//Z ciarky na bodku
function CiarkaNaBodku(Vstup)
{
if ( Vstup.value.search(/[^0-9.-]/g) != -1) { Vstup.value=Vstup.value.replace(",","."); }
}
// Opens the official filling instructions PDF in a new window.
function MetodVypln()
{
window.open('<?php echo $jpg_cesta; ?>_vysvetlivky.pdf',
'_blank', 'width=980, height=900, top=0, left=20, status=yes, resizable=yes, scrollbars=yes, menubar=yes, toolbar=yes');
}
// Opens the full printable report (all pages, strana=9999) in a new window.
function TlacVykaz()
{
window.open('vykaz_fin304_2016.php?cislo_oc=<?php echo $cislo_oc;?>&copern=10&drupoh=1&page=1&subor=0&strana=9999',
'_blank', 'width=1050, height=900, top=0, left=20, status=yes, resizable=yes, scrollbars=yes');
}
// Reloads this page in mode 26 (rebuild the report from the ledgers).
function Nacitaj()
{
window.open('vykaz_fin304_2016.php?cislo_oc=<?php echo $cislo_oc;?>&copern=26&drupoh=1&page=1&subor=0&strana=1',
'_self', 'width=1050, height=900, top=0, left=20, status=yes, resizable=yes, scrollbars=yes');
}
// DBF export of the report (legacy format, used for years before 2018).
function DbfFin304()
{
window.open('fin304dbf_2016.php?cislo_oc=<?php echo $cislo_oc;?>&copern=1&drupoh=1&page=1&subor=0',
'_blank', 'width=1080, height=900, top=0, left=10, status=yes, resizable=yes, scrollbars=yes');
}
// CSV export of the report (used from 2017 onwards).
function CsvFin304()
{
window.open('vykaz_fin304_csv.php?cislo_oc=<?php echo $cislo_oc;?>&copern=1&drupoh=1&page=1&subor=0',
'_blank', 'width=1080, height=900, top=0, left=10, status=yes, resizable=yes, scrollbars=yes' );
}
</script>
</HEAD>
<BODY onload="ObnovUI();">
<?php
//uprav udaje
if ( $copern == 20 )
{
?>
<div id="wrap-heading">
<table id="heading">
<tr>
<td class="ilogin">EuroSecom</td>
<td class="ilogin" align="right"><?php echo "<strong>UME</strong> $kli_vume <strong>FIR</strong> $kli_vxcf:$kli_nxcf <strong>login</strong> $kli_uzmeno $kli_uzprie / $kli_uzid";?></td>
</tr>
<tr>
<td class="header">FIN 3-04 Finanèné aktíva pod¾a sektorov za
<span style="color:#39f;"><?php echo "$cislo_oc. tvrrok";?></span>
</td>
<td>
<div class="bar-btn-form-tool">
<img src="../obr/ikony/info_blue_icon.png" onclick="MetodVypln();" title="Vysvetlivky na vyplnenie výkazu" class="btn-form-tool">
<?php if ( $kli_vrok < 2018 ) { ?>
<button type="button" onclick="DbfFin304();" title="Export do DBF" class="btn-text toright" style="position: relative; top: -4px;">DBF</button>
<?php } ?>
<?php if ( $kli_vrok >= 2017 ) { ?>
<button type="button" onclick="CsvFin304();" title="Export do CSV" class="btn-text toright" style="position: relative; top: -4px;">CSV</button>
<?php } ?>
<img src="../obr/ikony/download_blue_icon.png" onclick="Nacitaj();" title="Naèíta údaje" class="btn-form-tool">
<img src="../obr/ikony/printer_blue_icon.png" onclick="TlacVykaz();" title="Zobrazi vetky strany v PDF" class="btn-form-tool">
</div>
</td>
</tr>
</table>
</div>
<?php
$sirka=950;
$vyska=1300;
if ( $strana == 2 OR $strana == 3 )
{
$sirka=1250; $vyska=920;
}
?>
<div id="content" style="width:<?php echo $sirka; ?>px; height:<?php echo $vyska; ?>px;">
<FORM name="formv1" method="post" action="../ucto/vykaz_fin304_2016.php?copern=23&cislo_oc=<?php echo $cislo_oc;?>&strana=<?php echo $strana; ?>">
<?php
// Highlight the pager link of the page currently shown. $source is the script
// that the pager links reload (copern=20 = display/edit mode).
$clas1="noactive"; $clas2="noactive"; $clas3="noactive";
if ( $strana == 1 ) $clas1="active"; if ( $strana == 2 ) $clas2="active";
if ( $strana == 3 ) $clas3="active";
$source="vykaz_fin304_2016.php";
?>
<div class="navbar">
<a href="#" onclick="window.open('<?php echo $source; ?>?copern=20&strana=1&cislo_oc=<?php echo $cislo_oc; ?>', '_self');" class="<?php echo $clas1; ?> toleft">1</a>
<a href="#" onclick="window.open('<?php echo $source; ?>?copern=20&strana=2&cislo_oc=<?php echo $cislo_oc; ?>', '_self');" class="<?php echo $clas2; ?> toleft">2</a>
<a href="#" onclick="window.open('<?php echo $source; ?>?copern=20&strana=3&cislo_oc=<?php echo $cislo_oc; ?>', '_self');" class="<?php echo $clas3; ?> toleft">3</a>
<?php
// "Data loaded" banner -- $nacitavamhodnoty is presumably set by the
// copern=26 load branch earlier in this file (TODO confirm).
$alertnacitaj="";
if ( $nacitavamhodnoty == 1 ) { $alertnacitaj="!!! Údaje sú naèítané !!!"; }
?>
<div class="alert-pocitam"><?php echo $alertnacitaj; ?></div>
<INPUT type="submit" id="uloz" name="uloz" value="Uloi zmeny" class="btn-top-formsave">
</div>
<?php // Page 1: cover sheet -- company identification echoed over the form
      // background image; only the "daz" field is user-editable here. ?>
<?php if ( $strana == 1 ) { ?>
<img src="<?php echo $jpg_cesta; ?>_str1.jpg" class="form-background"
 alt="<?php echo $jpg_popis; ?> 1.strana 265kB">
<span class="text-echo" style="top:153px; left:403px;"><?php echo $datum; ?></span>
<span class="text-echo" style="top:301px; left:141px;">x</span>
<span class="text-echo" style="top:516px; left:141px; letter-spacing:13.5px;"><?php echo $fir_ficox; ?></span>
<span class="text-echo" style="top:516px; left:342px; letter-spacing:14px;"><?php echo $mesiac; ?></span>
<span class="text-echo" style="top:516px; left:409px; letter-spacing:13.5px;"><?php echo $kli_vrok; ?></span>
<div class="input-echo" style="width:687px; top:574px; left:135px; height:40px; line-height:40px;"><?php echo $fir_fnaz; ?></div>
<div class="input-echo" style="width:687px; top:655px; left:135px; height:19px; line-height:19px; font-size:15px;"><?php echo $fir_uctt02; ?></div>
<div class="input-echo" style="width:687px; top:735.5px; left:135px; height:39.5px; line-height:39.5px;"><?php echo $fir_fuli; ?></div>
<div class="input-echo" style="width:105px; top:816.5px; left:135px; height:19px; line-height:19px;"><?php echo $fir_fpsc; ?></div>
<div class="input-echo" style="width:553px; top:816.5px; left:269px; height:39.5px; line-height:39.5px;"><?php echo $fir_fmes; ?></div>
<div class="input-echo" style="width:687px; top:898px; left:135px; height:19px; line-height:19px; font-size:15px;"><?php echo $fir_fem1; ?></div>
<input type="text" name="daz" id="daz" onkeyup="CiarkaNaBodku(this);"
 style="width:80px; top:966px; left:236px; height:22px; line-height:22px; font-size:14px; padding-left:4px;"/>
<?php } ?>
<?php // Page 2: data grid overlaid on the form image. <span class="text-echo">
      // rows ($rNNsMM) are computed/read-only values; <input> rows (pocs*,
      // zvys*) are the editable "uzemna samosprava" lines, normalized via
      // CiarkaNaBodku on each keystroke. $top positions each input row. ?>
<?php if ( $strana == 2 ) { ?>
<img src="<?php echo $jpg_cesta; ?>_str2.jpg" class="form-background"
 alt="<?php echo $jpg_popis; ?> 2.strana 265kB" style="width:1250px; height:1000px;">
<!-- 1.STAV k 1.1. -->
<span class="text-echo" style="top:232px; right:868px;"><?php echo $r01s01; ?></span>
<span class="text-echo" style="top:232px; right:784px;"><?php echo $r01s02; ?></span>
<span class="text-echo" style="top:232px; right:700px;"><?php echo $r01s03; ?></span>
<span class="text-echo" style="top:232px; right:616px;"><?php echo $r01s04; ?></span>
<span class="text-echo" style="top:232px; right:532px;"><?php echo $r01s05; ?></span>
<span class="text-echo" style="top:232px; right:439px;"><?php echo $r01s06; ?></span>
<span class="text-echo" style="top:232px; right:355px;"><?php echo $r01s07; ?></span>
<span class="text-echo" style="top:232px; right:246px;"><?php echo $r01s08; ?></span>
<span class="text-echo" style="top:232px; right:162px;"><?php echo $r01s09; ?></span>
<span class="text-echo" style="top:232px; right:70px;"><?php echo $r01s10; ?></span>
<!-- 6.verejna sprava spolu -->
<span class="text-echo" style="top:365px; right:868px;"><?php echo $r06s01; ?></span>
<span class="text-echo" style="top:365px; right:784px;"><?php echo $r06s02; ?></span>
<span class="text-echo" style="top:365px; right:700px;"><?php echo $r06s03; ?></span>
<span class="text-echo" style="top:365px; right:616px;"><?php echo $r06s04; ?></span>
<span class="text-echo" style="top:365px; right:532px;"><?php echo $r06s05; ?></span>
<span class="text-echo" style="top:365px; right:439px;"><?php echo $r06s06; ?></span>
<span class="text-echo" style="top:365px; right:355px;"><?php echo $r06s07; ?></span>
<span class="text-echo" style="top:365px; right:246px;"><?php echo $r06s08; ?></span>
<span class="text-echo" style="top:365px; right:162px;"><?php echo $r06s09; ?></span>
<span class="text-echo" style="top:365px; right:70px;"><?php echo $r06s10; ?></span>
<!-- 8.uzemna samosprava -->
<?php $top=413; ?>
<input type="text" name="pocs01" id="pocs01" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:295px;"/>
<input type="text" name="pocs02" id="pocs02" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:388px;"/>
<input type="text" name="pocs03" id="pocs03" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:472px;"/>
<input type="text" name="pocs04" id="pocs04" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:556px;"/>
<input type="text" name="pocs05" id="pocs05" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:640px;"/>
<input type="text" name="pocs06" id="pocs06" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:725px;"/>
<input type="text" name="pocs07" id="pocs07" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:817px;"/>
<input type="text" name="pocs08" id="pocs08" onkeyup="CiarkaNaBodku(this);" style="width:98px; top:<?php echo $top; ?>px; left:902px;"/>
<input type="text" name="pocs09" id="pocs09" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:1011px;"/>
<input type="text" name="pocs10" id="pocs10" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:1095px;"/>
<!-- 13.ZVYSENIE -->
<span class="text-echo" style="top:592px; right:868px;"><?php echo $r13s01; ?></span>
<span class="text-echo" style="top:592px; right:784px;"><?php echo $r13s02; ?></span>
<span class="text-echo" style="top:592px; right:700px;"><?php echo $r13s03; ?></span>
<span class="text-echo" style="top:592px; right:616px;"><?php echo $r13s04; ?></span>
<span class="text-echo" style="top:592px; right:532px;"><?php echo $r13s05; ?></span>
<span class="text-echo" style="top:592px; right:439px;"><?php echo $r13s06; ?></span>
<span class="text-echo" style="top:592px; right:355px;"><?php echo $r13s07; ?></span>
<span class="text-echo" style="top:592px; right:246px;"><?php echo $r13s08; ?></span>
<span class="text-echo" style="top:592px; right:162px;"><?php echo $r13s09; ?></span>
<span class="text-echo" style="top:592px; right:70px;"><?php echo $r13s10; ?></span>
<!-- 18.verejna sprava spolu -->
<span class="text-echo" style="top:725px; right:868px;"><?php echo $r18s01; ?></span>
<span class="text-echo" style="top:725px; right:784px;"><?php echo $r18s02; ?></span>
<span class="text-echo" style="top:725px; right:700px;"><?php echo $r18s03; ?></span>
<span class="text-echo" style="top:725px; right:616px;"><?php echo $r18s04; ?></span>
<span class="text-echo" style="top:725px; right:532px;"><?php echo $r18s05; ?></span>
<span class="text-echo" style="top:725px; right:439px;"><?php echo $r18s06; ?></span>
<span class="text-echo" style="top:725px; right:355px;"><?php echo $r18s07; ?></span>
<span class="text-echo" style="top:725px; right:246px;"><?php echo $r18s08; ?></span>
<span class="text-echo" style="top:725px; right:162px;"><?php echo $r18s09; ?></span>
<span class="text-echo" style="top:725px; right:70px;"><?php echo $r18s10; ?></span>
<!-- 20.uzemna samosprava -->
<?php $top=772; ?>
<input type="text" name="zvys01" id="zvys01" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:295px;"/>
<input type="text" name="zvys02" id="zvys02" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:388px;"/>
<input type="text" name="zvys03" id="zvys03" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:472px;"/>
<input type="text" name="zvys04" id="zvys04" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:556px;"/>
<input type="text" name="zvys05" id="zvys05" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:640px;"/>
<input type="text" name="zvys06" id="zvys06" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:725px;"/>
<input type="text" name="zvys07" id="zvys07" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:817px;"/>
<input type="text" name="zvys08" id="zvys08" onkeyup="CiarkaNaBodku(this);" style="width:98px; top:<?php echo $top; ?>px; left:902px;"/>
<input type="text" name="zvys09" id="zvys09" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:1011px;"/>
<input type="text" name="zvys10" id="zvys10" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:1095px;"/>
<!-- 25.ZNIZENIE -->
<span class="text-echo" style="top:916px; right:868px;"><?php echo $r25s01; ?></span>
<span class="text-echo" style="top:916px; right:784px;"><?php echo $r25s02; ?></span>
<span class="text-echo" style="top:916px; right:700px;"><?php echo $r25s03; ?></span>
<span class="text-echo" style="top:916px; right:616px;"><?php echo $r25s04; ?></span>
<span class="text-echo" style="top:916px; right:532px;"><?php echo $r25s05; ?></span>
<span class="text-echo" style="top:916px; right:439px;"><?php echo $r25s06; ?></span>
<span class="text-echo" style="top:916px; right:355px;"><?php echo $r25s07; ?></span>
<span class="text-echo" style="top:916px; right:246px;"><?php echo $r25s08; ?></span>
<span class="text-echo" style="top:916px; right:162px;"><?php echo $r25s09; ?></span>
<span class="text-echo" style="top:916px; right:70px;"><?php echo $r25s10; ?></span>
<?php } ?>
<?php // Page 3: same layout convention as page 2 -- read-only $rNNsMM spans
      // over the background image, editable inputs (znis*, oces*, osts*,
      // zoss*) for the "uzemna samosprava" rows, $top per input row. ?>
<?php if ( $strana == 3 ) { ?>
<img src="<?php echo $jpg_cesta; ?>_str3.jpg" class="form-background"
 alt="<?php echo $jpg_popis; ?> 3.strana 265kB" style="width:1250px; height:1000px;">
<!-- 30.verejna sprava spolu -->
<span class="text-echo" style="top:173px; right:868px;"><?php echo $r30s01; ?></span>
<span class="text-echo" style="top:173px; right:784px;"><?php echo $r30s02; ?></span>
<span class="text-echo" style="top:173px; right:700px;"><?php echo $r30s03; ?></span>
<span class="text-echo" style="top:173px; right:616px;"><?php echo $r30s04; ?></span>
<span class="text-echo" style="top:173px; right:532px;"><?php echo $r30s05; ?></span>
<span class="text-echo" style="top:173px; right:439px;"><?php echo $r30s06; ?></span>
<span class="text-echo" style="top:173px; right:355px;"><?php echo $r30s07; ?></span>
<span class="text-echo" style="top:173px; right:246px;"><?php echo $r30s08; ?></span>
<span class="text-echo" style="top:173px; right:162px;"><?php echo $r30s09; ?></span>
<span class="text-echo" style="top:173px; right:70px;"><?php echo $r30s10; ?></span>
<!-- 32.uzemna samosprava -->
<?php $top=221; ?>
<input type="text" name="znis01" id="znis01" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:295px;"/>
<input type="text" name="znis02" id="znis02" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:388px;"/>
<input type="text" name="znis03" id="znis03" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:472px;"/>
<input type="text" name="znis04" id="znis04" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:556px;"/>
<input type="text" name="znis05" id="znis05" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:640px;"/>
<input type="text" name="znis06" id="znis06" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:725px;"/>
<input type="text" name="znis07" id="znis07" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:817px;"/>
<input type="text" name="znis08" id="znis08" onkeyup="CiarkaNaBodku(this);" style="width:98px; top:<?php echo $top; ?>px; left:902px;"/>
<input type="text" name="znis09" id="znis09" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:1011px;"/>
<input type="text" name="znis10" id="znis10" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:1095px;"/>
<!-- 37.ZMENY V OCENENI -->
<?php $top=357; ?>
<input type="text" name="oces01" id="oces01" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:295px;"/>
<input type="text" name="oces02" id="oces02" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:388px;"/>
<input type="text" name="oces03" id="oces03" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:472px;"/>
<input type="text" name="oces04" id="oces04" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:556px;"/>
<input type="text" name="oces05" id="oces05" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:640px;"/>
<input type="text" name="oces06" id="oces06" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:725px;"/>
<input type="text" name="oces07" id="oces07" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:817px;"/>
<input type="text" name="oces08" id="oces08" onkeyup="CiarkaNaBodku(this);" style="width:98px; top:<?php echo $top; ?>px; left:902px;"/>
<input type="text" name="oces09" id="oces09" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:1011px;"/>
<input type="text" name="oces10" id="oces10" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:1095px;"/>
<!-- 38.OSTATNE ZMENY -->
<?php $top=393; ?>
<input type="text" name="osts01" id="osts01" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:295px;"/>
<input type="text" name="osts02" id="osts02" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:388px;"/>
<input type="text" name="osts03" id="osts03" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:472px;"/>
<input type="text" name="osts04" id="osts04" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:556px;"/>
<input type="text" name="osts05" id="osts05" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:640px;"/>
<input type="text" name="osts06" id="osts06" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:725px;"/>
<input type="text" name="osts07" id="osts07" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:817px;"/>
<input type="text" name="osts08" id="osts08" onkeyup="CiarkaNaBodku(this);" style="width:98px; top:<?php echo $top; ?>px; left:902px;"/>
<input type="text" name="osts09" id="osts09" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:1011px;"/>
<input type="text" name="osts10" id="osts10" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:1095px;"/>
<!-- 39.STAV k 31.12. -->
<span class="text-echo" style="top:452px; right:868px;"><?php echo $r39s01; ?></span>
<span class="text-echo" style="top:452px; right:784px;"><?php echo $r39s02; ?></span>
<span class="text-echo" style="top:452px; right:700px;"><?php echo $r39s03; ?></span>
<span class="text-echo" style="top:452px; right:616px;"><?php echo $r39s04; ?></span>
<span class="text-echo" style="top:452px; right:532px;"><?php echo $r39s05; ?></span>
<span class="text-echo" style="top:452px; right:439px;"><?php echo $r39s06; ?></span>
<span class="text-echo" style="top:452px; right:355px;"><?php echo $r39s07; ?></span>
<span class="text-echo" style="top:452px; right:246px;"><?php echo $r39s08; ?></span>
<span class="text-echo" style="top:452px; right:162px;"><?php echo $r39s09; ?></span>
<span class="text-echo" style="top:452px; right:70px;"><?php echo $r39s10; ?></span>
<!-- 44.verejna sprava spolu -->
<span class="text-echo" style="top:585px; right:868px;"><?php echo $r44s01; ?></span>
<span class="text-echo" style="top:585px; right:784px;"><?php echo $r44s02; ?></span>
<span class="text-echo" style="top:585px; right:700px;"><?php echo $r44s03; ?></span>
<span class="text-echo" style="top:585px; right:616px;"><?php echo $r44s04; ?></span>
<span class="text-echo" style="top:585px; right:532px;"><?php echo $r44s05; ?></span>
<span class="text-echo" style="top:585px; right:439px;"><?php echo $r44s06; ?></span>
<span class="text-echo" style="top:585px; right:355px;"><?php echo $r44s07; ?></span>
<span class="text-echo" style="top:585px; right:246px;"><?php echo $r44s08; ?></span>
<span class="text-echo" style="top:585px; right:162px;"><?php echo $r44s09; ?></span>
<span class="text-echo" style="top:585px; right:70px;"><?php echo $r44s10; ?></span>
<!-- 46.uzemna samosprava -->
<?php $top=633; ?>
<input type="text" name="zoss01" id="zoss01" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:295px;"/>
<input type="text" name="zoss02" id="zoss02" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:388px;"/>
<input type="text" name="zoss03" id="zoss03" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:472px;"/>
<input type="text" name="zoss04" id="zoss04" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:556px;"/>
<input type="text" name="zoss05" id="zoss05" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:640px;"/>
<input type="text" name="zoss06" id="zoss06" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:725px;"/>
<input type="text" name="zoss07" id="zoss07" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:817px;"/>
<input type="text" name="zoss08" id="zoss08" onkeyup="CiarkaNaBodku(this);" style="width:98px; top:<?php echo $top; ?>px; left:902px;"/>
<input type="text" name="zoss09" id="zoss09" onkeyup="CiarkaNaBodku(this);" style="width:73px; top:<?php echo $top; ?>px; left:1011px;"/>
<input type="text" name="zoss10" id="zoss10" onkeyup="CiarkaNaBodku(this);" style="width:82px; top:<?php echo $top; ?>px; left:1095px;"/>
<?php } ?>
<div class="navbar">
<a href="#" onclick="window.open('<?php echo $source; ?>?copern=20&strana=1&cislo_oc=<?php echo $cislo_oc; ?>', '_self');" class="<?php echo $clas1; ?> toleft">1</a>
<a href="#" onclick="window.open('<?php echo $source; ?>?copern=20&strana=2&cislo_oc=<?php echo $cislo_oc; ?>', '_self');" class="<?php echo $clas2; ?> toleft">2</a>
<a href="#" onclick="window.open('<?php echo $source; ?>?copern=20&strana=3&cislo_oc=<?php echo $cislo_oc; ?>', '_self');" class="<?php echo $clas3; ?> toleft">3</a>
<?php // FIX: the top save button already uses id="uloz"; ids must be unique
      // per the HTML spec, so the bottom button gets id="uloz2". The name
      // stays "uloz", so the POST payload sent to copern=23 is unchanged. ?>
<INPUT type="submit" id="uloz2" name="uloz" value="Uloi zmeny" class="btn-bottom-formsave">
</div>
</FORM>
</div> <!-- #content -->
<?php
}
//koniec uprav (end of the $copern == 20 edit-mode output)
?>
<?php
/////////////////////////////////////////////////VYTLAC
// Print branch ($copern == 10): render the report as a PDF via FPDF.
// (The enclosing if/while continue below this chunk.)
if ( $copern == 10 )
{
// Remove a stale per-user temp PDF left over from a previous print run.
if ( File_Exists("../tmp/vykazfin.$kli_uzid.pdf") ) { $soubor = unlink("../tmp/vykazfin.$kli_uzid.pdf"); }
define('FPDF_FONTPATH','../fpdf/font/');
require('../fpdf/fpdf.php');
// Custom page size 210x320 mm, portrait orientation.
$sirka_vyska="210,320";
$velkost_strany = explode(",", $sirka_vyska);
$pdf=new FPDF("P","mm", $velkost_strany );
$pdf->Open();
$pdf->AddFont('arial','','arial.php');
//vytlac -- fetch the report rows for this quarter
// NOTE(review): $cislo_oc is interpolated directly into the SQL string --
// confirm it is validated as numeric upstream (SQL injection risk otherwise).
// NOTE(review): the mysql_* API was removed in PHP 7; this code requires
// PHP 5.x or a migration to mysqli/PDO.
$sqltt = "SELECT * FROM F$kli_vxcf"."_uctvykaz_fin304".
" WHERE F$kli_vxcf"."_uctvykaz_fin304.oc = $cislo_oc ORDER BY oc";
$sql = mysql_query("$sqltt");
$pol = mysql_num_rows($sql);
$i=0;
$j=0; //zaciatok strany ak by som chcel strankovat (page start, for future paging)
while ($i <= $pol )
{
if (@$zaznam=mysql_data_seek($sql,$i))
{
$hlavicka=mysql_fetch_object($sql);
if ( $strana == 1 OR $strana == 9999 ) {
$pdf->AddPage();
$pdf->SetFont('arial','',10);
$pdf->SetLeftMargin(10);
$pdf->SetTopMargin(10);
if ( File_Exists($jpg_cesta.'_str1.jpg') )
{
$pdf->Image($jpg_cesta.'_str1.jpg',0,0,210,297);
}
$pdf->SetY(10);
//obdobie k
$text=$datum;
$pdf->Cell(195,19," ","$rmc1",1,"L");
$pdf->Cell(78,6," ","$rmc1",0,"R");$pdf->Cell(22,4,"$text","$rmc",1,"C");
//druh vykazu krizik
$text="x";
$pdf->Cell(195,30.5," ","$rmc1",1,"L");
$pdf->Cell(20,4," ","$rmc1",0,"R");$pdf->Cell(4,3,"$text","$rmc",1,"C");
//ico
$text=$fir_ficox;
$textx="12345678";
$t01=substr($text,0,1);
$t02=substr($text,1,1);
$t03=substr($text,2,1);
$t04=substr($text,3,1);
$t05=substr($text,4,1);
$t06=substr($text,5,1);
$t07=substr($text,6,1);
$t08=substr($text,7,1);
$pdf->Cell(195,45.5," ","$rmc1",1,"L");
$pdf->Cell(20,5," ","$rmc1",0,"R");
$pdf->Cell(5,5,"$t01","$rmc",0,"C");$pdf->Cell(5,5,"$t02","$rmc",0,"C");
$pdf->Cell(5,5,"$t03","$rmc",0,"C");$pdf->Cell(4,5,"$t04","$rmc",0,"C");
$pdf->Cell(5,5,"$t05","$rmc",0,"C");$pdf->Cell(5,5,"$t06","$rmc",0,"C");
$pdf->Cell(5,5,"$t07","$rmc",0,"C");$pdf->Cell(5,5,"$t08","$rmc",0,"C");
//mesiac
$text=$mesiac;
$textx="12345678";
$t01=substr($text,0,1);
$t02=substr($text,1,1);
$pdf->Cell(5,5," ","$rmc1",0,"C");
$pdf->Cell(5,5,"$t01","$rmc",0,"C");$pdf->Cell(5,5,"$t02","$rmc",0,"C");
//rok
$text=$kli_vrok;
$textx="1234";
$t01=substr($text,0,1);
$t02=substr($text,1,1);
$t03=substr($text,2,1);
$t04=substr($text,3,1);
$pdf->Cell(5,5," ","$rmc1",0,"C");
$pdf->Cell(5,5,"$t01","$rmc",0,"C");$pdf->Cell(5,5,"$t02","$rmc",0,"C");
$pdf->Cell(5,5,"$t03","$rmc",0,"C");$pdf->Cell(5,5,"$t04","$rmc",1,"C");
//nazov subjektu
$text=$fir_fnaz;
$textx="0123456789abcdefghijklmnoprstuv";
$t01=substr($text,0,1);
$t02=substr($text,1,1);
$t03=substr($text,2,1);
$t04=substr($text,3,1);
$t05=substr($text,4,1);
$t06=substr($text,5,1);
$t07=substr($text,6,1);
$t08=substr($text,7,1);
$t09=substr($text,8,1);
$t10=substr($text,9,1);
$t11=substr($text,10,1);
$t12=substr($text,11,1);
$t13=substr($text,12,1);
$t14=substr($text,13,1);
$t15=substr($text,14,1);
$t16=substr($text,15,1);
$t17=substr($text,16,1);
$t18=substr($text,17,1);
$t19=substr($text,18,1);
$t20=substr($text,19,1);
$t21=substr($text,20,1);
$t22=substr($text,21,1);
$t23=substr($text,22,1);
$t24=substr($text,23,1);
$t25=substr($text,24,1);
$t26=substr($text,25,1);
$t27=substr($text,26,1);
$t28=substr($text,27,1);
$t29=substr($text,28,1);
$t30=substr($text,29,1);
$t31=substr($text,30,1);
$pdf->Cell(195,8.5," ","$rmc1",1,"L");
$pdf->Cell(20,5," ","$rmc1",0,"C");
$pdf->Cell(5,5,"$t01","$rmc",0,"C");$pdf->Cell(5,5,"$t02","$rmc",0,"C");
$pdf->Cell(4,5,"$t03","$rmc",0,"C");$pdf->Cell(5,5,"$t04","$rmc",0,"C");
$pdf->Cell(5,5,"$t05","$rmc",0,"C");$pdf->Cell(5,5,"$t06","$rmc",0,"C");
$pdf->Cell(5,5,"$t07","$rmc",0,"C");$pdf->Cell(5,5,"$t08","$rmc",0,"C");
$pdf->Cell(5,5,"$t09","$rmc",0,"C");$pdf->Cell(5,5,"$t10","$rmc",0,"C");
$pdf->Cell(5,5,"$t11","$rmc",0,"C");$pdf->Cell(5,5,"$t12","$rmc",0,"C");
$pdf->Cell(5,5,"$t13","$rmc",0,"C");$pdf->Cell(5,5,"$t14","$rmc",0,"C");
$pdf->Cell(5,5,"$t15","$rmc",0,"C");$pdf->Cell(5,5,"$t16","$rmc",0,"C");
$pdf->Cell(5,5,"$t17","$rmc",0,"C");$pdf->Cell(5,5,"$t18","$rmc",0,"C");
$pdf->Cell(5,5,"$t19","$rmc",0,"C");$pdf->Cell(4,5,"$t20","$rmc",0,"C");
$pdf->Cell(5,5,"$t21","$rmc",0,"C");$pdf->Cell(5,5,"$t22","$rmc",0,"C");
$pdf->Cell(5,5,"$t23","$rmc",0,"C");$pdf->Cell(5,5,"$t24","$rmc",0,"C");
$pdf->Cell(5,5,"$t25","$rmc",0,"C");$pdf->Cell(5,5,"$t26","$rmc",0,"C");
$pdf->Cell(5,5,"$t27","$rmc",0,"C");$pdf->Cell(5,5,"$t28","$rmc",0,"C");
$pdf->Cell(5,5,"$t29","$rmc",0,"C");$pdf->Cell(5,5,"$t30","$rmc",0,"C");
$pdf->Cell(5,5,"$t31","$rmc",1,"C");
//
$text=substr($fir_fnaz,31,30);;
//$text="Èý0123456789abcdefghijklmnoprstuv";
$t01=substr($text,0,1);
$t02=substr($text,1,1);
$t03=substr($text,2,1);
$t04=substr($text,3,1);
$t05=substr($text,4,1);
$t06=substr($text,5,1);
$t07=substr($text,6,1);
$t08=substr($text,7,1);
$t09=substr($text,8,1);
$t10=substr($text,9,1);
$t11=substr($text,10,1);
$t12=substr($text,11,1);
$t13=substr($text,12,1);
$t14=substr($text,13,1);
$t15=substr($text,14,1);
$t16=substr($text,15,1);
$t17=substr($text,16,1);
$t18=substr($text,17,1);
$t19=substr($text,18,1);
$t20=substr($text,19,1);
$t21=substr($text,20,1);
$t22=substr($text,21,1);
$t23=substr($text,22,1);
$t24=substr($text,23,1);
$t25=substr($text,24,1);
$t26=substr($text,25,1);
$t27=substr($text,26,1);
$t28=substr($text,27,1);
$t29=substr($text,28,1);
$t30=substr($text,29,1);
$t31=substr($text,30,1);
$pdf->Cell(20,5," ","$rmc1",0,"C");
$pdf->Cell(5,5,"$t01","$rmc",0,"C");$pdf->Cell(5,5,"$t02","$rmc",0,"C");
$pdf->Cell(4,5,"$t03","$rmc",0,"C");$pdf->Cell(5,5,"$t04","$rmc",0,"C");
$pdf->Cell(5,5,"$t05","$rmc",0,"C");$pdf->Cell(5,5,"$t06","$rmc",0,"C");
$pdf->Cell(5,5,"$t07","$rmc",0,"C");$pdf->Cell(5,5,"$t08","$rmc",0,"C");
$pdf->Cell(5,5,"$t09","$rmc",0,"C");$pdf->Cell(5,5,"$t10","$rmc",0,"C");
$pdf->Cell(5,5,"$t11","$rmc",0,"C");$pdf->Cell(5,5,"$t12","$rmc",0,"C");
$pdf->Cell(5,5,"$t13","$rmc",0,"C");$pdf->Cell(5,5,"$t14","$rmc",0,"C");
$pdf->Cell(5,5,"$t15","$rmc",0,"C");$pdf->Cell(5,5,"$t16","$rmc",0,"C");
$pdf->Cell(5,5,"$t17","$rmc",0,"C");$pdf->Cell(5,5,"$t18","$rmc",0,"C");
$pdf->Cell(5,5,"$t19","$rmc",0,"C");$pdf->Cell(4,5,"$t20","$rmc",0,"C");
$pdf->Cell(5,5,"$t21","$rmc",0,"C");$pdf->Cell(5,5,"$t22","$rmc",0,"C");
$pdf->Cell(5,5,"$t23","$rmc",0,"C");$pdf->Cell(5,5,"$t24","$rmc",0,"C");
$pdf->Cell(5,5,"$t25","$rmc",0,"C");$pdf->Cell(5,5,"$t26","$rmc",0,"C");
$pdf->Cell(5,5,"$t27","$rmc",0,"C");$pdf->Cell(5,5,"$t28","$rmc",0,"C");
$pdf->Cell(5,5,"$t29","$rmc",0,"C");$pdf->Cell(5,5,"$t30","$rmc",0,"C");
$pdf->Cell(5,5,"$t31","$rmc",1,"C");
//pravna forma
$text=$fir_uctt02;
$textx="0123456789abcdefghijklmnoprstuv";
$t01=substr($text,0,1);
$t02=substr($text,1,1);
$t03=substr($text,2,1);
$t04=substr($text,3,1);
$t05=substr($text,4,1);
$t06=substr($text,5,1);
$t07=substr($text,6,1);
$t08=substr($text,7,1);
$t09=substr($text,8,1);
$t10=substr($text,9,1);
$t11=substr($text,10,1);
$t12=substr($text,11,1);
$t13=substr($text,12,1);
$t14=substr($text,13,1);
$t15=substr($text,14,1);
$t16=substr($text,15,1);
$t17=substr($text,16,1);
$t18=substr($text,17,1);
$t19=substr($text,18,1);
$t20=substr($text,19,1);
$t21=substr($text,20,1);
$t22=substr($text,21,1);
$t23=substr($text,22,1);
$t24=substr($text,23,1);
$t25=substr($text,24,1);
$t26=substr($text,25,1);
$t27=substr($text,26,1);
$t28=substr($text,27,1);
$t29=substr($text,28,1);
$t30=substr($text,29,1);
$t31=substr($text,30,1);
$pdf->Cell(195,8.5," ","$rmc1",1,"L");
$pdf->Cell(20,5," ","$rmc1",0,"C");
$pdf->Cell(5,5,"$t01","$rmc",0,"C");$pdf->Cell(5,5,"$t02","$rmc",0,"C");
$pdf->Cell(4,5,"$t03","$rmc",0,"C");$pdf->Cell(5,5,"$t04","$rmc",0,"C");
$pdf->Cell(5,5,"$t05","$rmc",0,"C");$pdf->Cell(5,5,"$t06","$rmc",0,"C");
$pdf->Cell(5,5,"$t07","$rmc",0,"C");$pdf->Cell(5,5,"$t08","$rmc",0,"C");
$pdf->Cell(5,5,"$t09","$rmc",0,"C");$pdf->Cell(5,5,"$t10","$rmc",0,"C");
$pdf->Cell(5,5,"$t11","$rmc",0,"C");$pdf->Cell(5,5,"$t12","$rmc",0,"C");
$pdf->Cell(5,5,"$t13","$rmc",0,"C");$pdf->Cell(5,5,"$t14","$rmc",0,"C");
$pdf->Cell(5,5,"$t15","$rmc",0,"C");$pdf->Cell(5,5,"$t16","$rmc",0,"C");
$pdf->Cell(5,5,"$t17","$rmc",0,"C");$pdf->Cell(5,5,"$t18","$rmc",0,"C");
$pdf->Cell(5,5,"$t19","$rmc",0,"C");$pdf->Cell(4,5,"$t20","$rmc",0,"C");
$pdf->Cell(5,5,"$t21","$rmc",0,"C");$pdf->Cell(5,5,"$t22","$rmc",0,"C");
$pdf->Cell(5,5,"$t23","$rmc",0,"C");$pdf->Cell(5,5,"$t24","$rmc",0,"C");
$pdf->Cell(5,5,"$t25","$rmc",0,"C");$pdf->Cell(5,5,"$t26","$rmc",0,"C");
$pdf->Cell(5,5,"$t27","$rmc",0,"C");$pdf->Cell(5,5,"$t28","$rmc",0,"C");
$pdf->Cell(5,5,"$t29","$rmc",0,"C");$pdf->Cell(5,5,"$t30","$rmc",0,"C");
$pdf->Cell(5,5,"$t31","$rmc",1,"C");
//ulica a cislo
$text=$fir_fuli." ".$fir_fcdm;
$textx="0123456789abcdefghijklmnoprstuv";
$t01=substr($text,0,1);
$t02=substr($text,1,1);
$t03=substr($text,2,1);
$t04=substr($text,3,1);
$t05=substr($text,4,1);
$t06=substr($text,5,1);
$t07=substr($text,6,1);
$t08=substr($text,7,1);
$t09=substr($text,8,1);
$t10=substr($text,9,1);
$t11=substr($text,10,1);
$t12=substr($text,11,1);
$t13=substr($text,12,1);
$t14=substr($text,13,1);
$t15=substr($text,14,1);
$t16=substr($text,15,1);
$t17=substr($text,16,1);
$t18=substr($text,17,1);
$t19=substr($text,18,1);
$t20=substr($text,19,1);
$t21=substr($text,20,1);
$t22=substr($text,21,1);
$t23=substr($text,22,1);
$t24=substr($text,23,1);
$t25=substr($text,24,1);
$t26=substr($text,25,1);
$t27=substr($text,26,1);
$t28=substr($text,27,1);
$t29=substr($text,28,1);
$t30=substr($text,29,1);
$t31=substr($text,30,1);
$pdf->Cell(195,13.5," ","$rmc1",1,"L");
$pdf->Cell(20,5," ","$rmc1",0,"C");
$pdf->Cell(5,5,"$t01","$rmc",0,"C");$pdf->Cell(5,5,"$t02","$rmc",0,"C");
$pdf->Cell(4,5,"$t03","$rmc",0,"C");$pdf->Cell(5,5,"$t04","$rmc",0,"C");
$pdf->Cell(5,5,"$t05","$rmc",0,"C");$pdf->Cell(5,5,"$t06","$rmc",0,"C");
$pdf->Cell(5,5,"$t07","$rmc",0,"C");$pdf->Cell(5,5,"$t08","$rmc",0,"C");
$pdf->Cell(5,5,"$t09","$rmc",0,"C");$pdf->Cell(5,5,"$t10","$rmc",0,"C");
$pdf->Cell(5,5,"$t11","$rmc",0,"C");$pdf->Cell(5,5,"$t12","$rmc",0,"C");
$pdf->Cell(5,5,"$t13","$rmc",0,"C");$pdf->Cell(5,5,"$t14","$rmc",0,"C");
$pdf->Cell(5,5,"$t15","$rmc",0,"C");$pdf->Cell(5,5,"$t16","$rmc",0,"C");
$pdf->Cell(5,5,"$t17","$rmc",0,"C");$pdf->Cell(5,5,"$t18","$rmc",0,"C");
$pdf->Cell(5,5,"$t19","$rmc",0,"C");$pdf->Cell(4,5,"$t20","$rmc",0,"C");
$pdf->Cell(5,5,"$t21","$rmc",0,"C");$pdf->Cell(5,5,"$t22","$rmc",0,"C");
$pdf->Cell(5,5,"$t23","$rmc",0,"C");$pdf->Cell(5,5,"$t24","$rmc",0,"C");
$pdf->Cell(5,5,"$t25","$rmc",0,"C");$pdf->Cell(5,5,"$t26","$rmc",0,"C");
$pdf->Cell(5,5,"$t27","$rmc",0,"C");$pdf->Cell(5,5,"$t28","$rmc",0,"C");
$pdf->Cell(5,5,"$t29","$rmc",0,"C");$pdf->Cell(5,5,"$t30","$rmc",0,"C");
$pdf->Cell(5,5,"$t31","$rmc",1,"C");
//
// Blank continuation row: 31 empty boxes under the name row ($text is a single
// space, so every substr beyond index 0 prints as an empty cell).
$text=" ";
$pdf->Cell(20,5," ","$rmc1",0,"C");
for ($ci = 0; $ci < 31; $ci++) {
    $ch = substr($text, $ci, 1);
    $cw = ( $ci == 2 || $ci == 19 ) ? 4 : 5;  // boxes 3 and 20 are slightly narrower
    $ln = ( $ci == 30 ) ? 1 : 0;              // last box ends the PDF line
    $pdf->Cell($cw,5,"$ch","$rmc",$ln,"C");
}
//psc
// Postal code: strip all spaces, then print up to 5 characters in boxed cells.
$fir_fpsc=str_replace(" ","",$fir_fpsc);
$text=$fir_fpsc;
$textx="123456";   // unused positional template kept from the original
$pdf->Cell(195,8.5," ","$rmc1",1,"L");
$pdf->Cell(20,5," ","$rmc1",0,"C");
for ($ci = 0; $ci < 5; $ci++) {
    $ch = substr($text, $ci, 1);
    $cw = ( $ci == 2 ) ? 4 : 5;               // third box is slightly narrower
    // No line break even after the last box — the municipality row follows on the same line.
    $pdf->Cell($cw,5,"$ch","$rmc",0,"C");
}
//obec
// Municipality: up to 25 characters in boxed cells, continuing the postal-code line.
$text=$fir_fmes;
$textx="123456789abcdefghijklmnov";   // unused positional template kept from the original
$pdf->Cell(5,5," ","$rmc1",0,"C");
for ($ci = 0; $ci < 25; $ci++) {
    $ch = substr($text, $ci, 1);
    $cw = ( $ci == 13 ) ? 4 : 5;   // 14th box is slightly narrower
    $ln = ( $ci == 24 ) ? 1 : 0;   // last box ends the PDF line
    $pdf->Cell($cw,5,"$ch","$rmc",$ln,"C");
}
//
// Blank continuation row: 25 empty boxes under the municipality row.
$text=" ";
$pdf->Cell(49,5," ","$rmc1",0,"C");
for ($ci = 0; $ci < 25; $ci++) {
    $ch = substr($text, $ci, 1);
    $cw = ( $ci == 13 ) ? 4 : 5;   // 14th box is slightly narrower
    $ln = ( $ci == 24 ) ? 1 : 0;   // last box ends the PDF line
    $pdf->Cell($cw,5,"$ch","$rmc",$ln,"C");
}
//email
// E-mail address: up to 31 characters in boxed cells (same layout as the name row).
$text=$fir_fem1;
$textx="0123456789abcdefghijklmnoprstuv";   // unused positional template kept from the original
$pdf->Cell(195,8.5," ","$rmc1",1,"L");
$pdf->Cell(20,5," ","$rmc1",0,"C");
for ($ci = 0; $ci < 31; $ci++) {
    $ch = substr($text, $ci, 1);
    $cw = ( $ci == 2 || $ci == 19 ) ? 4 : 5;  // boxes 3 and 20 are slightly narrower
    $ln = ( $ci == 30 ) ? 1 : 0;              // last box ends the PDF line
    $pdf->Cell($cw,5,"$ch","$rmc",$ln,"C");
}
//datum zostavenia
// Date the statement was drawn up; a zero date from the DB is printed as blank.
$daz= SkDatum($hlavicka->daz);
if ( $daz == '00.00.0000' ) $daz="";
$pdf->Cell(195,12," ","$rmc1",1,"L");
$pdf->Cell(40,5," ","$rmc1",0,"C");$pdf->Cell(22,4,"$daz","$rmc",1,"C");
// Closes the page-1 branch opened before this excerpt.
}
// Page 2 of the report: seven numeric rows of ten right-aligned value cells
// over the page-2 background image.
if ( $strana == 2 OR $strana == 9999 ) {
$pdf->AddPage(L);   // NOTE(review): bare constant L — presumably 'L' (landscape); confirm it is defined.
$pdf->SetFont('arial','',8);
$pdf->SetLeftMargin(10);
$pdf->SetTopMargin(10);
if ( File_Exists($jpg_cesta.'_str2.jpg') )
{
$pdf->Image($jpg_cesta.'_str2.jpg',5,0,305,200);
}
$pdf->SetY(10);
//VYBRANE AKTIVA
// Pull the ten per-column values for each source field from $hlavicka,
// blanking zeros so empty cells print instead of "0".
$pocs = array(); $zvys = array(); $znis = array();
$pocs2 = array(); $zvys2 = array();
for ($ii = 1; $ii <= 10; $ii++) {
    $sfx = sprintf('%02d', $ii);
    $v = $hlavicka->{'pocs'.$sfx}; $pocs[$ii] = ( $v == 0 ) ? "" : "$v";
    $v = $hlavicka->{'zvys'.$sfx}; $zvys[$ii] = ( $v == 0 ) ? "" : "$v";
    $v = $hlavicka->{'znis'.$sfx}; $znis[$ii] = ( $v == 0 ) ? "" : "$v";
    // Second-stage blanking, faithful to the original r06/r18 variables.
    $pocs2[$ii] = ( $pocs[$ii] == 0 ) ? "" : $pocs[$ii];
    $zvys2[$ii] = ( $zvys[$ii] == 0 ) ? "" : $zvys[$ii];
}
// Widths of the ten value columns; the tenth cell ends the PDF line.
$colw = array(23,20,21,20,21,22.5,20.5,27,20.5,22);
// Each row: array(Y position, cell height, values to print).
$rows = array(
    array(38.5,  7, $pocs),    // 1.  STAV k 1.1.
    array(66.5,  6, $pocs2),   // 6.  verejna sprava spolu
    array(77,    6, $pocs),    // 8.  uzemna samosprava
    array(110.5, 7, $zvys),    // 13. ZVYSENIE
    array(138.5, 6, $zvys2),   // 18. verejna sprava spolu
    array(149,   6, $zvys),    // 20. uzemna samosprava
    array(175,   7, $znis),    // 25. ZNIZENIE
);
foreach ($rows as $row) {
    $pdf->SetY($row[0]);
    $pdf->Cell(66,3," ","$rmc1",0,"R");
    for ($ii = 1; $ii <= 10; $ii++) {
        $ln = ( $ii == 10 ) ? 1 : 0;
        $pdf->Cell($colw[$ii-1], $row[1], $row[2][$ii], "$rmc", $ln, "R");
    }
}
}
// Page 3 of the report: seven numeric rows of ten right-aligned value cells
// over the page-3 background image.
if ( $strana == 3 OR $strana == 9999 ) {
$pdf->AddPage(L);   // NOTE(review): bare constant L — presumably 'L' (landscape); confirm it is defined.
$pdf->SetFont('arial','',8);
$pdf->SetLeftMargin(10);
$pdf->SetTopMargin(10);
if ( File_Exists($jpg_cesta.'_str3.jpg') )
{
$pdf->Image($jpg_cesta.'_str3.jpg',5,0,305,200);
}
$pdf->SetY(10);
//VYBRANE AKTIVA
// Pull the ten per-column values for each source field from $hlavicka,
// blanking zeros so empty cells print instead of "0".
$znis = array(); $oces = array(); $osts = array(); $zoss = array();
$znis2 = array(); $zoss2 = array();
for ($ii = 1; $ii <= 10; $ii++) {
    $sfx = sprintf('%02d', $ii);
    $v = $hlavicka->{'znis'.$sfx}; $znis[$ii] = ( $v == 0 ) ? "" : "$v";
    $v = $hlavicka->{'oces'.$sfx}; $oces[$ii] = ( $v == 0 ) ? "" : "$v";
    $v = $hlavicka->{'osts'.$sfx}; $osts[$ii] = ( $v == 0 ) ? "" : "$v";
    $v = $hlavicka->{'zoss'.$sfx}; $zoss[$ii] = ( $v == 0 ) ? "" : "$v";
    // Second-stage blanking, faithful to the original r30/r44 variables.
    $znis2[$ii] = ( $znis[$ii] == 0 ) ? "" : $znis[$ii];
    $zoss2[$ii] = ( $zoss[$ii] == 0 ) ? "" : $zoss[$ii];
}
// Widths of the ten value columns; the tenth cell ends the PDF line.
$colw = array(23,20,21,20,21,22.5,20.5,27,20.5,22);
// Each row: array(Y position, cell height, values to print).
$rows = array(
    array(28,    6, $znis2),   // 30. verejna sprava spolu
    array(38.5,  6, $znis),    // 32. uzemna samosprava
    array(65.5,  7, $oces),    // 37. ZMENY V OCENENI
    array(72.5,  7, $osts),    // 38. OSTATNE ZMENY
    array(80,    9, $zoss),    // 39. STAV k 31.12.
    array(110.5, 6, $zoss2),   // 44. verejna sprava spolu
    array(121,   6, $zoss),    // 46. uzemna samosprava
);
foreach ($rows as $row) {
    $pdf->SetY($row[0]);
    $pdf->Cell(66,3," ","$rmc1",0,"R");
    for ($ii = 1; $ii <= 10; $ii++) {
        $ln = ( $ii == 10 ) ? 1 : 0;
        $pdf->Cell($colw[$ii-1], $row[1], $row[2][$ii], "$rmc", $ln, "R");
    }
}
}
}
// Advance to the next record of the outer report loop (opened before this excerpt).
$i = $i + 1;
}
// Write the finished PDF to a per-user temp file.
$pdf->Output("../tmp/vykazfin.$kli_uzid.pdf");
?>
<script type="text/javascript">
// Open the generated PDF in the current browser window.
var okno = window.open("../tmp/vykazfin.<?php echo $kli_uzid; ?>.pdf","_self");
</script>
<?php
}
/////////////////////////////////////////KONIEC VYTLACENIA
?>
<?php
// Drop the three per-user temporary work tables created for this report.
// NOTE(review): mysql_* is deprecated and removed in PHP 7 — migrate to mysqli/PDO.
foreach (array('', 'x', 'z') as $suf) {
    $sqlt = 'DROP TABLE F'.$kli_vxcf.'_uctprcvykaz'.$suf.$kli_uzid;
    $vysledok = mysql_query("$sqlt");
}
//celkovy koniec
$cislista = include("uct_lista_norm.php");
} while (false);
?>
</BODY>
</HTML>
|
eurosecom/projekt1
|
ucto/vykaz_fin304_2016.php
|
PHP
|
apache-2.0
| 87,883
|
package org.apache.commons.jelly.tags.quartz;
/*
* Copyright 2002,2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.commons.jelly.XMLOutput;
import org.apache.commons.jelly.JellyTagException;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
/** Block and wait for the Quartz scheduler to shutdown.
*
* @author <a href="mailto:bob@eng.werken.com">bob mcwhirter</a>
*/
public class WaitForSchedulerTag extends QuartzTagSupport
{
    // ------------------------------------------------------------
    //     Constructors
    // ------------------------------------------------------------

    /** Construct. */
    public WaitForSchedulerTag()
    {
        // intentionally left blank.
    }

    // ------------------------------------------------------------
    //     Instance methods
    // ------------------------------------------------------------

    /** Perform this tag: block until the Quartz scheduler reports shutdown.
     *
     *  Polls {@code Scheduler.isShutdown()} every 500 ms.
     *
     *  @param output Output sink (unused; this tag produces no output).
     *
     *  @throws JellyTagException If the scheduler cannot be obtained or queried.
     */
    public void doTag(XMLOutput output) throws JellyTagException
    {
        try {
            Scheduler sched = getScheduler();

            while (!sched.isShutdown())
            {
                try
                {
                    Thread.sleep(500);
                }
                catch (InterruptedException e)
                {
                    // Restore the interrupt flag so callers up the stack can
                    // observe the interruption, then stop waiting.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
        catch (SchedulerException e) {
            throw new JellyTagException(e);
        }
    }
}
|
hudson3-plugins/commons-jelly
|
jelly-tags/quartz/src/java/org/apache/commons/jelly/tags/quartz/WaitForSchedulerTag.java
|
Java
|
apache-2.0
| 2,364
|
/*
* Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "attention.h"
#include "attentionPlugin.h"

#include <algorithm>
#include <cstring>
#include <iostream>
#include <sstream>
#include <utility>
#include <vector>
//#define DEBUG 0
using namespace nvinfer1;
namespace
{
const char* ATTENTION_PLUGIN_VERSION{"1"};
const char* ATTENTION_PLUGIN_NAME{"Attention_TRT"};
}
PluginFieldCollection AttentionPluginCreator::mFC{};
REGISTER_TENSORRT_PLUGIN(AttentionPluginCreator);
//! Helper function for serializing a trivially-copyable value into a byte
//! buffer, advancing the buffer pointer past the written bytes.
//! Uses memcpy instead of a reinterpret_cast store, which would be undefined
//! behavior (unaligned write / strict-aliasing violation) on some platforms.
template <typename T>
void writeToBuffer(char*& buffer, const T& val)
{
    std::memcpy(buffer, &val, sizeof(T));
    buffer += sizeof(T);
}
//! Helper function for deserializing a trivially-copyable value from a byte
//! buffer, advancing the buffer pointer past the consumed bytes.
//! Uses memcpy instead of a reinterpret_cast load, which would be undefined
//! behavior (unaligned read / strict-aliasing violation) on some platforms.
template <typename T>
T readFromBuffer(const char*& buffer)
{
    T val;
    std::memcpy(&val, buffer, sizeof(T));
    buffer += sizeof(T);
    return val;
}
//! Deserializing constructor: rebuilds the plugin's AttentionParams from the
//! byte stream produced by serialize(). Field order must match serialize() exactly.
AttentionPlugin::AttentionPlugin(std::string name, const void* data, size_t length)
: mLayerName{name}
{
const char* d = static_cast<const char*>(data);
const char* a = d;   // start of the buffer, kept to validate the total bytes read
// Total number of output elements.
params.nbElements = readFromBuffer<int>(d);
// Stored stride-vector length (written as a size_t by serialize()).
size_t nbDims = readFromBuffer<size_t>(d);
// Strides of each of the four inputs.
for(int i = 0; i < 4; i++)
{
for(unsigned int j = 0; j < nbDims; j++)
{
params.inputStrides[i].push_back(readFromBuffer<int>(d));
}
}
// Strides of the output tensor.
for(unsigned int j = 0; j < nbDims; j++)
{
params.resStrides.push_back(readFromBuffer<int>(d));
}
// Length of each dimension.
for(unsigned int j = 0; j < nbDims; j++)
{
params.len.push_back(readFromBuffer<int>(d));
}
// Data type was stored as a plain int.
params.type = (nvinfer1::DataType)(readFromBuffer<int>(d));
// Every byte of the serialized blob must have been consumed.
assert(d == (a + length));
}
//! Creation-time constructor: parameters are filled in later by configurePlugin().
//! The by-value string is moved into the member to avoid an extra copy.
AttentionPlugin::AttentionPlugin(std::string name)
    : mLayerName{std::move(name)}
{
}
//!
//! \brief This function computes the strides for a given tensor. If the length along a dimension is 1,
//! the stride along that dimension is set to 0 to aid broadcast, else it is the product of the lower dimensions.
//! Since dims.nbDims does not contain
//! the batch dimension, we add a placeholder for the batchStride at strides[0].
//!
std::vector<int> computeStrides(Dims dims, bool isBroadcastAccrossN)
{
int stride = 1;
std::vector<int> strides;
// One extra slot: strides[0] holds the batch stride.
strides.resize(dims.nbDims + 1);
// Innermost dimension: unit stride, or 0 when its length is 1 so the same
// element is re-read during broadcasting.
strides[dims.nbDims] = 1;
if (dims.d[dims.nbDims - 1] == 1)
{
strides[dims.nbDims] = 0;
}
// Walk outward: each stride is the product of all inner dimension lengths,
// again forced to 0 for broadcast (length-1) dimensions.
for (int i = dims.nbDims - 2; i >= 0; i--)
{
stride *= dims.d[i + 1];
if (dims.d[i] == 1)
{
strides[i + 1] = 0;
}
else
{
strides[i + 1] = stride;
}
}
// Batch stride: 0 when the tensor is broadcast across the batch, otherwise
// the full per-sample element count.
strides[0] = isBroadcastAccrossN ? 0 : stride * dims.d[0];
#ifdef DEBUG
for (int i : strides)
std::cout << i << " ";
std::cout << std::endl;
#endif
return strides;
}
// The plugin produces a single output tensor.
int AttentionPlugin::getNbOutputs() const
{
return 1;
}
// No resources to acquire at engine-build/runtime start.
int AttentionPlugin::initialize()
{
return 0;
}
// No resources to release.
void AttentionPlugin::terminate()
{
}
//! Computes the output shape from the four 3-D inputs.
//! Output shape: (inputs[1].d[0], inputs[0].d[1], 1).
Dims AttentionPlugin::getOutputDimensions(int index, const Dims* inputs, int nbInputDims)
{
    assert(index >= 0 && index < this->getNbOutputs());
    // The plugin expects exactly four inputs; validate the count before
    // indexing inputs[0..3].
    assert(nbInputDims == 4);
    assert(inputs[0].nbDims == 3);
    assert(inputs[1].nbDims == 3);
    assert(inputs[2].nbDims == 3);
    assert(inputs[3].nbDims == 3);
    Dims outputDims;
    outputDims.nbDims = 3;
    outputDims.d[0] = inputs[1].d[0];
    outputDims.d[1] = inputs[0].d[1];
    outputDims.d[2] = 1;
    outputDims.type[0] = DimensionType::kCHANNEL;
    outputDims.type[1] = DimensionType::kSPATIAL;
    outputDims.type[2] = DimensionType::kSPATIAL;
    return outputDims;
}
// No scratch memory is needed beyond the input/output tensors.
size_t AttentionPlugin::getWorkspaceSize(int maxBatchSize) const
{
return 0;
}
// Launches the fused attention kernel on the given CUDA stream; the single
// result tensor is written to outputs[0].
int AttentionPlugin::enqueue(
int batchSize, const void* const* inputs, void** outputs, void* workspace, cudaStream_t stream)
{
launchAttentionFusedKernel(params, batchSize, inputs, outputs[0], stream);
return 0;
}
//! Size in bytes of the serialized plugin state.
//! Must stay in lock-step, field for field, with serialize() and the
//! deserializing constructor.
size_t AttentionPlugin::getSerializationSize() const
{
    // Element count (int) plus the stored dimension count (size_t).
    size_t total = sizeof(params.nbElements) + sizeof(params.inputStrides[0].size());
    // Strides of the four inputs, one int per entry.
    for (int i = 0; i < 4; i++)
    {
        total += params.inputStrides[i].size() * sizeof(int);
    }
    // Strides of the output.
    total += params.resStrides.size() * sizeof(int);
    // Per-dimension lengths.
    total += params.len.size() * sizeof(int);
    // Data type, stored as a plain int.
    total += sizeof(int);
    return total;
}
//! Writes the plugin state into the given buffer.
//! Field order must match the deserializing constructor exactly, and the
//! byte count must equal getSerializationSize().
void AttentionPlugin::serialize(void* buffer) const
{
//! Serialize each member of the AttentionParams struct
char* d = static_cast<char*>(buffer);
const char* a = d;   // start of the buffer, used to verify the final length
writeToBuffer(d, params.nbElements);
//! Store the number of dimensions to make reading/writing the strides easier
writeToBuffer(d, params.inputStrides[0].size());
//! Store the strides of the inputs
for(int i = 0; i < 4; i++)
{
for(unsigned int j = 0; j < params.inputStrides[i].size(); j++)
{
writeToBuffer(d, params.inputStrides[i][j]);
}
}
//! Store the strides of the output
for(unsigned int j = 0; j < params.resStrides.size(); j++)
{
writeToBuffer(d, params.resStrides[j]);
}
//! Store the lengths in each dimension
for(unsigned int j = 0; j < params.len.size(); j++)
{
writeToBuffer(d, params.len[j]);
}
// Data type is stored as a plain int.
writeToBuffer(d, (int)params.type);
// The number of bytes written must equal getSerializationSize().
assert(d == a + getSerializationSize());
}
//! Called by TensorRT once the network shapes/types are fixed: derives and
//! caches all kernel parameters (input/output strides, dimension lengths,
//! output element count, data type) into params.
void AttentionPlugin::configurePlugin(const Dims* inputDims, int nbInputs, const Dims* outputDims, int nbOutputs,
const DataType* inputTypes, const DataType* outputTypes, const bool* inputIsBroadcast,
const bool* outputIsBroadcast, nvinfer1::PluginFormat format, int maxBatchSize)
{
assert(nbInputs == 4);
assert(nbOutputs == 1);
assert(format == PluginFormat::kNCHW);
//! Compute Strides from inputDims and outputDims
params.inputStrides[0] = computeStrides(inputDims[0], inputIsBroadcast[0]);
params.inputStrides[1] = computeStrides(inputDims[1], inputIsBroadcast[1]);
params.inputStrides[2] = computeStrides(inputDims[2], inputIsBroadcast[2]);
params.inputStrides[3] = computeStrides(inputDims[3], inputIsBroadcast[3]);
// The output is never broadcast across the batch.
params.resStrides = computeStrides(outputDims[0], false);
params.nbElements = 1;
// All inputs share one type (enforced by supportsFormat); use the first.
params.type = inputTypes[0];
//! Add placeholder for batchSize dimension length
params.len.push_back(1);
//! Calculate number of elements in the output and the length of each dimension
for (int i = 0; i < outputDims[0].nbDims; i++)
{
params.nbElements *= outputDims[0].d[i];
params.len.push_back(outputDims[0].d[i]);
}
#ifdef DEBUG
std::cout << "Printing the lengths:\n";
for (int i : params.len)
std::cout << i << " ";
std::cout << std::endl;
#endif
}
//! Only FP32/FP16 in NCHW layout are supported by this plugin.
bool AttentionPlugin::supportsFormat(DataType type, PluginFormat format) const
{
return ((type == DataType::kFLOAT || type == DataType::kHALF) && format == PluginFormat::kNCHW);
}
//! Plugin type name used by the plugin registry to match creator and plugin.
const char* AttentionPlugin::getPluginType() const
{
return ATTENTION_PLUGIN_NAME;
}
//! Plugin version string; must agree with AttentionPluginCreator.
const char* AttentionPlugin::getPluginVersion() const
{
return ATTENTION_PLUGIN_VERSION;
}
//! TensorRT owns plugin lifetime; destroy() is its hook to free this object.
void AttentionPlugin::destroy()
{
delete this;
}
//! Creates a functionally identical copy; params are copied by value and the
//! namespace is propagated so the clone registers identically.
IPluginV2Ext* AttentionPlugin::clone() const
{
auto* plugin = new AttentionPlugin(mLayerName);
plugin->setPluginNamespace(mNamespace.c_str());
plugin->params = params;
return plugin;
}
//! Output type mirrors the first input's type (see configurePlugin).
nvinfer1::DataType AttentionPlugin::getOutputDataType(
int index, const nvinfer1::DataType* inputTypes, int nbInputs) const
{
return inputTypes[0];
}
//! The single output is never broadcast across the batch.
bool AttentionPlugin::isOutputBroadcastAcrossBatch(int outputIndex, const bool* inputIsBroadcasted, int nbInputs) const
{
return false;
}
//! Returns whether input `inputIndex` may be broadcast across the batch
//! dimension. All four attention inputs (indices 0-3) support broadcasting;
//! any other index does not.
//!
//! The original switch enumerated cases 0-3 individually, each returning
//! true, and was followed by an unreachable `return false;` — a simple
//! range check is equivalent and removes the dead code.
bool AttentionPlugin::canBroadcastInputAcrossBatch(int inputIndex) const
{
    return inputIndex >= 0 && inputIndex < 4;
}
//! Creator has no fields to initialize; plugin parameters are derived at
//! configurePlugin time rather than from PluginField attributes.
AttentionPluginCreator::AttentionPluginCreator()
{
}
//! Must match AttentionPlugin::getPluginType().
const char* AttentionPluginCreator::getPluginName() const
{
return ATTENTION_PLUGIN_NAME;
}
//! Must match AttentionPlugin::getPluginVersion().
const char* AttentionPluginCreator::getPluginVersion() const
{
return ATTENTION_PLUGIN_VERSION;
}
//! Returns the (empty) field collection; see constructor note above.
const PluginFieldCollection* AttentionPluginCreator::getFieldNames()
{
return &mFC;
}
//! Builds a fresh plugin; `fc` is ignored because configuration happens
//! later in configurePlugin().
IPluginV2* AttentionPluginCreator::createPlugin(const char* name, const PluginFieldCollection* fc)
{
return new AttentionPlugin(name);
}
//! Reconstructs a plugin from the bytes written by AttentionPlugin::serialize().
IPluginV2* AttentionPluginCreator::deserializePlugin(const char* name, const void* serialData, size_t serialLength)
{
return new AttentionPlugin(name, serialData, serialLength);
}
void AttentionPluginCreator::setPluginNamespace(const char* libNamespace)
{
mNamespace = libNamespace;
}
const char* AttentionPluginCreator::getPluginNamespace() const
{
return mNamespace.c_str();
}
|
mlperf/inference_results_v0.5
|
open/NVIDIA/code/gnmt/tensorrt/src/plugin/attentionPlugin.cpp
|
C++
|
apache-2.0
| 9,784
|
// Copyright 2021 The Kubeflow Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import dagre from 'dagre';
import {
ArrowHeadType,
Edge,
Elements,
FlowElement,
isNode,
Node,
Position,
} from 'react-flow-renderer';
import ArtifactNode from 'src/components/graph/ArtifactNode';
import { FlowElementDataBase } from 'src/components/graph/Constants';
import ExecutionNode from 'src/components/graph/ExecutionNode';
import SubDagNode from 'src/components/graph/SubDagNode';
import { ComponentSpec, PipelineSpec, PipelineTaskSpec } from 'src/generated/pipeline_spec';
// Fixed node dimensions fed to the dagre layout engine (px).
const nodeWidth = 224;
const nodeHeight = 48;
// Custom Reactflow node type identifiers; keys of NODE_TYPES below.
export enum NodeTypeNames {
EXECUTION = 'EXECUTION',
ARTIFACT = 'ARTIFACT',
SUB_DAG = 'SUB_DAG',
}
// Maps node type name -> custom React component used to render it.
export const NODE_TYPES = {
[NodeTypeNames.EXECUTION]: ExecutionNode,
[NodeTypeNames.ARTIFACT]: ArtifactNode,
[NodeTypeNames.SUB_DAG]: SubDagNode,
};
// A task is either a leaf executor or a nested DAG (expandable sub-graph).
export enum TaskType {
EXECUTOR,
DAG,
}
// Pairs a component's reference name with its resolved ComponentSpec.
interface ComponentSpecPair {
componentRefName: string;
componentSpec: ComponentSpec;
}
export type PipelineFlowElement = FlowElement<FlowElementDataBase>;
/**
* Convert static IR to Reactflow compatible graph description.
* @param spec KFP v2 Pipeline definition
* @returns Graph visualization as Reactflow elements (nodes and edges)
*/
export function convertFlowElements(spec: PipelineSpec): Elements {
  // Top-level conversion: each task becomes a node, each dependency an edge.
  const { root } = spec;
  if (!root) {
    throw new Error('root not found in pipeline spec.');
  }
  return buildDag(spec, root);
}
/**
 * Convert a designated sub-DAG layer of the pipeline to Reactflow elements.
 * Walks the `layers` path (layers[0] is the root) down through nested DAG
 * tasks to locate the target ComponentSpec, then builds its graph.
 * @param spec KFP v2 Pipeline definition
 * @param layers Path of task keys from the root to the target sub-DAG
 * @throws when any step of the path cannot be resolved
 */
export function convertSubDagToFlowElements(spec: PipelineSpec, layers: string[]): Elements {
  let componentSpec = spec.root;
  if (!componentSpec) {
    throw new Error('root not found in pipeline spec.');
  }
  const componentsMap = spec.components;
  for (let index = 1; index < layers.length; index++) {
    const tasksMap:
      | {
          [key: string]: PipelineTaskSpec;
        }
      | undefined = componentSpec.dag?.tasks;
    if (!tasksMap) {
      throw new Error("Unable to get task maps from Pipeline Spec's dag.");
    }
    const pipelineTaskSpec: PipelineTaskSpec = tasksMap[layers[index]];
    const componentRef = pipelineTaskSpec.componentRef;
    const componentName = componentRef?.name;
    if (!componentName) {
      // BUG FIX: `'...' + name || 'Task name unknown'` applied `||` to the
      // already-concatenated (always truthy) string, so the fallback could
      // never fire; parenthesizing makes an unnamed task report
      // 'Task name unknown' as intended.
      throw new Error(
        'Unable to find the component reference for task name: ' +
          (pipelineTaskSpec.taskInfo?.name || 'Task name unknown'),
      );
    }
    componentSpec = componentsMap[componentName];
    if (!componentSpec) {
      throw new Error('Component not found in pipeline spec. Component name: ' + componentName);
    }
  }
  return buildDag(spec, componentSpec);
}
/**
* Build single layer graph of a pipeline definition in Reactflow.
* @param pipelineSpec Full pipeline definition
* @param componentSpec Designated layer of a DAG/sub-DAG as part of pipelineSpec
* @returns Graph visualization as Reactflow elements (nodes and edges)
*/
function buildDag(pipelineSpec: PipelineSpec, componentSpec: ComponentSpec): Elements {
const dag = componentSpec.dag;
if (!dag) {
throw new Error('dag not found in component spec.');
}
const componentsMap = pipelineSpec.components || {};
let flowGraph: FlowElement[] = [];
const tasksMap = dag.tasks || {};
console.log('tasksMap count: ' + tasksMap.length);
addTaskNodes(tasksMap, componentsMap, flowGraph);
addArtifactNodes(tasksMap, componentsMap, flowGraph);
addTaskToArtifactEdges(tasksMap, componentsMap, flowGraph);
addArtifactToTaskEdges(tasksMap, flowGraph);
addTaskToTaskEdges(tasksMap, flowGraph);
return buildGraphLayout(flowGraph);
}
// Adds one Reactflow node per task. A task whose component carries an
// executorLabel becomes an EXECUTION node; one whose component carries a
// nested dag becomes an expandable SUB_DAG node. Positions are placeholders
// overwritten later by buildGraphLayout.
function addTaskNodes(
tasksMap: {
[key: string]: PipelineTaskSpec;
},
componentsMap: { [key: string]: ComponentSpec },
flowGraph: PipelineFlowElement[],
) {
// Add tasks as nodes to the Reactflow graph.
for (let taskKey in tasksMap) {
const taskSpec = tasksMap[taskKey];
const componentPair = getComponent(taskKey, taskSpec, componentsMap);
if (componentPair === undefined) {
console.warn("Component for specific task doesn't exist.");
continue;
}
const { componentRefName, componentSpec } = componentPair;
// Component can be either an executor or subDAG,
// If this is executor, add the node directly.
// If subDAG, add a node which can represent expandable graph.
const name = taskSpec.taskInfo?.name;
if (!name) {
console.warn("Task name doesn't exist.");
continue;
}
if (componentSpec.executorLabel && componentSpec.executorLabel.length > 0) {
// executor label exists means this is a single execution node.
const node: Node<FlowElementDataBase> = {
id: getTaskNodeKey(taskKey), // Assume that key of `tasks` in `dag` is unique.
data: { label: name, taskType: TaskType.EXECUTOR },
position: { x: 100, y: 200 },
type: NodeTypeNames.EXECUTION,
};
flowGraph.push(node);
} else if (componentSpec.dag) {
// dag exists means this is a sub-DAG instance.
const node: Node<FlowElementDataBase> = {
id: getTaskNodeKey(taskKey),
data: { label: 'DAG: ' + name, taskType: TaskType.DAG },
position: { x: 100, y: 200 },
type: NodeTypeNames.SUB_DAG,
};
flowGraph.push(node);
} else {
console.warn('Component ' + componentRefName + ' has neither `executorLabel` nor `dag`');
}
}
}
// Adds one ARTIFACT node per output artifact of every task.
// Output: components -> key/value -> outputDefinitions -> artifacts -> name/key
// (Input artifacts are handled via edges, not nodes.)
function addArtifactNodes(
  tasksMap: {
    [key: string]: PipelineTaskSpec;
  },
  componentsMap: { [key: string]: ComponentSpec },
  flowGraph: PipelineFlowElement[],
) {
  for (let taskKey in tasksMap) {
    const taskSpec = tasksMap[taskKey];
    const componentPair = getComponent(taskKey, taskSpec, componentsMap);
    if (componentPair === undefined) {
      console.warn("Component for specific task doesn't exist.");
      continue;
    }
    const { componentSpec } = componentPair;
    const outputDefinitions = componentSpec.outputDefinitions;
    if (!outputDefinitions) {
      // BUG FIX: was `return`, which aborted the whole function the first
      // time a component without outputDefinitions was seen, silently
      // dropping artifact nodes for all remaining tasks. Only this task
      // should be skipped.
      continue;
    }
    const artifacts = outputDefinitions.artifacts;
    for (let artifactKey in artifacts) {
      const node: Node<FlowElementDataBase> = {
        id: getArtifactNodeKey(taskKey, artifactKey),
        data: { label: artifactKey },
        position: { x: 300, y: 200 },
        type: NodeTypeNames.ARTIFACT,
      };
      flowGraph.push(node);
    }
  }
}
// Adds an edge from each task node to each of its output artifact nodes.
// Task to Artifact: components -> key/value -> outputDefinitions -> artifacts -> key
function addTaskToArtifactEdges(
  tasksMap: {
    [key: string]: PipelineTaskSpec;
  },
  componentsMap: { [key: string]: ComponentSpec },
  flowGraph: PipelineFlowElement[],
) {
  for (let taskKey in tasksMap) {
    const taskSpec = tasksMap[taskKey];
    const componentPair = getComponent(taskKey, taskSpec, componentsMap);
    if (componentPair === undefined) {
      console.warn("Component for specific task doesn't exist.");
      continue;
    }
    const { componentSpec } = componentPair;
    const outputDefinitions = componentSpec.outputDefinitions;
    if (!outputDefinitions) {
      // BUG FIX: was `return`, which stopped edge generation for every task
      // after the first one lacking outputDefinitions; skip just this task.
      continue;
    }
    const artifacts = outputDefinitions.artifacts;
    for (let artifactKey in artifacts) {
      const edge: Edge = {
        id: getTaskToArtifactEdgeKey(taskKey, artifactKey),
        source: getTaskNodeKey(taskKey),
        target: getArtifactNodeKey(taskKey, artifactKey),
        arrowHeadType: ArrowHeadType.ArrowClosed,
      };
      flowGraph.push(edge);
    }
  }
}
// Adds an edge from a producer task's output artifact node to each consuming
// task node.
// Artifact to Task: root -> dag -> tasks -> key/value -> inputs -> artifacts -> key/value
// -> taskOutputArtifact -> outputArtifactKey+producerTask
function addArtifactToTaskEdges(
tasksMap: {
[key: string]: PipelineTaskSpec;
},
flowGraph: PipelineFlowElement[],
) {
for (let inputTaskKey in tasksMap) {
const taskSpec = tasksMap[inputTaskKey];
const inputs = taskSpec.inputs;
if (!inputs) {
continue;
}
const artifacts = inputs.artifacts;
for (let artifactKey in artifacts) {
const artifactSpec = artifacts[artifactKey];
// Only artifacts produced by another task yield an edge; constant or
// imported artifacts have no producer node to connect from.
const taskOutputArtifact = artifactSpec.taskOutputArtifact;
if (!taskOutputArtifact) {
continue;
}
const outputArtifactKey = taskOutputArtifact.outputArtifactKey;
const producerTask = taskOutputArtifact.producerTask;
const edge: Edge = {
id: getArtifactToTaskEdgeKey(outputArtifactKey, inputTaskKey),
source: getArtifactNodeKey(producerTask, outputArtifactKey),
target: getTaskNodeKey(inputTaskKey),
arrowHeadType: ArrowHeadType.ArrowClosed,
};
flowGraph.push(edge);
}
}
}
// Adds direct task-to-task edges from (a) output-parameter consumption and
// (b) explicit dependentTasks lists. `edgeKeys` de-duplicates edges that
// arise from both sources or from multiple parameters between the same pair.
function addTaskToTaskEdges(
  tasksMap: {
    [key: string]: PipelineTaskSpec;
  },
  flowGraph: PipelineFlowElement[],
) {
  const edgeKeys = new Map<String, Edge>();
  // Input Parameters: inputs => parameters => taskOutputParameter => producerTask
  for (let inputTaskKey in tasksMap) {
    const taskSpec = tasksMap[inputTaskKey];
    const inputs = taskSpec.inputs;
    if (!inputs) {
      continue;
    }
    const parameters = inputs.parameters;
    for (let paramName in parameters) {
      const paramSpec = parameters[paramName];
      const taskOutputParameter = paramSpec.taskOutputParameter;
      if (taskOutputParameter) {
        const producerTask = taskOutputParameter.producerTask;
        const edgeId = getTaskToTaskEdgeKey(producerTask, inputTaskKey);
        if (edgeKeys.has(edgeId)) {
          // BUG FIX: was `return`, which exited the whole function on the
          // first duplicate edge and silently dropped every edge not yet
          // visited; a duplicate should only skip this parameter.
          continue;
        }
        const edge: Edge = {
          // id is combination of producerTask+inputTask
          id: edgeId,
          source: getTaskNodeKey(producerTask),
          target: getTaskNodeKey(inputTaskKey),
          // TODO(zijianjoy): This node styling is temporarily.
          arrowHeadType: ArrowHeadType.ArrowClosed,
        };
        flowGraph.push(edge);
        edgeKeys.set(edgeId, edge);
      }
    }
  }
  // DependentTasks: task => dependentTasks list
  for (let inputTaskKey in tasksMap) {
    const taskSpec = tasksMap[inputTaskKey];
    const dependentTasks = taskSpec.dependentTasks;
    if (!dependentTasks) {
      continue;
    }
    dependentTasks.forEach(upStreamTaskName => {
      const edgeId = getTaskToTaskEdgeKey(upStreamTaskName, inputTaskKey);
      if (edgeKeys.has(edgeId)) {
        // `return` inside forEach only skips this dependency (correct here).
        return;
      }
      const edge: Edge = {
        // id is combination of producerTask+inputTask
        id: edgeId,
        source: getTaskNodeKey(upStreamTaskName),
        target: getTaskNodeKey(inputTaskKey),
        // TODO(zijianjoy): This node styling is temporarily.
        arrowHeadType: ArrowHeadType.ArrowClosed,
      };
      flowGraph.push(edge);
      edgeKeys.set(edgeId, edge);
    });
  }
}
// Runs a top-to-bottom dagre layout over the collected elements and writes
// the computed positions back onto the nodes; edges pass through unchanged.
export function buildGraphLayout(flowGraph: PipelineFlowElement[]) {
const dagreGraph = new dagre.graphlib.Graph();
dagreGraph.setDefaultEdgeLabel(() => ({}));
dagreGraph.setGraph({ rankdir: 'TB' });
// Mirror the Reactflow elements into a dagre graph (fixed node size).
flowGraph.forEach(el => {
if (isNode(el)) {
dagreGraph.setNode(el.id, { width: nodeWidth, height: nodeHeight });
} else {
dagreGraph.setEdge(el.source, el.target);
}
});
dagre.layout(dagreGraph);
return flowGraph.map(el => {
if (isNode(el)) {
const nodeWithPosition = dagreGraph.node(el.id);
el.sourcePosition = Position.Bottom;
el.targetPosition = Position.Top;
// unfortunately we need this little hack to pass a slightly different position
// to notify react flow about the change. Moreover we are shifting the dagre node position
// (anchor=center center) to the top left so it matches the react flow node anchor point (top left).
el.position = {
x: nodeWithPosition.x - nodeWidth / 2 + Math.random() / 1000,
y: nodeWithPosition.y - nodeHeight / 2,
};
}
return el;
});
}
/**
 * Resolve a task's component reference against the pipeline's components map.
 * @returns the reference name and its ComponentSpec, or undefined (with a
 * console warning) when any link in the chain is missing.
 */
function getComponent(
  taskKey: string,
  taskSpec: PipelineTaskSpec,
  componentsMap: { [key: string]: ComponentSpec },
): ComponentSpecPair | undefined {
  const componentRef = taskSpec.componentRef;
  if (componentRef === undefined) {
    console.warn('ComponentRef not found for task: ' + taskKey);
    return undefined;
  }
  const componentRefName = componentRef.name;
  const hasComponent = componentRefName in componentsMap;
  if (!hasComponent) {
    console.warn(
      `Cannot find componentRef name ${componentRefName} from pipeline's components Map`,
    );
    return undefined;
  }
  const componentSpec = componentsMap[componentRefName];
  if (componentSpec === undefined) {
    console.warn('Component undefined for componentRef name: ' + componentRefName);
    return undefined;
  }
  return { componentRefName, componentSpec };
}
// Task node ids are 'task.<taskKey>'.
const TASK_NODE_KEY_PREFIX = 'task.';
function getTaskNodeKey(taskKey: string) {
  return TASK_NODE_KEY_PREFIX + taskKey;
}
/**
 * Extract the task key from a task node key.
 * @throws when nodeKey does not carry the task prefix.
 */
export function getTaskKeyFromNodeKey(nodeKey: string) {
  if (!isTaskNode(nodeKey)) {
    throw new Error('Task nodeKey: ' + nodeKey + " doesn't start with " + TASK_NODE_KEY_PREFIX);
  }
  // `substr` is deprecated; `slice` with the same start index is equivalent.
  return nodeKey.slice(TASK_NODE_KEY_PREFIX.length);
}
export function isTaskNode(nodeKey: string) {
  return nodeKey.startsWith(TASK_NODE_KEY_PREFIX);
}
// Artifact node ids are 'artifact.<producerTaskKey>.<outputArtifactKey>'.
const ARTIFACT_NODE_KEY_PREFIX = 'artifact.';
export function getArtifactNodeKey(taskKey: string, artifactKey: string): string {
  // '.' is a safe separator because task and artifact names cannot contain
  // dots in python.
  return `${ARTIFACT_NODE_KEY_PREFIX}${taskKey}.${artifactKey}`;
}
export function isArtifactNode(nodeKey: string) {
  return nodeKey.startsWith(ARTIFACT_NODE_KEY_PREFIX);
}
/**
 * Split an artifact node key back into [taskName, artifactName].
 * @throws when the key lacks the artifact prefix or the 3-part shape.
 */
export function getKeysFromArtifactNodeKey(nodeKey: string) {
  const sections = nodeKey.split('.');
  if (!isArtifactNode(nodeKey)) {
    throw new Error(
      'Artifact nodeKey: ' + nodeKey + " doesn't start with " + ARTIFACT_NODE_KEY_PREFIX,
    );
  }
  if (sections.length !== 3) {
    throw new Error(
      'Artifact nodeKey: ' + nodeKey + " doesn't have format artifact.taskName.artifactName ",
    );
  }
  const [, taskName, artifactName] = sections;
  return [taskName, artifactName];
}
// Edge id: 'outedge.<producerTaskKey>.<outputArtifactKey>'
function getTaskToArtifactEdgeKey(taskKey: string, artifactKey: string): string {
  return `outedge.${taskKey}.${artifactKey}`;
}
// Edge id: 'inedge.<artifactKey>.<inputTaskKey>'
function getArtifactToTaskEdgeKey(outputArtifactKey: string, inputTaskKey: string): string {
  return `inedge.${outputArtifactKey}.${inputTaskKey}`;
}
// Edge id: 'paramedge.<producerTaskKey>.<inputTaskKey>'
function getTaskToTaskEdgeKey(producerTask: string, inputTaskKey: string) {
  return `paramedge.${producerTask}.${inputTaskKey}`;
}
|
kubeflow/pipelines
|
frontend/src/lib/v2/StaticFlow.ts
|
TypeScript
|
apache-2.0
| 15,422
|
//FirstView Component Constructor
// Tracks all "que" (hexagram line) views currently on screen so reset and
// highlight can reach them; shared with the helpers below.
var queViewArray = [];
function FirstView() {
var QueDetailWindow = require('ui/common/QueDetailWindow');
var mainBackgroundColor = "gray";
// Accumulated coin-toss values (0/1) for the six lines, plus the moving
// line index appended at the end — see the push handler below.
var queWholeValue = [];
//create object instance, a parasitic subclass of Observable
var self = Ti.UI.createView({
backgroundColor: mainBackgroundColor
});
// Each of the 6 lines gets 1/6 of the screen height.
var queHeight = Ti.Platform.displayCaps.platformHeight / 6;
var pushButtonView = Ti.UI.createImageView({
image: "/images/push_button_250_250.png",
zIndex: 999
});
self.add(pushButtonView);
var resetButtonView = Ti.UI.createImageView({
image: "/images/reset.jpg",
top: 10,
right: 10,
width: 50,
height: 50,
zIndex: 999
});
self.add(resetButtonView);
var queIndex = 0;
pushButtonView.addEventListener("click", function(e) {
if (queIndex < 6) {
// Coin toss: 1 or 0 decides the line's color (black/white).
var queValue = Math.random() >= 0.5 ? 1 : 0;
console.log(queValue);
queWholeValue.push(queValue);
var options = {
queIndex: queIndex,
queHeight: queHeight,
backgroundColor: mainBackgroundColor
}
var queView = createQueView(queValue, options);
self.add(queView);
} else if (queIndex == 6) { // all 6 lines drawn: pick the moving line ("hao dong")
// Randomly choose which of the 6 lines is the moving line and highlight it.
var haoDong = Math.floor(Math.random() * 6);
highlightHao(haoDong);
// Reverse so the lines read bottom-up, as the hexagram is interpreted.
queWholeValue.reverse();
// Append the moving-line index to the values passed on.
queWholeValue.push(haoDong);
console.log(queWholeValue.toString());
// Reading complete: hand the result to the detail window.
self.hide();
var queWindow = new QueDetailWindow({
data: queWholeValue
});
queWindow.open();
}
queIndex++;
});
// Reset wipes both the collected values and the rendered line views.
resetButtonView.addEventListener("click", function(e) {
queWholeValue = [];
queIndex = 0;
removeAllQueViews(self);
queViewArray = [];
});
return self;
}
// Builds one horizontal bar representing a single hexagram line.
// queValue 1 renders black, 0 renders white; the bar fills 90% of its
// allotted slot and is stacked by queIndex. The view is also recorded in
// queViewArray so it can be removed or highlighted later.
function createQueView(queValue, options) {
    var slotIndex = options["queIndex"];
    var barHeight = options["queHeight"] * 90 / 100;
    var borderShade = options["backgroundColor"];
    var fillColor = queValue == 1 ? "black" : "white";
    var bar = Ti.UI.createView({
        top: barHeight * slotIndex,
        height: barHeight,
        width: "100%",
        borderColor: borderShade,
        borderWidth: 5,
        borderRadius: 20,
        backgroundColor: fillColor
    });
    queViewArray.push(bar);
    return bar;
}
// Detaches every tracked que view from the given window. The caller is
// responsible for clearing queViewArray afterwards (see the reset handler).
function removeAllQueViews(window) {
    queViewArray.forEach(function(view) {
        window.remove(view);
    });
}
// Highlights the moving line ("hao dong") at `index` in dark red.
// Guards against an index with no rendered view (e.g. after a reset raced
// the final click), where the original would throw on `undefined`.
function highlightHao(index) {
    var view = queViewArray[index];
    if (!view) {
        return;
    }
    view.backgroundColor = "#900";
}
module.exports = FirstView;
|
mreigen/KhongMinhThanToan
|
build/mobileweb/ui/common/FirstView.js
|
JavaScript
|
apache-2.0
| 2,724
|
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dfp.axis.v201502.ratecardservice;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.dfp.axis.factory.DfpServices;
import com.google.api.ads.dfp.axis.utils.v201502.StatementBuilder;
import com.google.api.ads.dfp.axis.v201502.RateCard;
import com.google.api.ads.dfp.axis.v201502.RateCardPage;
import com.google.api.ads.dfp.axis.v201502.RateCardServiceInterface;
import com.google.api.ads.dfp.lib.client.DfpSession;
import com.google.api.client.auth.oauth2.Credential;
/**
* This example gets all rate cards.
*
* Credentials and properties in {@code fromFile()} are pulled from the
* "ads.properties" file. See README for more info.
*/
public class GetAllRateCards {
  /**
   * Runs the example: pages through all rate cards in the network, printing
   * id, name and currency for each.
   *
   * @param dfpServices the services factory
   * @param session the authenticated DFP session
   * @throws Exception if the API request fails
   */
  public static void runExample(DfpServices dfpServices, DfpSession session) throws Exception {
    // Get the RateCardService.
    RateCardServiceInterface rateCardService =
        dfpServices.get(session, RateCardServiceInterface.class);
    // Create a statement to get all rate cards, ordered for stable paging.
    StatementBuilder statementBuilder = new StatementBuilder()
        .orderBy("id ASC")
        .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT);
    // Default for total result set size.
    int totalResultSetSize = 0;
    do {
      // Get rate cards by statement (one page per iteration).
      RateCardPage page = rateCardService.getRateCardsByStatement(statementBuilder.toStatement());
      if (page.getResults() != null) {
        totalResultSetSize = page.getTotalResultSetSize();
        int i = page.getStartIndex();
        for (RateCard rateCard : page.getResults()) {
          System.out.printf(
              "%d) Rate card with ID \"%d\", name \"%s\", and currency \"%s\" was found.%n", i++,
              rateCard.getId(), rateCard.getName(), rateCard.getCurrencyCode());
        }
      }
      // Advance to the next page.
      statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
    } while (statementBuilder.getOffset() < totalResultSetSize);
    System.out.printf("Number of results found: %d%n", totalResultSetSize);
  }
  /**
   * Entry point: builds credentials and a session from "ads.properties",
   * then runs the example.
   */
  public static void main(String[] args) throws Exception {
    // Generate a refreshable OAuth2 credential.
    Credential oAuth2Credential = new OfflineCredentials.Builder()
        .forApi(Api.DFP)
        .fromFile()
        .build()
        .generateCredential();
    // Construct a DfpSession.
    DfpSession session = new DfpSession.Builder()
        .fromFile()
        .withOAuth2Credential(oAuth2Credential)
        .build();
    DfpServices dfpServices = new DfpServices();
    runExample(dfpServices, session);
  }
}
|
shyTNT/googleads-java-lib
|
examples/dfp_axis/src/main/java/dfp/axis/v201502/ratecardservice/GetAllRateCards.java
|
Java
|
apache-2.0
| 3,229
|
<?php
namespace Basho\Riak\Command\Builder\Search;
use Basho\Riak;
use Basho\Riak\Command;
/**
* Builds the command to fetch a collection of objects from Riak using Yokozuna search
*
* @author Christopher Mancini <cmancini at basho d0t com>
*/
class FetchObjects extends Command\Builder implements Command\BuilderInterface
{
    // Solr default field ('df') and default operation ('op') mirrors of the
    // request parameters, exposed via the getters below.
    protected $default_field = '';
    protected $default_operation = '';
    protected $index_name = '';

    public function __construct(Riak $riak)
    {
        parent::__construct($riak);
        // Sensible Solr defaults: JSON response, first 10 rows.
        $this->parameters['wt'] = 'json';
        $this->parameters['rows'] = 10;
        $this->parameters['start'] = 0;
    }

    /**
     * {@inheritdoc}
     *
     * @return Command\Search\Fetch;
     */
    public function build()
    {
        $this->validate();
        return new Command\Search\Fetch($this);
    }

    /**
     * {@inheritdoc}
     */
    public function validate()
    {
        $this->required('IndexName');
        $this->required('Query');
        $this->required('MaxRows');
        $this->required('StartRow');
    }

    public function withIndexName($name)
    {
        $this->index_name = $name;
        return $this;
    }

    public function getIndexName()
    {
        return $this->index_name;
    }

    /**
     * @return string
     */
    public function getQuery()
    {
        return $this->parameters['q'];
    }

    /**
     * @return int
     */
    public function getMaxRows()
    {
        return $this->parameters['rows'];
    }

    /**
     * @return int
     */
    public function getStartRow()
    {
        return $this->parameters['start'];
    }

    /**
     * @return string
     */
    public function getFilterQuery()
    {
        return $this->parameters['fq'];
    }

    /**
     * @return string
     */
    public function getSortField()
    {
        return $this->parameters['sort'];
    }

    /**
     * @return string
     */
    public function getDefaultField()
    {
        return $this->default_field;
    }

    /**
     * @return string
     */
    public function getDefaultOperation()
    {
        return $this->default_operation;
    }

    /**
     * @return string
     */
    public function getReturnFields()
    {
        return $this->parameters['fl'];
    }

    public function withQuery($query)
    {
        $this->parameters['q'] = $query;
        return $this;
    }

    public function withMaxRows($rows)
    {
        $this->parameters['rows'] = $rows;
        return $this;
    }

    public function withStartRow($row_num)
    {
        $this->parameters['start'] = $row_num;
        return $this;
    }

    public function withSortField($field_name)
    {
        $this->parameters['sort'] = $field_name;
        return $this;
    }

    public function withFilterQuery($filter_query)
    {
        $this->parameters['fq'] = $filter_query;
        return $this;
    }

    public function withDefaultField($default_field)
    {
        // BUG FIX: the original only set $this->parameters['df'], so
        // getDefaultField() (which reads $this->default_field) always
        // returned ''. Keep both in sync.
        $this->default_field = $default_field;
        $this->parameters['df'] = $default_field;
        return $this;
    }

    public function withDefaultOperation($default_operation)
    {
        // BUG FIX: same inconsistency as withDefaultField() — keep the
        // property read by getDefaultOperation() in sync with 'op'.
        $this->default_operation = $default_operation;
        $this->parameters['op'] = $default_operation;
        return $this;
    }

    public function withReturnFields($return_fields)
    {
        $this->parameters['fl'] = $return_fields;
        return $this;
    }
}
|
basho/riak-php-client
|
src/Riak/Command/Builder/Search/FetchObjects.php
|
PHP
|
apache-2.0
| 3,346
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.examples.hello;
import org.apache.reef.runtime.hdinsight.client.UnsafeHDInsightRuntimeConfiguration;
import org.apache.reef.tang.exceptions.InjectionException;
import java.io.IOException;
/**
* HelloREEF running on HDInsight
*/
public class HelloHDInsight {
  /**
   * Launches the HelloREEF example on HDInsight, taking runtime settings
   * from environment variables (see UnsafeHDInsightRuntimeConfiguration).
   */
  public static void main(final String[] args) throws InjectionException, IOException {
    HelloREEFNoClient.runHelloReefWithoutClient(UnsafeHDInsightRuntimeConfiguration.fromEnvironment());
  }
}
|
beysims/reef
|
lang/java/reef-examples-hdinsight/src/main/java/org/apache/reef/examples/hello/HelloHDInsight.java
|
Java
|
apache-2.0
| 1,294
|
<?php
/**
* COmanage Registry Names Controller
*
* Portions licensed to the University Corporation for Advanced Internet
* Development, Inc. ("UCAID") under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* UCAID licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @link http://www.internet2.edu/comanage COmanage Project
* @package registry
* @since COmanage Registry v0.8.3
* @license Apache License, Version 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
*/
App::uses("MVPAController", "Controller");
class NamesController extends MVPAController {
// Class name, used by Cake
public $name = "Names";
// Establish pagination parameters for HTML views
public $paginate = array(
'limit' => 25,
'order' => array(
'Name.family' => 'asc'
)
);
public $edit_contains = array(
'CoPerson' => array('PrimaryName'),
'OrgIdentity' => array('PrimaryName')
);
public $view_contains = array(
'CoPerson' => array('PrimaryName'),
'OrgIdentity' => array('OrgIdentitySourceRecord' => array('OrgIdentitySource'),
'PrimaryName'),
'SourceName'
);
/**
* Callback to set relevant tab to open when redirecting to another page
*
* @since COmanage Registry v0.8.3
*/
function beforeFilter() {
// Ensure redirects from this controller land on the person's "name" tab.
$this->redirectTab = 'name';
parent::beforeFilter();
}
/**
* Callback before views are rendered.
*
* @since COmanage Registry v0.9.1
*/
// Exposes the required/permitted name fields to the (HTML) views, chosen by
// whether the name hangs off a CO Person or an Org Identity.
function beforeRender() {
parent::beforeRender();
if(!$this->request->is('restful')) {
// Set required and permitted fields according to whether or not this is attached to a CO Person (Role)
$pids = $this->parsePersonID();
if($pids['copersonid']) {
$this->set('vv_required_fields', $this->Name->CoPerson->Co->CoSetting->getRequiredNameFields($this->cur_co['Co']['id']));
$this->set('vv_permitted_fields', $this->Name->CoPerson->Co->CoSetting->getPermittedNameFields($this->cur_co['Co']['id']));
} else {
// Always use default settings for org identities
// NOTE(review): getRequiredNameFields() is called with no args here (defaults),
// but getPermittedNameFields() still receives the CO id — possibly it should
// also be argument-free for org identities; confirm against CoSetting.
$this->set('vv_required_fields', $this->Name->CoPerson->Co->CoSetting->getRequiredNameFields());
$this->set('vv_permitted_fields', $this->Name->CoPerson->Co->CoSetting->getPermittedNameFields($this->cur_co['Co']['id']));
}
}
}
/**
* Perform any dependency checks required prior to a delete operation.
* - postcondition: Session flash message updated (HTML) or HTTP status returned (REST)
*
* @since COmanage Registry v0.8.3
* @param Array Current data
* @return boolean true if dependency checks succeed, false otherwise.
*/
// Blocks deletion of a primary name; reports via HTTP 403 (REST) or a flash
// message (HTML). Returns true only when the delete may proceed.
function checkDeleteDependencies($curdata) {
// We can't delete a primary name
if($curdata['Name']['primary_name']) {
if($this->request->is('restful')) {
$this->Api->restResultHeader(403, "Primary Name Cannot Be Deleted");
} else {
$this->Flash->set(_txt('er.nm.primary',
array(generateCn($curdata['Name']))),
array('key' => 'error'));
}
return false;
}
return true;
}
/**
* Generate a display key to be used in messages such as "Item Added".
*
* @since COmanage Registry v0.9
* @param Array A cached object (eg: from prior to a delete)
* @return string A string to be included for display.
*/
public function generateDisplayKey($c = null) {
    // The original fetched $this->modelClass into an unused $model local;
    // removed as dead code. Prefer the name submitted with the current
    // request, then the cached copy (eg: from prior to a delete), finally
    // a placeholder.
    if(isset($this->request->data['Name'])) {
        return(generateCn($this->request->data['Name']));
    } elseif(isset($c['Name'])) {
        return(generateCn($c['Name']));
    }
    return("(?)");
}
/**
* Generate history records for a transaction. This method is intended to be
* overridden by model-specific controllers, and will be called from within a
* try{} block so that HistoryRecord->record() may be called without worrying
* about catching exceptions.
*
* @since COmanage Registry v0.8.3
* @param String Controller action causing the change
* @param Array Data provided as part of the action (for add/edit)
* @param Array Previous data (for delete/edit)
* @return boolean Whether the function completed successfully (which does not necessarily imply history was recorded)
*/
// Records a HistoryRecord for add/delete/edit/primary actions against a
// Name. Determines from the new (preferred) or old data whether the Name
// belongs to a CO Person or an Org Identity and records against the
// matching association.
public function generateHistory($action, $newdata, $olddata) {
// Note: We are overriding MVPAController::generateHistory here.
$copersonid = null;
$orgidentityid = null;
$HistoryRecord = null;
$cn = "";
// Find some pointers according to the data
if(isset($newdata['Name']['co_person_id'])) {
$copersonid = $newdata['Name']['co_person_id'];
$cn = generateCn($newdata['Name']);
$HistoryRecord = $this->Name->CoPerson->HistoryRecord;
} elseif(isset($newdata['Name']['org_identity_id'])) {
$orgidentityid = $newdata['Name']['org_identity_id'];
$cn = generateCn($newdata['Name']);
$HistoryRecord = $this->Name->OrgIdentity->HistoryRecord;
} elseif(isset($olddata['Name']['co_person_id'])) {
$copersonid = $olddata['Name']['co_person_id'];
$cn = generateCn($olddata['Name']);
$HistoryRecord = $this->Name->CoPerson->HistoryRecord;
} elseif(isset($olddata['Name']['org_identity_id'])) {
$orgidentityid = $olddata['Name']['org_identity_id'];
$cn = generateCn($olddata['Name']);
$HistoryRecord = $this->Name->OrgIdentity->HistoryRecord;
}
// Each branch records the same association info with an action-specific
// enum and localized message; the actor is the authenticated CO Person.
switch($action) {
case 'add':
$HistoryRecord->record($copersonid,
null,
$orgidentityid,
$this->Session->read('Auth.User.co_person_id'),
ActionEnum::NameAdded,
_txt('rs.added-a2', array(_txt('ct.names.1'), $cn)));
break;
case 'delete':
$HistoryRecord->record($copersonid,
null,
$orgidentityid,
$this->Session->read('Auth.User.co_person_id'),
ActionEnum::NameDeleted,
_txt('rs.deleted-a2', array(_txt('ct.names.1'), $cn)));
break;
case 'edit':
$HistoryRecord->record($copersonid,
null,
$orgidentityid,
$this->Session->read('Auth.User.co_person_id'),
ActionEnum::NameEdited,
_txt('rs.updated-a2', array(_txt('ct.names.1'), $cn)));
break;
case 'primary':
$HistoryRecord->record($copersonid,
null,
$orgidentityid,
$this->Session->read('Auth.User.co_person_id'),
ActionEnum::NamePrimary,
_txt('rs.nm.primary-a', array($cn)));
break;
}
return true;
}
/**
 * Authorization for this Controller, called by Auth component
 * - precondition: Session.Auth holds data used for authz decisions
 * - postcondition: $permissions set with calculated permissions
 *
 * @since COmanage Registry v0.8.3
 * @return Array Permissions
 */
function isAuthorized() {
  $roles = $this->Role->calculateCMRoles();
  $pids = $this->parsePersonID($this->request->data);

  // Is this a read only record? True if it belongs to an Org Identity that has
  // an OrgIdentity Source Record. As of the initial implementation, not even
  // CMP admins can edit such a record.
  if($this->action == 'edit' && !empty($this->request->params['pass'][0])) {
    $orgIdentityId = $this->Name->field('org_identity_id', array('id' => $this->request->params['pass'][0]));

    if($orgIdentityId) {
      $readOnly = $this->Name->OrgIdentity->readOnly($orgIdentityId);

      if($readOnly) {
        // Proactively redirect to view. This will also prevent (eg) the REST API
        // from editing a read only record.
        $args = array(
          'controller' => 'names',
          'action'     => 'view',
          filter_var($this->request->params['pass'][0],FILTER_SANITIZE_SPECIAL_CHARS)
        );

        $this->redirect($args);
      }
    }
  }

  // In order to manipulate a name, the authenticated user must have permission
  // over the associated Org Identity or CO Person. For add action, we accept
  // the identifier passed in the URL, otherwise we lookup based on the record ID.
  $managed = false;
  $self = false;
  $name = null;

  if(!empty($roles['copersonid'])) {
    switch($this->action) {
    case 'add':
      if(!empty($pids['copersonid'])) {
        $managed = $this->Role->isCoOrCouAdminForCoPerson($roles['copersonid'],
                                                          $pids['copersonid']);

        if($pids['copersonid'] == $roles['copersonid']) {
          $self = true;
        }
      } elseif(!empty($pids['orgidentityid'])) {
        $managed = $this->Role->isCoOrCouAdminForOrgIdentity($roles['copersonid'],
                                                             $pids['orgidentityid']);
      }
      break;
    case 'delete':
    case 'edit':
    case 'view':
      if(!empty($this->request->params['pass'][0])) {
        // Look up the name record to find the appropriate CO Person ID or
        // Org Identity ID, then check admin status against that owner.
        $args = array();
        $args['conditions']['Name.id'] = $this->request->params['pass'][0];
        $args['contain'] = false;

        $name = $this->Name->find('first', $args);

        if(!empty($name['Name']['co_person_id'])) {
          $managed = $this->Role->isCoOrCouAdminForCoPerson($roles['copersonid'],
                                                            $name['Name']['co_person_id']);

          if($name['Name']['co_person_id'] == $roles['copersonid']) {
            $self = true;
          }
        } elseif(!empty($name['Name']['org_identity_id'])) {
          // Consistency fix: match the capitalization used in the add case above
          // (PHP method names are case-insensitive, so behavior is unchanged).
          $managed = $this->Role->isCoOrCouAdminForOrgIdentity($roles['copersonid'],
                                                               $name['Name']['org_identity_id']);
        }
      }
      break;
    }
  }

  // Construct the permission set for this user, which will also be passed to the view.
  $p = array();

  // Self service is a bit complicated because permission can vary by type.
  // Self service only applies to CO Person-attached attributes.
  $selfperms = array(
    'add'    => false,
    'delete' => false,
    'edit'   => false,
    'view'   => false
  );

  if($self) {
    // For adds there is no existing record, so no type to restrict on.
    foreach(array_keys($selfperms) as $a) {
      $selfperms[$a] = $this->Name
                            ->CoPerson
                            ->Co
                            ->CoSelfServicePermission
                            ->calculatePermission($this->cur_co['Co']['id'],
                                                  'Name',
                                                  $a,
                                                  ($a != 'add' && !empty($name['Name']['type']))
                                                  ? $name['Name']['type'] : null);
    }

    // NOTE(review): this reaches CoSelfServicePermission via $this->Co rather than the
    // Name->CoPerson->Co chain used just above -- presumably equivalent; confirm.
    $p['selfsvc'] = $this->Co->CoSelfServicePermission->findPermissions($this->cur_co['Co']['id']);
  } else {
    $p['selfsvc'] = null;
  }

  // Add a new Name?
  $p['add'] = ($roles['cmadmin']
               || ($managed && ($roles['coadmin'] || $roles['couadmin']))
               || $selfperms['add']);

  // Delete an existing Name?
  $p['delete'] = ($roles['cmadmin']
                  || ($managed && ($roles['coadmin'] || $roles['couadmin']))
                  || $selfperms['delete']);

  // Edit an existing Name?
  $p['edit'] = ($roles['cmadmin']
                || ($managed && ($roles['coadmin'] || $roles['couadmin']))
                || $selfperms['edit']);

  // Making a name primary is the same as editing
  $p['primary'] = $p['edit'];

  // View all existing Names?
  // Currently only supported via REST since there's no use case for viewing all
  $p['index'] = $this->request->is('restful') && ($roles['cmadmin'] || $roles['coadmin']);

  // View an existing Name?
  $p['view'] = ($roles['cmadmin']
                || ($managed && ($roles['coadmin'] || $roles['couadmin']))
                || $selfperms['view']);

  $this->set('permissions', $p);
  return $p[$this->action];
}
/**
 * Make a name primary for the associated Org Identity or CO Person.
 * - precondition: <id> must exist
 * - precondition: copersonid or orgidentityid must be provided in the URL
 * - postcondition: Session flash message updated (HTML) or HTTP status returned (REST)
 * - postcondition: On success, the Name identified by <id> has its primary_name flag set
 *   (NOTE(review): previous doc claimed related data is deleted -- that text appears
 *   copied from a delete action and does not match the code below; confirm intent)
 *
 * @since COmanage Registry v0.8.3
 * @param integer Name identifier
 * @throws InvalidArgumentException
 */
public function primary($id) {
$ret = false;
// We create a transaction here rather than in Name->beforeSave because the model
// can't guarantee a rollback on any error.
$dbc = $this->Name->getDataSource();
$dbc->begin($this);
// Set the new primary name.
// Read the current data for this name (used for the history diff below).
$args = array();
$args['conditions']['Name.id'] = $id;
$args['contain'] = false;
$curdata = $this->Name->find('first', $args);
$this->Name->id = $id;
if($this->Name->saveField('primary_name', true)) {
// Reread the current data for this name
$newdata = $this->Name->find('first', $args);
// Only consider the operation successful if the history entry records too.
if($this->recordHistory('primary', $newdata, $curdata)) {
$ret = true;
}
}
// Commit only on full success; otherwise roll back both the flag change
// and any partial history write.
if($ret) {
$dbc->commit($this);
$this->Flash->set(_txt('rs.nm.primary'), array('key' => 'success'));
} else {
$dbc->rollback($this);
$this->Flash->set(_txt('er.db.save'), array('key' => 'error'));
}
if(!$this->request->is('restful')) {
// Redirect back to the person the name belongs to: the Org Identity if its
// model data was populated, otherwise the CO Person.
if(!empty($this->Name->OrgIdentity->data)) {
$this->checkPersonId("force", $this->Name->OrgIdentity->data);
} else {
$this->checkPersonId("force", $this->Name->CoPerson->data);
}
}
}
}
|
irtnog/comanage-registry
|
app/Controller/NamesController.php
|
PHP
|
apache-2.0
| 15,466
|
/*
* Copyright 2015-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.completion;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.springframework.cloud.dataflow.configuration.metadata.ApplicationConfigurationMetadataResolver;
import org.springframework.cloud.dataflow.core.ApplicationType;
import org.springframework.cloud.dataflow.core.TaskDefinition;
import org.springframework.cloud.dataflow.core.dsl.CheckPointedParseException;
import org.springframework.cloud.dataflow.registry.AppRegistryCommon;
import org.springframework.cloud.dataflow.registry.domain.AppRegistration;
/**
 * Provides completion proposals when the user has typed the two dashes that precede an
 * app configuration property.
 *
 * @author Eric Bottard
 * @author Mark Fisher
 * @author Andy Clement
 * @author Oleg Zhurakousky
 */
class ConfigurationPropertyNameAfterDashDashTaskRecoveryStrategy
		extends StacktraceFingerprintingTaskRecoveryStrategy<CheckPointedParseException> {

	private final ProposalsCollectorSupportUtils collectorSupport;

	ConfigurationPropertyNameAfterDashDashTaskRecoveryStrategy(AppRegistryCommon appRegistry,
			ApplicationConfigurationMetadataResolver metadataResolver) {
		super(CheckPointedParseException.class, "file --");
		this.collectorSupport = new ProposalsCollectorSupportUtils(appRegistry, metadataResolver);
	}

	@Override
	public void addProposals(String dsl, CheckPointedParseException exception, int detailLevel,
			List<CompletionProposal> collector) {
		// Only the text up to the last successfully parsed position forms a valid definition.
		String parseableDsl = exception.getExpressionStringUntilCheckpoint();
		TaskDefinition definition = new TaskDefinition("__dummy", parseableDsl);

		AppRegistration registration = this.collectorSupport
				.findAppRegistration(definition.getRegisteredAppName(), ApplicationType.task);
		if (registration == null) {
			// Unknown app name: nothing sensible to propose.
			return;
		}

		// Exclude properties the user has already supplied from the proposals.
		Set<String> alreadyPresentOptions = new HashSet<>(definition.getProperties().keySet());
		this.collectorSupport.addPropertiesProposals(parseableDsl, "", registration,
				alreadyPresentOptions, collector, detailLevel);
	}

}
|
markfisher/spring-cloud-data
|
spring-cloud-dataflow-completion/src/main/java/org/springframework/cloud/dataflow/completion/ConfigurationPropertyNameAfterDashDashTaskRecoveryStrategy.java
|
Java
|
apache-2.0
| 2,643
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
require "concurrent/atomics"
describe Google::Cloud::PubSub::FlowController, :mock_pubsub do
it "knows its default settings" do
flow_controller = Google::Cloud::PubSub::FlowController.new
_(flow_controller.message_limit).must_equal 1000 # 10 * AsyncPublisher#max_messages
_(flow_controller.byte_limit).must_equal 10_000_000 # 10 * AsyncPublisher#max_bytes
_(flow_controller.limit_exceeded_behavior).must_equal :ignore
end
it "knows its custom settings" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 123,
byte_limit: 456,
limit_exceeded_behavior: :block
)
_(flow_controller.message_limit).must_equal 123
_(flow_controller.byte_limit).must_equal 456
_(flow_controller.limit_exceeded_behavior).must_equal :block
end
it "raises when limit_exceeded_behavior is illegal value" do
expect do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 10_000_000,
limit_exceeded_behavior: :badvalue
)
end.must_raise ArgumentError
end
describe "ignore" do
it "does not raise or block when > message_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1,
byte_limit: 10_000_000,
limit_exceeded_behavior: :ignore
)
flow_controller.acquire 3
flow_controller.acquire 3
end
it "does not raise or block when > byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 1,
limit_exceeded_behavior: :ignore
)
flow_controller.acquire 3
flow_controller.acquire 3
end
end
describe "error" do
it "does not raise when <= message_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 2,
byte_limit: 10_000_000,
limit_exceeded_behavior: :error
)
flow_controller.acquire 3
flow_controller.acquire 3
end
it "does not raise when <= byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 3 * 2,
limit_exceeded_behavior: :error
)
flow_controller.acquire 3
flow_controller.acquire 3
end
it "raises when > message_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1,
byte_limit: 10_000_000,
limit_exceeded_behavior: :error
)
flow_controller.acquire 3
flow_controller.release 3
flow_controller.acquire 3
expect do
flow_controller.acquire 3
end.must_raise Google::Cloud::PubSub::FlowControlLimitError
end
it "raises when > byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 3,
limit_exceeded_behavior: :error
)
flow_controller.acquire 3
flow_controller.release 3
flow_controller.acquire 3
expect do
flow_controller.acquire 3
end.must_raise Google::Cloud::PubSub::FlowControlLimitError
end
it "does not raise when sufficient capacity is released before new acquires" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 2,
byte_limit: 3 * 2,
limit_exceeded_behavior: :error
)
flow_controller.acquire 3
flow_controller.acquire 3
flow_controller.release 3
flow_controller.acquire 3
flow_controller.release 3
flow_controller.acquire 3
end
it "raises if too many message limit releases" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1,
byte_limit: 10_000_000,
limit_exceeded_behavior: :error
)
flow_controller.acquire 3
flow_controller.release 3
expect do
flow_controller.release 3
end.must_raise RuntimeError
end
it "raises if too many byte limit releases" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 3,
limit_exceeded_behavior: :error
)
flow_controller.acquire 3
flow_controller.release 3
expect do
flow_controller.release 3
end.must_raise RuntimeError
end
end
describe "block" do
it "does not block when <= message_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 3,
byte_limit: 10_000_000,
limit_exceeded_behavior: :block
)
adding_1_done = Concurrent::Event.new
adding_2_done = Concurrent::Event.new
adding_3_done = Concurrent::Event.new
releasing_1_done = Concurrent::Event.new
releasing_2_done = Concurrent::Event.new
releasing_3_done = Concurrent::Event.new
run_in_thread flow_controller, :acquire, 3, adding_1_done
assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
run_in_thread flow_controller, :acquire, 3, adding_2_done
assert adding_2_done.wait(0.1), "Adding message 2 never unblocked."
run_in_thread flow_controller, :acquire, 3, adding_3_done
assert adding_3_done.wait(0.1), "Adding message 3 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_1_done
assert releasing_1_done.wait(0.1), "Releasing message 1 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_2_done
assert releasing_2_done.wait(0.1), "Releasing message 2 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_3_done
assert releasing_3_done.wait(0.1), "Releasing message 3 never unblocked."
end
it "does not block when <= byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 3 * 3,
limit_exceeded_behavior: :block
)
adding_1_done = Concurrent::Event.new
adding_2_done = Concurrent::Event.new
adding_3_done = Concurrent::Event.new
releasing_1_done = Concurrent::Event.new
releasing_2_done = Concurrent::Event.new
releasing_3_done = Concurrent::Event.new
run_in_thread flow_controller, :acquire, 3, adding_1_done
assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
run_in_thread flow_controller, :acquire, 3, adding_2_done
assert adding_2_done.wait(0.1), "Adding message 2 never unblocked."
run_in_thread flow_controller, :acquire, 3, adding_3_done
assert adding_3_done.wait(0.1), "Adding message 3 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_1_done
assert releasing_1_done.wait(0.1), "Releasing message 1 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_2_done
assert releasing_2_done.wait(0.1), "Releasing message 2 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_3_done
assert releasing_3_done.wait(0.1), "Releasing message 3 never unblocked."
end
it "raises when a single message is > message_limit" do
expect do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 0, # Non-sane setting
byte_limit: 10_000_000,
limit_exceeded_behavior: :block
)
end.must_raise ArgumentError
end
it "raises when a single message is > byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 3,
limit_exceeded_behavior: :block
)
expect do
flow_controller.acquire 3 * 2
end.must_raise Google::Cloud::PubSub::FlowControlLimitError
end
it "blocks when > message_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1,
byte_limit: 100,
limit_exceeded_behavior: :block
)
adding_1_done = Concurrent::Event.new
adding_2_done = Concurrent::Event.new
adding_3_done = Concurrent::Event.new
releasing_1_done = Concurrent::Event.new
releasing_2_done = Concurrent::Event.new
releasing_3_done = Concurrent::Event.new
_(flow_controller.outstanding_bytes).must_equal 0
run_in_thread flow_controller, :acquire, 3, adding_1_done
assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :acquire, 3, adding_2_done
refute adding_2_done.wait(0.1), "Adding message 2 did not block."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :acquire, 3, adding_3_done
refute adding_3_done.wait(0.1), "Adding message 3 did not block."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :release, 3, releasing_1_done
assert releasing_1_done.wait(0.1), "Releasing message 1 errored."
assert adding_2_done.wait(0.1), "Adding message 2 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :release, 3, releasing_2_done
assert releasing_2_done.wait(0.1), "Releasing message 2 errored."
assert adding_3_done.wait(0.1), "Adding message 3 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :release, 3, releasing_3_done
assert releasing_3_done.wait(0.1), "Releasing message 3 errored."
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
end
it "blocks when > byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 3,
limit_exceeded_behavior: :block
)
adding_1_done = Concurrent::Event.new
adding_2_done = Concurrent::Event.new
adding_3_done = Concurrent::Event.new
releasing_1_done = Concurrent::Event.new
releasing_2_done = Concurrent::Event.new
releasing_3_done = Concurrent::Event.new
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
run_in_thread flow_controller, :acquire, 3, adding_1_done
assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :acquire, 3, adding_2_done
refute adding_2_done.wait(0.1), "Adding message 2 did not block."
run_in_thread flow_controller, :acquire, 3, adding_3_done
refute adding_3_done.wait(0.1), "Adding message 3 did not block."
run_in_thread flow_controller, :release, 3, releasing_1_done
assert releasing_1_done.wait(0.1), "Releasing message 1 errored."
assert adding_2_done.wait(0.1), "Adding message 2 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_2_done
assert releasing_2_done.wait(0.1), "Releasing message 2 errored."
assert adding_3_done.wait(0.1), "Adding message 3 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :release, 3, releasing_3_done
assert releasing_3_done.wait(0.1), "Releasing message 3 errored."
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
end
it "blocks when insufficient bytes available" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 4,
limit_exceeded_behavior: :block
)
adding_1_done = Concurrent::Event.new
adding_2_done = Concurrent::Event.new
adding_3_done = Concurrent::Event.new
releasing_1_done = Concurrent::Event.new
releasing_2_done = Concurrent::Event.new
releasing_3_done = Concurrent::Event.new
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
run_in_thread flow_controller, :acquire, 3, adding_1_done
assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :acquire, 3, adding_2_done
refute adding_2_done.wait(0.1), "Adding message 2 did not block."
run_in_thread flow_controller, :acquire, 3, adding_3_done
refute adding_3_done.wait(0.1), "Adding message 3 did not block."
run_in_thread flow_controller, :release, 3, releasing_1_done
assert releasing_1_done.wait(0.1), "Releasing message 1 errored."
assert adding_2_done.wait(0.1), "Adding message 2 never unblocked."
run_in_thread flow_controller, :release, 3, releasing_2_done
assert releasing_2_done.wait(0.1), "Releasing message 2 errored."
assert adding_3_done.wait(0.1), "Adding message 3 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :release, 3, releasing_3_done
assert releasing_3_done.wait(0.1), "Releasing message 3 errored."
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
end
it "unblocks all waiting acquires to byte_limit after a single release of sufficient bytes" do
  flow_controller = Google::Cloud::PubSub::FlowController.new(
    message_limit: 1000,
    byte_limit: 3,
    limit_exceeded_behavior: :block
  )
  adding_1_done = Concurrent::Event.new
  adding_2_done = Concurrent::Event.new
  adding_3_done = Concurrent::Event.new
  adding_4_done = Concurrent::Event.new
  # Fill the entire byte capacity with a single message.
  run_in_thread flow_controller, :acquire, 3, adding_1_done
  assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
  _(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
  # Three 1-byte acquires must all block while capacity is exhausted.
  run_in_thread flow_controller, :acquire, 1, adding_2_done
  refute adding_2_done.wait(0.1), "Adding message 2 did not block."
  run_in_thread flow_controller, :acquire, 1, adding_3_done
  refute adding_3_done.wait(0.1), "Adding message 3 did not block."
  run_in_thread flow_controller, :acquire, 1, adding_4_done
  refute adding_4_done.wait(0.1), "Adding message 4 did not block."
  # A single release of 3 bytes should satisfy all three 1-byte waiters.
  # (Failure messages fixed: they previously all referred to "message 2".)
  flow_controller.release 3
  assert adding_2_done.wait(0.1), "Adding message 2 never unblocked."
  assert adding_3_done.wait(0.1), "Adding message 3 never unblocked."
  assert adding_4_done.wait(0.1), "Adding message 4 never unblocked."
  _(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
end
it "blocks but does not starve large messages when > byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 110,
limit_exceeded_behavior: :block
)
large_msg = 100 # close to entire byte limit
adding_initial_done = Concurrent::Event.new
adding_large_done = Concurrent::Event.new
adding_busy_done = Concurrent::Event.new
releasing_busy_done = Concurrent::Event.new
releasing_large_done = Concurrent::Event.new
releasing_initial_done = Concurrent::Event.new
# Occupy some of the flow capacity, then try to add a large message. Releasing
# enough messages should eventually allow the large message to come through, even
# if more messages are added after it (those should wait for the large message).
initial_messages = Array.new(5) { 10 }
run_in_thread flow_controller, :acquire, initial_messages, adding_initial_done
assert adding_initial_done.wait(0.1), "Adding initial messages blocked or errored."
_(flow_controller.outstanding_bytes).must_equal 50 # Implementation detail
run_in_thread flow_controller, :acquire, large_msg, adding_large_done
# Continuously keep adding more messages after the large one.
messages = Array.new(10) { 10 }
run_in_thread flow_controller, :acquire, messages, adding_busy_done, action_pause: 0.1
# At the same time, gradually keep releasing the messages - the released
# capacity should be consumed by the large message, not the other small messages
# being added after it.
run_in_thread flow_controller, :release, messages, releasing_busy_done, action_pause: 0.1
# Sanity check - releasing should have completed by now.
assert releasing_busy_done.wait(2), "Releasing messages blocked or errored."
# Enough messages released, the large message should have come through in the meantime.
assert adding_large_done.wait(0.1), "A thread adding a large message starved."
refute adding_busy_done.wait(0.1), "Adding multiple small messages did not block."
# Releasing the large message should unblock adding the remaining "busy" messages
# that have not been added yet.
run_in_thread flow_controller, :release, large_msg, releasing_large_done
assert releasing_large_done.wait(0.1), "Releasing a message blocked or errored."
assert adding_busy_done.wait(1.0), "Adding messages blocked or errored."
_(flow_controller.outstanding_bytes).must_equal 50 # Implementation detail
run_in_thread flow_controller, :release, initial_messages, releasing_initial_done
assert releasing_initial_done.wait(0.1), "Releasing initial messages blocked or errored."
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
end
it "blocks in a queue of waiting threads that avoids deadlocks" do
50.times do |i|
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 1000,
byte_limit: 100,
limit_exceeded_behavior: :block
)
adding_1_done = Concurrent::Event.new
adding_2_done = Concurrent::Event.new
adding_3_done = Concurrent::Event.new
_(flow_controller.outstanding_bytes).must_equal 0
run_in_thread flow_controller, :acquire, 40, adding_1_done
assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
run_in_thread flow_controller, :acquire, 80, adding_2_done
refute adding_2_done.wait(0.1), "Adding message 2 did not block."
flow_controller.release 40
run_in_thread flow_controller, :acquire, 50, adding_3_done
assert adding_2_done.wait(1), "Adding message 2 never unblocked. test loop: #{i}"
flow_controller.release 80
assert adding_3_done.wait(0.1), "Adding message 3 never unblocked. test loop: #{i}"
end
end
it "blocks when > message_limit and > byte_limit" do
flow_controller = Google::Cloud::PubSub::FlowController.new(
message_limit: 2,
byte_limit: 3,
limit_exceeded_behavior: :block
)
adding_1_done = Concurrent::Event.new
adding_2_done = Concurrent::Event.new
adding_3_done = Concurrent::Event.new
releasing_1_done = Concurrent::Event.new
releasing_2_done = Concurrent::Event.new
releasing_3_done = Concurrent::Event.new
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
run_in_thread flow_controller, :acquire, 1, adding_1_done
assert adding_1_done.wait(0.1), "Adding message 1 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 1 # Implementation detail
run_in_thread flow_controller, :acquire, 2, adding_2_done
assert adding_2_done.wait(0.1), "Adding message 2 never unblocked."
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :acquire, 2, adding_3_done
refute adding_3_done.wait(0.1), "Adding message 3 did not block."
_(flow_controller.awaiting.count).must_equal 1 # Implementation detail
_(flow_controller.outstanding_bytes).must_equal 3 # Implementation detail
run_in_thread flow_controller, :release, 1, releasing_1_done
assert releasing_1_done.wait(0.1), "Releasing message 1 errored."
# Msg 3 is no longer blocked by message limit, but still blocked by byte limit.
refute adding_3_done.wait(0.1), "Adding message 3 did not remain blocked."
_(flow_controller.awaiting.count).must_equal 1 # Implementation detail
_(flow_controller.outstanding_messages).must_equal 1 # Implementation detail
_(flow_controller.outstanding_bytes).must_equal 2 # Implementation detail
run_in_thread flow_controller, :release, 2, releasing_2_done
assert releasing_2_done.wait(0.1), "Releasing message 2 errored."
assert adding_3_done.wait(0.1), "Adding message 3 never unblocked."
_(flow_controller.awaiting.count).must_equal 0 # Implementation detail
_(flow_controller.outstanding_bytes).must_equal 2 # Implementation detail
run_in_thread flow_controller, :release, 2, releasing_3_done
assert releasing_3_done.wait(0.1), "Releasing message 3 errored."
_(flow_controller.outstanding_bytes).must_equal 0 # Implementation detail
end
end
# Build a PubsubMessage proto carrying the given data payload.
def pubsub_message(data)
  Google::Cloud::PubSub::V1::PubsubMessage.new(data: data)
end
# Run a flow controller action in a separate thread.
#
# flow_controller - the FlowController under test
# action          - :acquire or :release
# messages        - a message byte size, or an array of sizes processed in order
# all_done_event  - Concurrent::Event set once every action call has returned
# error_event:    - optional Concurrent::Event set when a flow control limit is hit
# action_pause:   - optional seconds to sleep before each action call
def run_in_thread flow_controller,
                  action,
                  messages,
                  all_done_event,
                  error_event: nil,
                  action_pause: nil
  assert [:acquire, :release].include? action
  Thread.new do
    begin
      Array(messages).each do |msg|
        sleep action_pause if action_pause
        flow_controller.send action, msg
      end
      all_done_event.set
    # Fully qualify the error class: the bare FlowControlLimitError constant is
    # not resolvable in this spec's scope, so the rescue itself raised NameError
    # inside the thread instead of signaling error_event.
    rescue Google::Cloud::PubSub::FlowControlLimitError
      error_event.set if error_event
    end
  end
end
end
|
dazuma/google-cloud-ruby
|
google-cloud-pubsub/test/google/cloud/pubsub/flow_controller_test.rb
|
Ruby
|
apache-2.0
| 22,645
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Global multiplier applied to every sleep duration (harness code can tune it).
scale = 1.0


def sleep(secs):
    """Pause the current thread for ``secs`` seconds, scaled by ``scale``."""
    import time

    time.sleep(scale * secs)
|
phrocker/accumulo
|
test/system/auto/sleep.py
|
Python
|
apache-2.0
| 856
|
# frozen_string_literal: true
if defined?(ChefSpec)
# Matcher asserting that the icinga2_apilistener resource named +name+ was created.
def create_icinga2_apilistener(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_apilistener, :create, name)
end
# Matcher asserting that the icinga2_apilistener resource named +name+ was deleted.
def delete_icinga2_apilistener(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_apilistener, :delete, name)
end
def create_icinga2_apiuser(name)
ChefSpec::Matchers::ResourceMatcher
.new(:icinga2_apiuser, :create, name)
end
def delete_icinga2_apiuser(name)
ChefSpec::Matchers::ResourceMatcher
.new(:icinga2_apiuser, :delete, name)
end
def create_icinga2_applydependency(name)
ChefSpec::Matchers::ResourceMatcher
.new(:icinga2_applydependency, :create, name)
end
def delete_icinga2_applydependency(name)
ChefSpec::Matchers::ResourceMatcher
.new(:icinga2_applydependency, :delete, name)
end
# ChefSpec matcher helpers for icinga2 custom resources (apply-notification
# through influxdb-writer). Each helper returns a ResourceMatcher asserting
# that the named resource ran with the given action.

def create_icinga2_applynotification(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_applynotification, :create, name)
end

def delete_icinga2_applynotification(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_applynotification, :delete, name)
end

def create_icinga2_applyservice(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_applyservice, :create, name)
end

def delete_icinga2_applyservice(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_applyservice, :delete, name)
end

def create_icinga2_checkcommand(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_checkcommand, :create, name)
end

def delete_icinga2_checkcommand(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_checkcommand, :delete, name)
end

def create_icinga2_checkresultreader(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_checkresultreader, :create, name)
end

def delete_icinga2_checkresultreader(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_checkresultreader, :delete, name)
end

def create_icinga2_compatlogger(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_compatlogger, :create, name)
end

def delete_icinga2_compatlogger(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_compatlogger, :delete, name)
end

def create_icinga2_endpoint(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_endpoint, :create, name)
end

def delete_icinga2_endpoint(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_endpoint, :delete, name)
end

def create_icinga2_envhostgroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_envhostgroup, :create, name)
end

def delete_icinga2_envhostgroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_envhostgroup, :delete, name)
end

def create_icinga2_environment(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_environment, :create, name)
end

def delete_icinga2_environment(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_environment, :delete, name)
end

def create_icinga2_eventcommand(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_eventcommand, :create, name)
end

def delete_icinga2_eventcommand(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_eventcommand, :delete, name)
end

def create_icinga2_externalcommandlistener(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_externalcommandlistener, :create, name)
end

def delete_icinga2_externalcommandlistener(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_externalcommandlistener, :delete, name)
end

def create_icinga2_feature(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_feature, :create, name)
end

def delete_icinga2_feature(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_feature, :delete, name)
end

def create_icinga2_filelogger(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_filelogger, :create, name)
end

def delete_icinga2_filelogger(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_filelogger, :delete, name)
end

def create_icinga2_gelfwriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_gelfwriter, :create, name)
end

def delete_icinga2_gelfwriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_gelfwriter, :delete, name)
end

def create_icinga2_graphitewriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_graphitewriter, :create, name)
end

def delete_icinga2_graphitewriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_graphitewriter, :delete, name)
end

def create_icinga2_influxdbwriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_influxdbwriter, :create, name)
end

def delete_icinga2_influxdbwriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_influxdbwriter, :delete, name)
end
# ChefSpec matcher helpers for icinga2 custom resources (host through zone).
# Each helper returns a ResourceMatcher asserting that the named resource ran
# with the given action.

def create_icinga2_host(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_host, :create, name)
end

def delete_icinga2_host(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_host, :delete, name)
end

def create_icinga2_hostgroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_hostgroup, :create, name)
end

def delete_icinga2_hostgroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_hostgroup, :delete, name)
end

def create_icinga2_idomysqlconnection(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_idomysqlconnection, :create, name)
end

def delete_icinga2_idomysqlconnection(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_idomysqlconnection, :delete, name)
end

def create_icinga2_idopgsqlconnection(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_idopgsqlconnection, :create, name)
end

def delete_icinga2_idopgsqlconnection(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_idopgsqlconnection, :delete, name)
end

def create_icinga2_livestatuslistener(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_livestatuslistener, :create, name)
end

def delete_icinga2_livestatuslistener(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_livestatuslistener, :delete, name)
end

def create_icinga2_notification(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_notification, :create, name)
end

def delete_icinga2_notification(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_notification, :delete, name)
end

def create_icinga2_notificationcommand(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_notificationcommand, :create, name)
end

def delete_icinga2_notificationcommand(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_notificationcommand, :delete, name)
end

def create_icinga2_notificationcomponent(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_notificationcomponent, :create, name)
end

def delete_icinga2_notificationcomponent(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_notificationcomponent, :delete, name)
end

def create_icinga2_perfdatawriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_perfdatawriter, :create, name)
end

def delete_icinga2_perfdatawriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_perfdatawriter, :delete, name)
end

def create_icinga2_scheduleddowntime(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_scheduleddowntime, :create, name)
end

def delete_icinga2_scheduleddowntime(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_scheduleddowntime, :delete, name)
end

def create_icinga2_script(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_script, :create, name)
end

def delete_icinga2_script(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_script, :delete, name)
end

def create_icinga2_service(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_service, :create, name)
end

def delete_icinga2_service(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_service, :delete, name)
end

def create_icinga2_servicegroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_servicegroup, :create, name)
end

def delete_icinga2_servicegroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_servicegroup, :delete, name)
end

def create_icinga2_statusdatawriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_statusdatawriter, :create, name)
end

def delete_icinga2_statusdatawriter(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_statusdatawriter, :delete, name)
end

def create_icinga2_sysloglogger(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_sysloglogger, :create, name)
end

def delete_icinga2_sysloglogger(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_sysloglogger, :delete, name)
end

def create_icinga2_timeperiod(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_timeperiod, :create, name)
end

def delete_icinga2_timeperiod(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_timeperiod, :delete, name)
end

def create_icinga2_user(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_user, :create, name)
end

def delete_icinga2_user(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_user, :delete, name)
end

def create_icinga2_usergroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_usergroup, :create, name)
end

def delete_icinga2_usergroup(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_usergroup, :delete, name)
end

def create_icinga2_zone(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_zone, :create, name)
end

def delete_icinga2_zone(name)
  ChefSpec::Matchers::ResourceMatcher.new(:icinga2_zone, :delete, name)
end
end
|
vkhatri/chef-icinga2-1
|
spec/support/matchers.rb
|
Ruby
|
apache-2.0
| 10,159
|
package org.semanticweb.elk.owl.inferences;
import org.semanticweb.elk.owl.interfaces.ElkAxiom;
/*
* #%L
* ELK Proofs Package
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2011 - 2016 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.semanticweb.elk.owl.interfaces.ElkClassExpression;
import org.semanticweb.elk.owl.interfaces.ElkObject;
import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyExpression;
import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyRangeAxiom;
import org.semanticweb.elk.owl.interfaces.ElkSubClassOfAxiom;
/**
* Represents the inference:
*
* <pre>
* (1) (2)
* C ⊑ ∃R.Self ObjectPropertyRange(R D)
* ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
* C ⊑ D
* </pre>
*
* @author Yevgeny Kazakov
*
*/
/**
 * Represents the inference:
 *
 * <pre>
 *      (1)                    (2)
 *  C ⊑ ∃R.Self    ObjectPropertyRange(R D)
 * ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯
 *                 C ⊑ D
 * </pre>
 *
 * If every member of C is R-related to itself, and the range of R is D, then
 * every member of C is also a member of D.
 *
 * @author Yevgeny Kazakov
 */
public class ElkClassInclusionReflexivePropertyRange
		extends AbstractElkInference {

	/** Human-readable name of this inference rule. */
	public static final String NAME = "Reflexive Property Range";

	/** C in the rule above. */
	private final ElkClassExpression subClass;

	/** R in the rule above. */
	private final ElkObjectPropertyExpression property;

	/** D in the rule above. */
	private final ElkClassExpression range;

	ElkClassInclusionReflexivePropertyRange(ElkClassExpression subClass,
			ElkObjectPropertyExpression property, ElkClassExpression range) {
		this.subClass = subClass;
		this.property = property;
		this.range = range;
	}

	/** @return C, the subclass of the conclusion */
	public ElkClassExpression getSubClass() {
		return subClass;
	}

	/** @return R, the reflexive property */
	public ElkObjectPropertyExpression getProperty() {
		return property;
	}

	/** @return D, the range of the property and superclass of the conclusion */
	public ElkClassExpression getRange() {
		return range;
	}

	@Override
	public String getName() {
		return NAME;
	}

	@Override
	public int getPremiseCount() {
		return 2;
	}

	@Override
	public ElkAxiom getPremise(int index, ElkObject.Factory factory) {
		if (index == 0) {
			return getFirstPremise(factory);
		}
		if (index == 1) {
			return getSecondPremise(factory);
		}
		return failGetPremise(index);
	}

	/** @return premise (1): C ⊑ ∃R.Self */
	public ElkSubClassOfAxiom getFirstPremise(ElkObject.Factory factory) {
		return factory.getSubClassOfAxiom(subClass,
				factory.getObjectHasSelf(property));
	}

	/** @return premise (2): ObjectPropertyRange(R D) */
	public ElkObjectPropertyRangeAxiom getSecondPremise(
			ElkObject.Factory factory) {
		return factory.getObjectPropertyRangeAxiom(property, range);
	}

	@Override
	public ElkSubClassOfAxiom getConclusion(ElkObject.Factory factory) {
		return factory.getSubClassOfAxiom(subClass, range);
	}

	@Override
	public <O> O accept(ElkInference.Visitor<O> visitor) {
		return visitor.visit(this);
	}

	/**
	 * A factory for creating instances of this inference.
	 *
	 * @author Yevgeny Kazakov
	 */
	public interface Factory {

		ElkClassInclusionReflexivePropertyRange getElkClassInclusionReflexivePropertyRange(
				ElkClassExpression subClass,
				ElkObjectPropertyExpression property, ElkClassExpression range);

	}

	/**
	 * The visitor pattern for instances of this inference.
	 *
	 * @author Yevgeny Kazakov
	 *
	 * @param <O>
	 *            the type of the output
	 */
	interface Visitor<O> {

		O visit(ElkClassInclusionReflexivePropertyRange inference);

	}

}
|
liveontologies/elk-reasoner
|
elk-proofs/src/main/java/org/semanticweb/elk/owl/inferences/ElkClassInclusionReflexivePropertyRange.java
|
Java
|
apache-2.0
| 3,617
|
<script>
// Immediately redirect the browser to the application's entry page.
window.location = "/app/index.html";
</script>
|
qwefgh90/handyfinder
|
src/test/resources/appdata/app/redirect.html
|
HTML
|
apache-2.0
| 56
|
/* Copyright 2007 Alin Dreghiciu.
* Copyright 2010 Achim Nierbeck.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ops4j.pax.web.deployer.internal;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.apache.felix.fileinstall.ArtifactUrlTransformer;
import org.osgi.service.component.annotations.Component;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An Apache Felix FileInstall transform for WAR files.
*
* @author Alin Dreghiciu, Achim Nierbeck
*/
@Component
public class WarDeployer implements ArtifactUrlTransformer {
/**
* Logger.
*/
private static final Logger LOG = LoggerFactory.getLogger(WarDeployer.class);
/**
* Standard PATH separator
*/
private static final String PATH_SEPERATOR = "/";
public boolean canHandle(final File artifact) {
JarFile jar = null;
try {
// the file needs to either end with .war
// or with .war.jar (_jar upped_ by DirectoryWatcher)
if (!artifact.getName().endsWith(".war") && !artifact.getName().endsWith(".war.jar")) {
return false;
}
jar = new JarFile(artifact);
JarEntry entry = jar.getJarEntry("WEB-INF/web.xml");
// Only handle WAR artifacts
if (entry == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("No war file do not handle artifact:"
+ artifact.getName());
}
return false;
}
// Only handle non OSGi bundles
Manifest m = jar.getManifest();
if (m != null
&& m.getMainAttributes().getValue(
new Attributes.Name("Bundle-SymbolicName")) != null
&& m.getMainAttributes().getValue(
new Attributes.Name("Bundle-Version")) != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("This artifact has OSGi Manifest Header skipping: "
+ artifact.getName());
}
return false;
}
//CHECKSTYLE:OFF
} catch (Exception e) {
if (LOG.isTraceEnabled()) {
LOG.trace("Can't handle file " + artifact.getName(), e);
}
return false;
} finally {
if (jar != null) {
try {
jar.close();
} catch (IOException ignore) {
if (LOG.isDebugEnabled()) {
LOG.debug("failed to close war file", ignore);
}
}
}
}
//CHECKSTYLE:ON
try {
new URL("webbundle", null, artifact.toURI().toURL()
.toExternalForm());
} catch (MalformedURLException e) {
LOG.warn(String
.format("File %s could not be transformed. Most probably that Pax URL WAR handler is not installed",
artifact.getAbsolutePath()));
return false;
}
return true;
}
public URL transform(final URL artifact) throws Exception {
if (LOG.isDebugEnabled()) {
LOG.debug("Transforming artifact with URL: " + artifact);
}
final String path = artifact.getPath();
final String protocol = artifact.getProtocol();
if (path != null) {
int idx = -1;
// match the last slash to retrieve the name of the archive
if ("jardir".equalsIgnoreCase(protocol)) {
// just to make sure this works on all kinds of windows
File fileInstance = new File(path);
// with a jardir this is system specific
idx = fileInstance.getAbsolutePath()
.lastIndexOf(File.separator);
} else {
// a standard file is not system specific, this is always a
// standardized URL path
idx = path.lastIndexOf(PATH_SEPERATOR);
}
// match the suffix so we get rid of it for displaying
if (idx > 0) {
final String[] name = DeployerUtils.extractNameVersionType(path
.substring(idx + 1));
final StringBuilder url = new StringBuilder();
url.append(artifact.toExternalForm());
if (artifact.toExternalForm().contains("?")) {
url.append("&");
} else {
url.append("?");
}
url.append("Web-ContextPath=").append(name[0]);
url.append("&");
url.append("Bundle-SymbolicName=").append(name[0]);
url.append("&");
url.append("Bundle-Version=").append(name[1]);
LOG.debug("Transformed URL of {} to following {}", path, url);
return new URL("webbundle", null, url.toString());
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("No path for given artifact, retry with webbundle prepended");
}
return new URL("webbundle", null, artifact.toExternalForm());
}
}
|
stsiano/org.ops4j.pax.web
|
pax-web-deployer/src/main/java/org/ops4j/pax/web/deployer/internal/WarDeployer.java
|
Java
|
apache-2.0
| 5,024
|
/*
* ----------------------------------------
* Jenkins Test Tracker
* ----------------------------------------
* Produced by Dan Grew
* 2016
* ----------------------------------------
*/
package uk.dangrew.jtt.desktop.graphics;
/**
* Implementation of {@link PlatformDecoupler} to simply run the {@link Runnable}
* without the {@link PlatformImpl} thread. This is used for testing.
*/
/**
 * Test-only {@link PlatformDecoupler} that executes the given {@link Runnable}
 * directly on the calling thread, bypassing the platform thread entirely.
 */
public class TestPlatformDecouplerImpl implements PlatformDecoupler {

	/** Optional callback invoked after every decoupled run; may be null. */
	private final Runnable recorder;

	/**
	 * Constructs a new {@link TestPlatformDecouplerImpl} with no recorder.
	 */
	public TestPlatformDecouplerImpl() {
		this(null);
	}

	/**
	 * Constructs a new {@link TestPlatformDecouplerImpl}.
	 *
	 * @param recorder the {@link Runnable} to call when this is invoked
	 */
	public TestPlatformDecouplerImpl(Runnable recorder) {
		this.recorder = recorder;
	}

	@Override
	public void run(Runnable runnable) {
		// Run synchronously, then notify the recorder if one was supplied.
		runnable.run();
		if (recorder == null) {
			return;
		}
		recorder.run();
	}

}
|
DanGrew/JenkinsTestTracker
|
JttDesktop/test/uk/dangrew/jtt/desktop/graphics/TestPlatformDecouplerImpl.java
|
Java
|
apache-2.0
| 1,154
|
<!DOCTYPE html>
<html>
<head>
<meta name="viewport" content="width=device-width">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>N O L A N</title>
<style type="text/css">
@media only screen and (max-width: 620px) {
table[class=body] h1 {
font-size: 28px !important;
margin-bottom: 10px !important; }
table[class=body] p,
table[class=body] ul,
table[class=body] ol,
table[class=body] td,
table[class=body] span,
table[class=body] a {
font-size: 16px !important;}
table[class=body] .wrapper,
table[class=body] .article {
padding: 10px !important; }
table[class=body] .content {
padding: 0 !important; }
table[class=body] .container {
padding: 0 !important;
width: 100% !important; }
table[class=body] .main {
border-left-width: 0 !important;
border-radius: 0 !important;
border-right-width: 0 !important; }
table[class=body] .btn table {
width: 100% !important; }
table[class=body] .btn a {
width: 100% !important; }
table[class=body] .img-responsive {
height: auto !important;
max-width: 100% !important;
width: auto !important; }}
@media all {
.ExternalClass {
width: 100%; }
.ExternalClass,
.ExternalClass p,
.ExternalClass span,
.ExternalClass font,
.ExternalClass td,
.ExternalClass div {
line-height: 100%; }
.apple-link a {
color: inherit !important;
font-family: inherit !important;
font-size: inherit !important;
font-weight: inherit !important;
line-height: inherit !important;
text-decoration: none !important; }
.btn-primary table td:hover {
background-color: #34495e !important; }
.btn-primary a:hover {
background-color: #34495e !important;
border-color: #34495e !important; } }
</style>
</head>
<body class="" style="background-color:#f6f6f6;font-family:sans-serif;-webkit-font-smoothing:antialiased;font-size:14px;line-height:1.4;margin:0;padding:0;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%;">
<table border="0" cellpadding="0" cellspacing="0" class="body" style="border-collapse:separate;mso-table-lspace:0pt;mso-table-rspace:0pt;background-color:#f6f6f6;width:100%;">
<tr>
<td style="font-family:sans-serif;font-size:14px;vertical-align:top;"> </td>
<td class="container" style="font-family:sans-serif;font-size:14px;vertical-align:top;display:block;max-width:580px;padding:10px;width:580px;Margin:0 auto !important;">
<div class="content" style="box-sizing:border-box;display:block;Margin:0 auto;max-width:580px;padding:10px;">
<!-- START CENTERED WHITE CONTAINER -->
<span class="preheader" style="color:transparent;display:none;height:0;max-height:0;max-width:0;opacity:0;overflow:hidden;mso-hide:all;visibility:hidden;width:0;">Confirmación de Registro en NOLAN.</span>
<table class="main" style="border-collapse:separate;mso-table-lspace:0pt;mso-table-rspace:0pt;background:#fff;border-radius:3px;width:100%;">
<!-- START MAIN CONTENT AREA -->
<tr>
<td class="wrapper" style="font-family:sans-serif;font-size:14px;vertical-align:top;box-sizing:border-box;padding:20px;">
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse:separate;mso-table-lspace:0pt;mso-table-rspace:0pt;width:100%;">
<tr>
<td style="font-family:sans-serif;font-size:14px;vertical-align:top;">
<p style="font-family:sans-serif;font-size:16px;font-weight:normal;margin:0;Margin-bottom:15px;">¡ Registro Satisfactorio !,</p>
<p style="font-family:sans-serif;font-size:13px;font-weight:normal;margin:0;Margin-bottom:15px;">Bienvenido <strong>$COMPANY_NAME</strong>, se ha generado un registro con una contraseña temporal para el acceso a la plataforma NOLAN, tus datos son los siguientes:</p>
<span style="font-family:'Courier New';font-size:13px;font-weight:normal;margin:0;Margin-bottom:15px;">
Usuario: <strong>$USERID</strong>
</span>
<br/>
<span style="font-family:'Courier New';font-size:13px;font-weight:normal;margin:0;Margin-bottom:15px;">
Contraseña: <strong>$PASSWORD</strong>
</span>
<p style="font-family:sans-serif;font-size:13px;font-weight:normal">Puedes usar el siguiente botón para ingresar a la plataforma:</p>
<table border="0" cellpadding="0" cellspacing="0" class="btn btn-primary" style="border-collapse:separate;mso-table-lspace:0pt;mso-table-rspace:0pt;box-sizing:border-box;width:100%;">
<tbody>
<tr>
<td align="left" style="font-family:sans-serif;font-size:14px;vertical-align:top;padding-bottom:15px;">
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse:separate;mso-table-lspace:0pt;mso-table-rspace:0pt;width:auto;">
<tbody>
<tr>
<td style="font-family:sans-serif;font-size:13px;vertical-align:top;background-color:#ffffff;border-radius:5px;text-align:center;background-color:#3498db;"> <a href="$BASE_URL" target="_blank" style="text-decoration:underline;background-color:#ffffff;border:solid 1px #3498db;border-radius:5px;box-sizing:border-box;color:#3498db;cursor:pointer;display:inline-block;font-size:13px;font-weight:bold;margin:0;padding:5px 15px;text-decoration:none;background-color:#3498db;border-color:#3498db;color:#ffffff;">Ingresar a NOLAN</a> </td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
<p style="font-family:sans-serif;font-size:13px;font-weight:normal;margin:0;Margin-bottom:15px;">Al ingresar por primera vez a la plataforma te pedirá un cambio de contraseña. Una vez iniciada la sesión puedes cambiar tu contraseña o tus datos en cualquier momento.</p>
<p style="font-family:sans-serif;font-size:13px;font-weight:normal;margin:0;Margin-bottom:15px;">Gracias por usar NOLAN.</p>
</td>
</tr>
</table>
</td>
</tr>
<!-- END MAIN CONTENT AREA -->
</table>
<!-- START FOOTER -->
<div class="footer" style="clear:both;padding-top:10px;text-align:center;width:100%;">
<table border="0" cellpadding="0" cellspacing="0" style="border-collapse:separate;mso-table-lspace:0pt;mso-table-rspace:0pt;width:100%;">
<tr>
<td class="content-block" style="font-family:sans-serif;font-size:14px;vertical-align:top;color:#999999;font-size:12px;text-align:center;">
<span class="apple-link" style="color:#999999;font-size:12px;text-align:center;">Este es un correo enviado automáticamente, por favor no responder a este correo.</span>
<br>
Correo de contacto: <a href="mailto:joandelgado18@gmail.com">joandelgado18@gmail.com</a>.
</td>
</tr>
</table>
</div>
<!-- END FOOTER -->
<!-- END CENTERED WHITE CONTAINER -->
</div>
</td>
<td style="font-family:sans-serif;font-size:14px;vertical-align:top;"> </td>
</tr>
</table>
</body>
</html>
|
eschuler7/compromisos
|
templates/template_security_registration.html
|
HTML
|
apache-2.0
| 8,066
|
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexing.overlord.autoscaling;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.collect.Collections2;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import com.metamx.common.ISE;
import com.metamx.emitter.EmittingLogger;
import io.druid.indexing.overlord.RemoteTaskRunner;
import io.druid.indexing.overlord.RemoteTaskRunnerWorkItem;
import io.druid.indexing.overlord.TaskRunnerWorkItem;
import io.druid.indexing.overlord.ZkWorker;
import io.druid.indexing.overlord.setup.WorkerBehaviorConfig;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
*/
public class SimpleResourceManagementStrategy implements ResourceManagementStrategy
{
// Emits both ordinary log lines and alert events (see log.makeAlert below).
private static final EmittingLogger log = new EmittingLogger(SimpleResourceManagementStrategy.class);
// Static scaling configuration (timeouts, worker version, event history size).
private final SimpleResourceManagementConfig config;
// Live reference to the worker behavior config, which carries the AutoScaler.
private final Supplier<WorkerBehaviorConfig> workerConfigRef;
// Rolling record of recent provision/terminate events, exposed via getStats().
private final ScalingStats scalingStats;
// Guards all mutable scaling state below; every public method synchronizes on it.
private final Object lock = new Object();
// Node ids we have asked the AutoScaler to provision but have not yet seen appear.
private final Set<String> currentlyProvisioning = Sets.newHashSet();
// Node ids we have asked the AutoScaler to terminate but that still exist.
private final Set<String> currentlyTerminating = Sets.newHashSet();
// Desired worker pool size; -1 means "not yet initialized" (see updateTargetWorkerCount).
private int targetWorkerCount = -1;
// Timestamps of the last provision/terminate requests, used to detect stuck scaling.
private DateTime lastProvisionTime = new DateTime();
private DateTime lastTerminateTime = new DateTime();
/**
 * Creates a strategy driven by the given static config and the (mutable)
 * worker behavior config supplier.
 *
 * @param config          static scaling configuration
 * @param workerConfigRef supplier of the current worker behavior config
 */
@Inject
public SimpleResourceManagementStrategy(
    SimpleResourceManagementConfig config,
    Supplier<WorkerBehaviorConfig> workerConfigRef
)
{
  this.workerConfigRef = workerConfigRef;
  this.config = config;
  // Bound the scaling-event history to the configured number of events.
  this.scalingStats = new ScalingStats(config.getNumEventsToTrack());
}
/**
 * Provisions additional workers, if needed, to reach the target worker count.
 * Also gives up on (and terminates) provisioning requests that have been
 * pending longer than the configured max scaling duration.
 *
 * @param runner source of pending tasks and currently known workers
 * @return true if at least one provision request was issued
 */
@Override
public boolean doProvision(RemoteTaskRunner runner)
{
  Collection<RemoteTaskRunnerWorkItem> pendingTasks = runner.getPendingTasks();
  Collection<ZkWorker> zkWorkers = runner.getWorkers();
  synchronized (lock) {
    boolean didProvision = false;
    final WorkerBehaviorConfig workerConfig = workerConfigRef.get();
    if (workerConfig == null || workerConfig.getAutoScaler() == null) {
      log.warn("No workerConfig available, cannot provision new workers.");
      return false;
    }
    // Count only workers whose version passes the validity check.
    final Predicate<ZkWorker> isValidWorker = createValidWorkerPredicate(config);
    final int currValidWorkers = Collections2.filter(zkWorkers, isValidWorker).size();
    // Map the known workers' IPs to autoscaler node ids.
    final List<String> workerNodeIds = workerConfig.getAutoScaler().ipToIdLookup(
        Lists.newArrayList(
            Iterables.<ZkWorker, String>transform(
                zkWorkers,
                new Function<ZkWorker, String>()
                {
                  @Override
                  public String apply(ZkWorker input)
                  {
                    return input.getWorker().getIp();
                  }
                }
            )
        )
    );
    // Nodes that have shown up as workers are no longer "provisioning".
    currentlyProvisioning.removeAll(workerNodeIds);
    updateTargetWorkerCount(workerConfig, pendingTasks, zkWorkers);
    // Shortfall after accounting for nodes already being provisioned.
    int want = targetWorkerCount - (currValidWorkers + currentlyProvisioning.size());
    while (want > 0) {
      final AutoScalingData provisioned = workerConfig.getAutoScaler().provision();
      final List<String> newNodes;
      // Stop early if the autoscaler could not (or would not) add nodes.
      if (provisioned == null || (newNodes = provisioned.getNodeIds()).isEmpty()) {
        break;
      } else {
        currentlyProvisioning.addAll(newNodes);
        lastProvisionTime = new DateTime();
        scalingStats.addProvisionEvent(provisioned);
        want -= provisioned.getNodeIds().size();
        didProvision = true;
      }
    }
    if (!currentlyProvisioning.isEmpty()) {
      Duration durSinceLastProvision = new Duration(lastProvisionTime, new DateTime());
      log.info("%s provisioning. Current wait time: %s", currentlyProvisioning, durSinceLastProvision);
      // Provisioning has stalled: alert, kill the stuck requests, and reset.
      if (durSinceLastProvision.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) {
        log.makeAlert("Worker node provisioning taking too long!")
           .addData("millisSinceLastProvision", durSinceLastProvision.getMillis())
           .addData("provisioningCount", currentlyProvisioning.size())
           .emit();
        workerConfig.getAutoScaler().terminateWithIds(Lists.newArrayList(currentlyProvisioning));
        currentlyProvisioning.clear();
      }
    }
    return didProvision;
  }
}
/**
 * Terminates excess workers, if any, to shrink the pool toward the target
 * worker count. Only "lazy" workers (idle or invalid; see
 * {@code createLazyWorkerPredicate}) are candidates for termination. Also
 * resets termination requests that have been pending too long.
 *
 * @param runner source of pending tasks, workers, and lazy-worker marking
 * @return true if at least one terminate request was issued
 */
@Override
public boolean doTerminate(RemoteTaskRunner runner)
{
  Collection<RemoteTaskRunnerWorkItem> pendingTasks = runner.getPendingTasks();
  synchronized (lock) {
    final WorkerBehaviorConfig workerConfig = workerConfigRef.get();
    if (workerConfig == null) {
      log.warn("No workerConfig available, cannot terminate workers.");
      return false;
    }
    boolean didTerminate = false;
    // Autoscaler node ids for the runner's current lazy workers.
    final Set<String> workerNodeIds = Sets.newHashSet(
        workerConfig.getAutoScaler().ipToIdLookup(
            Lists.newArrayList(
                Iterables.transform(
                    runner.getLazyWorkers(),
                    new Function<ZkWorker, String>()
                    {
                      @Override
                      public String apply(ZkWorker input)
                      {
                        return input.getWorker().getIp();
                      }
                    }
                )
            )
        )
    );
    // Keep tracking only termination requests whose nodes still exist
    // among the lazy workers; drop the ones that are already gone.
    final Set<String> stillExisting = Sets.newHashSet();
    for (String s : currentlyTerminating) {
      if (workerNodeIds.contains(s)) {
        stillExisting.add(s);
      }
    }
    currentlyTerminating.clear();
    currentlyTerminating.addAll(stillExisting);
    Collection<ZkWorker> workers = runner.getWorkers();
    updateTargetWorkerCount(workerConfig, pendingTasks, workers);
    if (currentlyTerminating.isEmpty()) {
      // Nodes being provisioned count toward pool size to avoid oscillation.
      final int excessWorkers = (workers.size() + currentlyProvisioning.size()) - targetWorkerCount;
      if (excessWorkers > 0) {
        final Predicate<ZkWorker> isLazyWorker = createLazyWorkerPredicate(config);
        // Ask the runner to mark up to excessWorkers lazy workers and
        // collect their IPs for termination.
        final List<String> laziestWorkerIps =
            Lists.transform(
                runner.markWorkersLazy(isLazyWorker, excessWorkers),
                new Function<ZkWorker, String>()
                {
                  @Override
                  public String apply(ZkWorker zkWorker)
                  {
                    return zkWorker.getWorker().getIp();
                  }
                }
            );
        if (laziestWorkerIps.isEmpty()) {
          log.info("Wanted to terminate %,d workers, but couldn't find any lazy ones!", excessWorkers);
        } else {
          log.info(
              "Terminating %,d workers (wanted %,d): %s",
              laziestWorkerIps.size(),
              excessWorkers,
              Joiner.on(", ").join(laziestWorkerIps)
          );
          final AutoScalingData terminated = workerConfig.getAutoScaler().terminate(laziestWorkerIps);
          if (terminated != null) {
            currentlyTerminating.addAll(terminated.getNodeIds());
            lastTerminateTime = new DateTime();
            scalingStats.addTerminateEvent(terminated);
            didTerminate = true;
          }
        }
      }
    } else {
      // A termination is already in flight; don't start another, just
      // watch for it getting stuck.
      Duration durSinceLastTerminate = new Duration(lastTerminateTime, new DateTime());
      log.info("%s terminating. Current wait time: %s", currentlyTerminating, durSinceLastTerminate);
      if (durSinceLastTerminate.isLongerThan(config.getMaxScalingDuration().toStandardDuration())) {
        log.makeAlert("Worker node termination taking too long!")
           .addData("millisSinceLastTerminate", durSinceLastTerminate.getMillis())
           .addData("terminatingCount", currentlyTerminating.size())
           .emit();
        currentlyTerminating.clear();
      }
    }
    return didTerminate;
  }
}
/**
 * @return the rolling record of recent provision/terminate scaling events
 */
@Override
public ScalingStats getStats()
{
  return scalingStats;
}
/**
 * Builds a predicate that marks a worker as "lazy" — i.e. a candidate for
 * termination — when it is either invalid (see
 * {@code createValidWorkerPredicate}) or has been idle at least the
 * configured worker idle timeout.
 *
 * @param config supplies the idle timeout and worker version
 * @return predicate returning true for terminable workers
 */
private static Predicate<ZkWorker> createLazyWorkerPredicate(
    final SimpleResourceManagementConfig config
)
{
  final Predicate<ZkWorker> isValidWorker = createValidWorkerPredicate(config);
  return new Predicate<ZkWorker>()
  {
    @Override
    public boolean apply(ZkWorker worker)
    {
      // An invalid worker is always lazy.
      if (!isValidWorker.apply(worker)) {
        return true;
      }
      // Otherwise, lazy iff idle for at least the configured timeout.
      final long idleMillis =
          System.currentTimeMillis() - worker.getLastCompletedTaskTime().getMillis();
      return idleMillis >= config.getWorkerIdleTimeout().toStandardDuration().getMillis();
    }
  };
}
/**
 * Builds a predicate that accepts workers whose version satisfies the
 * configured minimum worker version.
 *
 * @param config supplies the minimum worker version
 * @return predicate returning true for version-valid workers
 */
private static Predicate<ZkWorker> createValidWorkerPredicate(
    final SimpleResourceManagementConfig config
)
{
  return new Predicate<ZkWorker>()
  {
    @Override
    public boolean apply(ZkWorker zkWorker)
    {
      final String minVersion = config.getWorkerVersion();
      if (minVersion != null) {
        return zkWorker.isValidVersion(minVersion);
      }
      // A missing minimum version is a configuration error, not a "valid" case.
      throw new ISE("No minVersion found! It should be set in your runtime properties or configuration database.");
    }
  };
}
/**
 * Recomputes {@code targetWorkerCount}: initializes it on first call, then
 * moves it up or down by at most one, and only while no provisioning or
 * termination is in flight. Scale-up requires a task pending beyond the
 * threshold (or being below the minimum); scale-down requires at least one
 * lazy worker and being above the minimum.
 *
 * @param workerConfig current worker behavior config (supplies the autoscaler)
 * @param pendingTasks tasks waiting to be assigned
 * @param zkWorkers    currently known workers
 */
private void updateTargetWorkerCount(
    final WorkerBehaviorConfig workerConfig,
    final Collection<RemoteTaskRunnerWorkItem> pendingTasks,
    final Collection<ZkWorker> zkWorkers
)
{
  synchronized (lock) {
    final Collection<ZkWorker> validWorkers = Collections2.filter(
        zkWorkers,
        createValidWorkerPredicate(config)
    );
    final Predicate<ZkWorker> isLazyWorker = createLazyWorkerPredicate(config);
    final int minWorkerCount = workerConfig.getAutoScaler().getMinNumWorkers();
    final int maxWorkerCount = workerConfig.getAutoScaler().getMaxNumWorkers();
    // Refuse to act on a contradictory min/max configuration.
    if (minWorkerCount > maxWorkerCount) {
      log.error("Huh? minWorkerCount[%d] > maxWorkerCount[%d]. I give up!", minWorkerCount, maxWorkerCount);
      return;
    }
    if (targetWorkerCount < 0) {
      // Initialize to size of current worker pool, subject to pool size limits
      targetWorkerCount = Math.max(
          Math.min(
              zkWorkers.size(),
              maxWorkerCount
          ),
          minWorkerCount
      );
      log.info(
          "Starting with a target of %,d workers (current = %,d, min = %,d, max = %,d).",
          targetWorkerCount,
          validWorkers.size(),
          minWorkerCount,
          maxWorkerCount
      );
    }
    // Only adjust the target when no scaling operation is in flight.
    final boolean notTakingActions = currentlyProvisioning.isEmpty()
                                     && currentlyTerminating.isEmpty();
    final boolean shouldScaleUp = notTakingActions
                                  && validWorkers.size() >= targetWorkerCount
                                  && targetWorkerCount < maxWorkerCount
                                  && (hasTaskPendingBeyondThreshold(pendingTasks)
                                      || targetWorkerCount < minWorkerCount);
    final boolean shouldScaleDown = notTakingActions
                                    && validWorkers.size() == targetWorkerCount
                                    && targetWorkerCount > minWorkerCount
                                    && Iterables.any(validWorkers, isLazyWorker);
    if (shouldScaleUp) {
      // Move up one step (the max() also lifts a below-minimum target to min).
      targetWorkerCount = Math.max(targetWorkerCount + 1, minWorkerCount);
      log.info(
          "I think we should scale up to %,d workers (current = %,d, min = %,d, max = %,d).",
          targetWorkerCount,
          validWorkers.size(),
          minWorkerCount,
          maxWorkerCount
      );
    } else if (shouldScaleDown) {
      // Move down one step, never exceeding the maximum.
      targetWorkerCount = Math.min(targetWorkerCount - 1, maxWorkerCount);
      log.info(
          "I think we should scale down to %,d workers (current = %,d, min = %,d, max = %,d).",
          targetWorkerCount,
          validWorkers.size(),
          minWorkerCount,
          maxWorkerCount
      );
    } else {
      log.info(
          "Our target is %,d workers, and I'm okay with that (current = %,d, min = %,d, max = %,d).",
          targetWorkerCount,
          validWorkers.size(),
          minWorkerCount,
          maxWorkerCount
      );
    }
  }
}
/**
 * Checks whether any pending task has been waiting in the queue for at least
 * the configured pending-task timeout.
 *
 * @param pendingTasks tasks currently waiting to be assigned to a worker
 * @return true if at least one task has been pending for the timeout or longer
 */
private boolean hasTaskPendingBeyondThreshold(Collection<RemoteTaskRunnerWorkItem> pendingTasks)
{
  synchronized (lock) {
    final long now = System.currentTimeMillis();
    // The timeout comes from configuration and is loop-invariant; compute it
    // once instead of re-deriving it for every pending task.
    final Duration timeoutDuration = config.getPendingTaskTimeout().toStandardDuration();

    for (TaskRunnerWorkItem pendingTask : pendingTasks) {
      final Duration durationSinceInsertion =
          new Duration(pendingTask.getQueueInsertionTime().getMillis(), now);
      // "Equal or longer than the timeout" is simply "not shorter than it".
      if (!durationSinceInsertion.isShorterThan(timeoutDuration)) {
        return true;
      }
    }
    return false;
  }
}
}
|
fjy/druid
|
indexing-service/src/main/java/io/druid/indexing/overlord/autoscaling/SimpleResourceManagementStrategy.java
|
Java
|
apache-2.0
| 14,050
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.bval.jsr.metadata;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Field;
import java.lang.reflect.TypeVariable;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@code ContainerElementKey}: construction from an annotated
 * type plus type-argument index, traversal of assignable keys up the type
 * hierarchy, and type-variable identity checks.
 */
public class ContainerElementKeyTest {
    // Fixture exposing a List<String> field whose AnnotatedType is inspected.
    public static abstract class HasList {
        public List<String> strings;
    }

    // Fixture that binds List's type variable explicitly (implements List<String>).
    public static abstract class BoundListType implements List<String> {
    }

    // Reflective handle on HasList.strings, refreshed before each test.
    private Field stringsField;

    @Before
    public void setup() throws Exception {
        stringsField = HasList.class.getField("strings");
    }

    @Test
    public void testBasic() {
        // Key for type argument 0 of List<String>: container List, element String.
        final ContainerElementKey containerElementKey =
            new ContainerElementKey(stringsField.getAnnotatedType(), Integer.valueOf(0));

        assertEquals(List.class, containerElementKey.getContainerClass());
        assertEquals(0, containerElementKey.getTypeArgumentIndex().intValue());
        assertEquals(String.class, containerElementKey.getAnnotatedType().getType());
    }

    @Test
    public void testAssignableKeys() {
        final ContainerElementKey containerElementKey =
            new ContainerElementKey(stringsField.getAnnotatedType(), Integer.valueOf(0));

        // Assignable keys walk List's supertypes: Collection first, then Iterable.
        final Iterator<ContainerElementKey> iterator = containerElementKey.getAssignableKeys().iterator();
        {
            assertTrue(iterator.hasNext());
            final ContainerElementKey assignableKey = iterator.next();
            assertEquals(Collection.class, assignableKey.getContainerClass());
            assertEquals(0, assignableKey.getTypeArgumentIndex().intValue());
            // Supertype keys carry the unresolved type variable, not String.
            assertTrue(assignableKey.getAnnotatedType().getType() instanceof TypeVariable<?>);
        }
        {
            assertTrue(iterator.hasNext());
            final ContainerElementKey assignableKey = iterator.next();
            assertEquals(Iterable.class, assignableKey.getContainerClass());
            assertEquals(0, assignableKey.getTypeArgumentIndex().intValue());
            assertTrue(assignableKey.getAnnotatedType().getType() instanceof TypeVariable<?>);
        }
        assertFalse(iterator.hasNext());
    }

    @Test
    public void testAssignableKeysWithExplicitBinding() {
        // null index: the container class itself binds the type argument.
        final ContainerElementKey containerElementKey = new ContainerElementKey(BoundListType.class, null);
        final Iterator<ContainerElementKey> iterator = containerElementKey.getAssignableKeys().iterator();
        {
            assertTrue(iterator.hasNext());
            final ContainerElementKey assignableKey = iterator.next();
            assertEquals(List.class, assignableKey.getContainerClass());
            assertEquals(0, assignableKey.getTypeArgumentIndex().intValue());
        }
        {
            assertTrue(iterator.hasNext());
            final ContainerElementKey assignableKey = iterator.next();
            assertEquals(Collection.class, assignableKey.getContainerClass());
            assertEquals(0, assignableKey.getTypeArgumentIndex().intValue());
        }
        {
            assertTrue(iterator.hasNext());
            final ContainerElementKey assignableKey = iterator.next();
            assertEquals(Iterable.class, assignableKey.getContainerClass());
            assertEquals(0, assignableKey.getTypeArgumentIndex().intValue());
            assertTrue(assignableKey.getAnnotatedType().getType() instanceof TypeVariable<?>);
        }
        assertFalse(iterator.hasNext());
    }

    @Test
    public void testTypeVariableInheritance() {
        final ContainerElementKey containerElementKey =
            new ContainerElementKey(stringsField.getAnnotatedType(), Integer.valueOf(0));

        // The key represents List's type variable and, transitively, the
        // corresponding variables of Collection and Iterable.
        assertTrue(containerElementKey.represents(List.class.getTypeParameters()[0]));
        assertTrue(containerElementKey.represents(Collection.class.getTypeParameters()[0]));
        assertTrue(containerElementKey.represents(Iterable.class.getTypeParameters()[0]));
    }
}
|
apache/bval
|
bval-jsr/src/test/java/org/apache/bval/jsr/metadata/ContainerElementKeyTest.java
|
Java
|
apache-2.0
| 4,874
|
# Copyright 2014 - Rackspace Hosting.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client as barbicanclient
from glanceclient.v2 import client as glanceclient
from heatclient.v1 import client as heatclient
from novaclient.v2 import client as novaclient
from oslo_config import cfg
from oslo_log import log as logging
from magnum.common import exception
from magnum.common import magnum_keystoneclient
from magnum.i18n import _
LOG = logging.getLogger(__name__)
def _common_client_opts():
    """Build the region_name/endpoint_type options shared by every client group.

    Returns a fresh list on each call so that the per-group option lists stay
    independent of one another.
    """
    return [
        cfg.StrOpt('region_name',
                   default=None,
                   help=_('Region in Identity service catalog to use for '
                          'communication with the OpenStack service.')),
        cfg.StrOpt('endpoint_type',
                   default='publicURL',
                   help=_(
                       'Type of endpoint in Identity service catalog to use '
                       'for communication with the OpenStack service.'))]


magnum_client_opts = _common_client_opts()

# Heat additionally needs SSL-related options for its HTTP client.
heat_client_opts = _common_client_opts() + [
    cfg.StrOpt('ca_file',
               help=_('Optional CA cert file to use in SSL connections.')),
    cfg.StrOpt('cert_file',
               help=_('Optional PEM-formatted certificate chain file.')),
    cfg.StrOpt('key_file',
               help=_('Optional PEM-formatted file that contains the '
                      'private key.')),
    cfg.BoolOpt('insecure',
                default=False,
                help=_("If set, then the server's certificate will not "
                       "be verified."))]

glance_client_opts = _common_client_opts()
barbican_client_opts = _common_client_opts()
nova_client_opts = _common_client_opts()

cfg.CONF.register_opts(magnum_client_opts, group='magnum_client')
cfg.CONF.register_opts(heat_client_opts, group='heat_client')
cfg.CONF.register_opts(glance_client_opts, group='glance_client')
cfg.CONF.register_opts(barbican_client_opts, group='barbican_client')
cfg.CONF.register_opts(nova_client_opts, group='nova_client')
class OpenStackClients(object):
    """Convenience class to create and cache client instances."""

    def __init__(self, context):
        # Request context carrying auth credentials for all clients below.
        self.context = context
        # Lazily-created, cached per-service client handles.
        self._keystone = None
        self._heat = None
        self._glance = None
        self._barbican = None
        self._nova = None

    def url_for(self, **kwargs):
        """Resolve a service endpoint URL via the Keystone service catalog."""
        return self.keystone().client.service_catalog.url_for(**kwargs)

    def magnum_url(self):
        """Return the endpoint URL of the 'container' (magnum) service."""
        endpoint_type = self._get_client_option('magnum', 'endpoint_type')
        region_name = self._get_client_option('magnum', 'region_name')
        return self.url_for(service_type='container',
                            endpoint_type=endpoint_type,
                            region_name=region_name)

    @property
    def auth_url(self):
        # Keystone v3 endpoint, handed to service clients as their auth_url.
        return self.keystone().v3_endpoint

    @property
    def auth_token(self):
        # Prefer the token already on the context; fall back to Keystone.
        return self.context.auth_token or self.keystone().auth_token

    def keystone(self):
        """Return a cached KeystoneClientV3, creating it on first use."""
        if self._keystone:
            return self._keystone

        self._keystone = magnum_keystoneclient.KeystoneClientV3(self.context)
        return self._keystone

    def _get_client_option(self, client, option):
        # Look up `option` in the '<client>_client' oslo.config group.
        return getattr(getattr(cfg.CONF, '%s_client' % client), option)

    @exception.wrap_keystone_exception
    def heat(self):
        """Return a cached heat (orchestration) client."""
        if self._heat:
            return self._heat

        endpoint_type = self._get_client_option('heat', 'endpoint_type')
        region_name = self._get_client_option('heat', 'region_name')
        endpoint = self.url_for(service_type='orchestration',
                                endpoint_type=endpoint_type,
                                region_name=region_name)
        # Token-based auth: username/password intentionally left unset.
        args = {
            'endpoint': endpoint,
            'auth_url': self.auth_url,
            'token': self.auth_token,
            'username': None,
            'password': None,
            'ca_file': self._get_client_option('heat', 'ca_file'),
            'cert_file': self._get_client_option('heat', 'cert_file'),
            'key_file': self._get_client_option('heat', 'key_file'),
            'insecure': self._get_client_option('heat', 'insecure')
        }
        self._heat = heatclient.Client(**args)
        return self._heat

    @exception.wrap_keystone_exception
    def glance(self):
        """Return a cached glance (image) client."""
        if self._glance:
            return self._glance

        endpoint_type = self._get_client_option('glance', 'endpoint_type')
        region_name = self._get_client_option('glance', 'region_name')
        endpoint = self.url_for(service_type='image',
                                endpoint_type=endpoint_type,
                                region_name=region_name)
        args = {
            'endpoint': endpoint,
            'auth_url': self.auth_url,
            'token': self.auth_token,
            'username': None,
            'password': None,
        }
        self._glance = glanceclient.Client(**args)
        return self._glance

    @exception.wrap_keystone_exception
    def barbican(self):
        """Return a cached barbican (key manager) client."""
        if self._barbican:
            return self._barbican

        endpoint_type = self._get_client_option('barbican', 'endpoint_type')
        region_name = self._get_client_option('barbican', 'region_name')
        endpoint = self.url_for(service_type='key-manager',
                                endpoint_type=endpoint_type,
                                region_name=region_name)
        # Barbican authenticates via the Keystone session rather than a token.
        session = self.keystone().client.session
        self._barbican = barbicanclient.Client(session=session,
                                               endpoint=endpoint)
        return self._barbican

    @exception.wrap_keystone_exception
    def nova(self):
        """Return a cached nova (compute) client."""
        if self._nova:
            return self._nova
        endpoint_type = self._get_client_option('nova', 'endpoint_type')
        region_name = self._get_client_option('nova', 'region_name')
        endpoint = self.url_for(service_type='compute',
                                endpoint_type=endpoint_type,
                                region_name=region_name)
        self._nova = novaclient.Client(auth_token=self.auth_token)
        # NOTE(review): the endpoint is injected by overwriting management_url
        # after construction — confirm this matches the novaclient version in use.
        self._nova.client.management_url = endpoint
        return self._nova
|
eshijia/magnum
|
magnum/common/clients.py
|
Python
|
apache-2.0
| 8,321
|
/* ************************************************************************
#
# DivConq
#
# http://divconq.com/
#
# Copyright:
# Copyright 2012 eTimeline, LLC. All rights reserved.
#
# License:
# See the license.txt file in the project's top-level directory for details.
#
# Authors:
# * Andy White
#
************************************************************************ */
package divconq.db.common;
import divconq.db.DataRequest;
import divconq.struct.FieldStruct;
import divconq.struct.RecordStruct;
/**
* Get the user id for a given user name.
*
* @author Andy
*
*/
public class UsernameLookupRequest extends DataRequest {
	protected String name = null;

	/**
	 * @return user name used in query
	 */
	public String getUsername() {
		return this.name;
	}

	/**
	 * @param v user name used in query
	 */
	public void setUsername(String v) {
		this.name = v;
	}

	/**
	 * @param username for use in query
	 */
	public UsernameLookupRequest(String username) {
		super("dcUsernameLookup");
		this.name = username;
	}

	/**
	 * Builds the request parameter record, normalizing the username to a
	 * trimmed, lower-case form before it is sent to the database.
	 */
	@Override
	public RecordStruct buildParams() {
		String normalized = null;

		if (this.name != null) {
			normalized = this.name.trim().toLowerCase();
		}
		return new RecordStruct(new FieldStruct("Username", normalized));
	}
}
|
gspandy/divconq
|
divconq.core/src/main/java/divconq/db/common/UsernameLookupRequest.java
|
Java
|
apache-2.0
| 1,357
|
package com.alibaba.json.test.benchmark.basic;
import com.alibaba.fastjson.JSON;
import java.util.LinkedList;
/**
* Created by wenshao on 06/08/2017.
*/
public class LinkedListBenchmark {
    public static void main(String[] args) throws Exception {
        // Use a parameterized LinkedList<Integer> instead of the raw type so
        // the element type is checked at compile time.
        LinkedList<Integer> linkedList = new LinkedList<Integer>();
        for (int i = 0; i < 1000; ++i) {
            linkedList.add(i);
        }
        // Several rounds so the JIT warms up before the final timings.
        for (int i = 0; i < 10; i++) {
            perf_toJSONString(linkedList); // 14825
        }
    }

    /**
     * Serializes {@code obj} with fastjson one million times and prints the
     * elapsed wall-clock time in milliseconds.
     *
     * @param obj the object to serialize repeatedly
     */
    public static void perf_toJSONString(Object obj) {
        long start = System.currentTimeMillis();
        for (int i = 0; i < 1000 * 1000; ++i) {
            JSON.toJSONString(obj);
        }
        long millis = System.currentTimeMillis() - start;
        System.out.println("milli : " + millis);
    }
}
|
alibaba/fastjson
|
src/test/java/com/alibaba/json/test/benchmark/basic/LinkedListBenchmark.java
|
Java
|
apache-2.0
| 809
|
#
# Author:: Tyler Cloke (<tyler@chef.io>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Shared examples asserting that a knife command aborts with usage and error
# output when a mandatory field is missing. The including context must define
# `knife`, `name_args`, `fieldname`, and `stderr`.
shared_examples_for "mandatory field missing" do
  context "when field is nil" do
    before do
      knife.name_args = name_args
    end

    # The command should terminate via SystemExit rather than continue.
    it "exits 1" do
      expect { knife.run }.to raise_error(SystemExit)
    end

    it "prints the usage" do
      expect(knife).to receive(:show_usage)
      expect { knife.run }.to raise_error(SystemExit)
    end

    it "prints a relevant error message" do
      expect { knife.run }.to raise_error(SystemExit)
      expect(stderr.string).to match /You must specify a #{fieldname}/
    end
  end
end
|
higanworks/chef
|
spec/support/shared/unit/knife_shared.rb
|
Ruby
|
apache-2.0
| 1,224
|
//
// NSString+Contains.h
// mage-ios-sdk
//
//
#import <Foundation/Foundation.h>
// Category declaring a substring-containment helper on NSString.
// NOTE(review): the actual semantics (nil handling, OS-version fallback) live
// in the implementation file — confirm there before relying on them.
@interface NSString (Contains)

- (BOOL)safeContainsString:(NSString*)other;

@end
|
ngageoint/mage-ios
|
sdk/NSString+Contains.h
|
C
|
apache-2.0
| 169
|
<?php
namespace Addons\Diy\Widget\Music;
use Addons\Diy\Controller\WidgetController;
class DealController extends WidgetController {
	// Widget metadata; must be provided.
	function info() {
		return array (
			'hidden' => 1,
			'title' => '音乐模块', // Required: display name shown when choosing a widget.
			'icon' => '' // May be empty: HTML for the selected template; when empty a generic lookup method is used.
		);
	}
	// Module parameter configuration (none for this widget).
	function param() {
		return '';
	}
	// Module rendering: resolve the widget definition into HTML.
	function show($widget) {
		return $this->getWidgetHtml ( $widget );
	}
}
|
kmarkcn/gogoda
|
Addons/Diy/Widget/Music/DealController.class.php
|
PHP
|
apache-2.0
| 595
|
/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Some of the code in this class is derived from ccRtp's SRTP implementation,
* which has the following copyright notice:
*
* Copyright (C) 2004-2006 the Minisip Team
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jitsi.impl.neomedia.transform.srtp;
import java.util.*;
import javax.media.*;
import org.bouncycastle.crypto.params.*;
import org.jitsi.bccontrib.params.*;
import org.jitsi.impl.neomedia.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.libjitsi.*;
import org.jitsi.util.*;
/**
* SRTPCryptoContext class is the core class of SRTP implementation. There can
* be multiple SRTP sources in one SRTP session. And each SRTP stream has a
* corresponding SRTPCryptoContext object, identified by SSRC. In this way,
* different sources can be protected independently.
*
* SRTPCryptoContext class acts as a manager class and maintains all the
* information used in SRTP transformation. It is responsible for deriving
* encryption/salting/authentication keys from master keys. And it will invoke
* certain class to encrypt/decrypt (transform/reverse transform) RTP packets.
* It will hold a replay check db and do replay check against incoming packets.
*
* Refer to section 3.2 in RFC3711 for detailed description of cryptographic
* context.
*
* Cryptographic related parameters, i.e. encryption mode / authentication mode,
* master encryption key and master salt key are determined outside the scope of
* SRTP implementation. They can be assigned manually, or can be assigned
* automatically using some key management protocol, such as MIKEY (RFC3830),
* SDES (RFC4568) or Phil Zimmermann's ZRTP protocol (RFC6189).
*
* @author Bing SU (nova.su@gmail.com)
* @author Lyubomir Marinov
*/
public class SRTPCryptoContext
extends BaseSRTPCryptoContext
{
/**
* The name of the <tt>boolean</tt> <tt>ConfigurationService</tt> property
* which indicates whether protection against replay attacks is to be
* activated. The default value is <tt>true</tt>.
*/
public static final String CHECK_REPLAY_PNAME
= SRTPCryptoContext.class.getName() + ".checkReplay";
/**
* The indicator which determines whether protection against replay attacks
* is to be activated. The default value is <tt>true</tt>.
*/
private static boolean checkReplay = true;
/**
* The <tt>Logger</tt> used by the <tt>SRTPCryptoContext</tt> class and its
* instances to print out debug information.
*/
private static final Logger logger
= Logger.getLogger(SRTPCryptoContext.class);
/**
* The indicator which determines whether the method
* {@link #readConfigurationServicePropertiesOnce()} is to read the values
* of certain <tt>ConfigurationService</tt> properties of concern to
* <tt>SRTPCryptoContext</tt> once during the initialization of the first
* instance.
*/
private static boolean readConfigurationServicePropertiesOnce = true;
/**
* Reads the values of certain <tt>ConfigurationService</tt> properties of
* concern to <tt>SRTPCryptoContext</tt> once during the initialization of
* the first instance.
*/
private static synchronized void readConfigurationServicePropertiesOnce()
{
    // Guard clause: only the very first call proceeds; later calls are no-ops.
    if (!readConfigurationServicePropertiesOnce)
        return;
    readConfigurationServicePropertiesOnce = false;

    ConfigurationService cfg = LibJitsi.getConfigurationService();

    if (cfg != null)
        checkReplay = cfg.getBoolean(CHECK_REPLAY_PNAME, checkReplay);
}
/**
* For the receiver only, the rollover counter guessed from the sequence
* number of the received packet that is currently being processed (i.e. the
* value is valid during the execution of
* {@link #reverseTransformPacket(RawPacket)} only.) RFC 3711 refers to it
* by the name <tt>v</tt>.
*/
private int guessedROC;
/**
* Key Derivation Rate, used to derive session keys from master keys
*/
private final long keyDerivationRate;
/**
* RFC 3711: a 32-bit unsigned rollover counter (ROC), which records how
* many times the 16-bit RTP sequence number has been reset to zero after
* passing through 65,535. Unlike the sequence number (SEQ), which SRTP
* extracts from the RTP packet header, the ROC is maintained by SRTP as
* described in Section 3.3.1.
*/
private int roc;
/**
* RFC 3711: for the receiver only, a 16-bit sequence number <tt>s_l</tt>,
* which can be thought of as the highest received RTP sequence number (see
* Section 3.3.1 for its handling), which SHOULD be authenticated since
* message authentication is RECOMMENDED.
*/
private int s_l = 0;
/**
* The indicator which determines whether this instance is used by an SRTP
* sender (<tt>true</tt>) or receiver (<tt>false</tt>).
*/
private final boolean sender;
/**
* The indicator which determines whether {@link #s_l} has seen set i.e.
* appropriately initialized.
*/
private boolean seqNumSet = false;
/**
* Constructs an empty SRTPCryptoContext using ssrc. The other parameters
* are set to default null value.
*
* @param sender <tt>true</tt> if the new instance is to be used by an SRTP
* sender; <tt>false</tt> if the new instance is to be used by an SRTP
* receiver
* @param ssrc SSRC of this SRTPCryptoContext
*/
public SRTPCryptoContext(boolean sender, int ssrc)
{
    super(ssrc);

    this.sender = sender;
    // Defaults: no key derivation rate and a zero rollover counter.
    keyDerivationRate = 0;
    roc = 0;
}
/**
* Constructs a normal SRTPCryptoContext based on the given parameters.
*
* @param sender <tt>true</tt> if the new instance is to be used by an SRTP
* sender; <tt>false</tt> if the new instance is to be used by an SRTP
* receiver
* @param ssrc the RTP SSRC that this SRTP cryptographic context protects.
* @param roc the initial Roll-Over-Counter according to RFC 3711. These
* are the upper 32 bit of the overall 48 bit SRTP packet index. Refer to
* chapter 3.2.1 of the RFC.
* @param keyDerivationRate the key derivation rate defines when to
* recompute the SRTP session keys. Refer to chapter 4.3.1 in the RFC.
* @param masterK byte array holding the master key for this SRTP
* cryptographic context. Refer to chapter 3.2.1 of the RFC about the role
* of the master key.
* @param masterS byte array holding the master salt for this SRTP
* cryptographic context. It is used to computer the initialization vector
* that in turn is input to compute the session key, session authentication
* key and the session salt.
* @param policy SRTP policy for this SRTP cryptographic context, defined
* the encryption algorithm, the authentication algorithm, etc
*/
@SuppressWarnings("fallthrough")
public SRTPCryptoContext(
        boolean sender,
        int ssrc,
        int roc,
        long keyDerivationRate,
        byte[] masterK,
        byte[] masterS,
        SRTPPolicy policy)
{
    super(ssrc, masterK, masterS, policy);

    this.sender = sender;
    this.roc = roc;
    this.keyDerivationRate = keyDerivationRate;

    // Load ConfigurationService-backed settings (e.g. replay checking) once
    // for the whole class when the first full context is created.
    readConfigurationServicePropertiesOnce();
}
/**
* Authenticates a specific <tt>RawPacket</tt> if the <tt>policy</tt> of
* this <tt>SRTPCryptoContext</tt> specifies that authentication is to be
* performed.
*
* @param pkt the <tt>RawPacket</tt> to authenticate
* @return <tt>true</tt> if the <tt>policy</tt> of this
* <tt>SRTPCryptoContext</tt> specifies that authentication is to not be
* performed or <tt>pkt</tt> was successfully authenticated; otherwise,
* <tt>false</tt>
*/
private boolean authenticatePacket(RawPacket pkt)
{
    boolean b = true;

    if (policy.getAuthType() != SRTPPolicy.NULL_AUTHENTICATION)
    {
        int tagLength = policy.getAuthTagLength();

        // get original authentication and store in tempStore
        pkt.readRegionToBuff(
                pkt.getLength() - tagLength,
                tagLength,
                tempStore);
        pkt.shrink(tagLength);

        // save computed authentication in tagStore
        authenticatePacketHMAC(pkt, guessedROC);

        // Compare the received and computed tags in constant time (no early
        // exit) so the loop's timing does not reveal how many leading bytes
        // of the authentication tag matched.
        int diff = 0;

        for (int i = 0; i < tagLength; i++)
            diff |= (tempStore[i] ^ tagStore[i]) & 0xff;
        b = (diff == 0);
    }
    return b;
}
/**
* Checks if a packet is a replayed based on its sequence number. The method
* supports a 64 packet history relative the the specified sequence number.
* The sequence number is guaranteed to be real (i.e. not faked) through
* authentication.
*
* @param seqNo sequence number of the packet
* @param guessedIndex guessed ROC
* @return <tt>true</tt> if the specified sequence number indicates that the
* packet is not a replayed one; <tt>false</tt>, otherwise
*/
boolean checkReplay(int seqNo, long guessedIndex)
{
    // Replay protection can be disabled via the CHECK_REPLAY_PNAME property.
    if (!checkReplay)
        return true;

    // Compute the index of the previously received packet and its delta to
    // the newly received packet.
    long localIndex = (((long) roc) << 16) | s_l;
    long delta = guessedIndex - localIndex;

    if (delta > 0)
    {
        return true; // Packet not received yet.
    }
    else if (-delta > REPLAY_WINDOW_SIZE)
    {
        // NOTE(review): errors are logged only when sender == true although
        // replay checking is documented as a receiver-side concern — confirm
        // the intended condition.
        if (sender)
        {
            logger.error(
                    "Discarding RTP packet with sequence number " + seqNo
                        + ", SSRC " + Long.toString(0xFFFFFFFFL & ssrc)
                        + " because it is outside the replay window! (roc "
                        + roc + ", s_l " + s_l + ", guessedROC "
                        + guessedROC);
        }
        return false; // Packet too old.
    }
    else if (((replayWindow >> (-delta)) & 0x1) != 0)
    {
        // The bit at position -delta in the sliding window marks a packet
        // already seen within the replay window.
        if (sender)
        {
            logger.error(
                    "Discarding RTP packet with sequence number " + seqNo
                        + ", SSRC " + Long.toString(0xFFFFFFFFL & ssrc)
                        + " because it has been received already! (roc "
                        + roc + ", s_l " + s_l + ", guessedROC "
                        + guessedROC);
        }
        return false; // Packet received already!
    }
    else
    {
        return true; // Packet not received yet.
    }
}
/**
* Computes the initialization vector, used later by encryption algorithms,
* based on the label, the packet index, key derivation rate and master salt
* key.
*
* @param label label specified for each type of iv
* @param index 48bit RTP packet index
*/
private void computeIv(long label, long index)
{
    // key_id = <label> || (index DIV key_derivation_rate); a zero rate means
    // the index contribution is omitted.
    long key_id
        = (keyDerivationRate == 0)
            ? (label << 48)
            : ((label << 48) | (index / keyDerivationRate));

    // The first 7 IV bytes come straight from the master salt.
    System.arraycopy(masterSalt, 0, ivStore, 0, 7);

    // The next 7 bytes are key_id XOR-ed onto the master salt, big-endian.
    for (int i = 7; i < 14; i++)
    {
        ivStore[i]
            = (byte) (((byte) (0xFF & (key_id >> (8 * (13 - i))))) ^ masterSalt[i]);
    }

    // The final two bytes are always zero.
    ivStore[14] = ivStore[15] = 0;
}
/**
* Derives a new SRTPCryptoContext for use with a new SSRC. The method
* returns a new SRTPCryptoContext initialized with the data of this
* SRTPCryptoContext. Replacing the SSRC, Roll-over-Counter, and the key
* derivation rate the application cab use this SRTPCryptoContext to
* encrypt/decrypt a new stream (Synchronization source) inside one RTP
* session. Before the application can use this SRTPCryptoContext it must
* call the deriveSrtpKeys method.
*
* @param ssrc The SSRC for this context
* @param roc The Roll-Over-Counter for this context
* @param deriveRate The key derivation rate for this context
* @return a new SRTPCryptoContext with all relevant data set.
*/
public SRTPCryptoContext deriveContext(int ssrc, int roc, long deriveRate)
{
    // Reuse this context's master key, master salt, policy and sender flag;
    // only SSRC, ROC and derivation rate change. The caller must still invoke
    // deriveSrtpKeys on the new context before use.
    return
        new SRTPCryptoContext(
                sender,
                ssrc,
                roc,
                deriveRate,
                masterKey,
                masterSalt,
                policy);
}
/**
* Derives the srtp session keys from the master key
*
* @param index the 48 bit SRTP packet index
*/
synchronized public void deriveSrtpKeys(long index)
{
    // compute the session encryption key (IV label 0x00)
    computeIv(0x00, index);

    cipher.init(true, new KeyParameter(masterKey));
    // The master key is no longer needed once the cipher is keyed; wipe it.
    Arrays.fill(masterKey, (byte) 0);
    cipherCtr.getCipherStream(
            cipher,
            encKey, policy.getEncKeyLength(),
            ivStore);

    // compute the session authentication key (IV label 0x01)
    if (authKey != null)
    {
        computeIv(0x01, index);
        cipherCtr.getCipherStream(
                cipher,
                authKey, policy.getAuthKeyLength(),
                ivStore);

        switch (policy.getAuthType())
        {
        case SRTPPolicy.HMACSHA1_AUTHENTICATION:
            mac.init(new KeyParameter(authKey));
            break;

        case SRTPPolicy.SKEIN_AUTHENTICATION:
            // Skein MAC uses number of bits as MAC size, not just bytes
            mac.init(
                    new ParametersForSkein(
                            new KeyParameter(authKey),
                            ParametersForSkein.Skein512,
                            tagStore.length * 8));
            break;
        }
        // Wipe the derived auth key after it has been handed to the MAC.
        Arrays.fill(authKey, (byte) 0);
    }

    // compute the session salt (IV label 0x02)
    computeIv(0x02, index);
    cipherCtr.getCipherStream(
            cipher,
            saltKey, policy.getSaltKeyLength(),
            ivStore);
    Arrays.fill(masterSalt, (byte) 0);

    // As last step: initialize cipher with derived encryption key.
    if (cipherF8 != null)
        SRTPCipherF8.deriveForIV(cipherF8, encKey, saltKey);
    cipher.init(true, new KeyParameter(encKey));
    // Wipe the session encryption key buffer once the cipher holds it.
    Arrays.fill(encKey, (byte) 0);
}
/**
* For the receiver only, determines/guesses the SRTP index of a received
* SRTP packet with a specific sequence number.
*
* @param seqNo the sequence number of the received SRTP packet
* @return the SRTP index of the received SRTP packet with the specified
* <tt>seqNo</tt>
*/
private long guessIndex(int seqNo)
{
    // Pick the rollover counter (roc - 1, roc, or roc + 1) that places the
    // guessed index closest to the highest sequence number seen so far (s_l),
    // then record it in guessedROC as a side effect.
    int v;

    if (s_l < 32768)
        v = (seqNo - s_l > 32768) ? (roc - 1) : roc;
    else
        v = (s_l - 32768 > seqNo) ? (roc + 1) : roc;
    guessedROC = v;
    return (((long) v) << 16) | seqNo;
}
/**
* Performs Counter Mode AES encryption/decryption
*
* @param pkt the RTP packet to be encrypted/decrypted
*/
public void processPacketAESCM(RawPacket pkt)
{
    int ssrc = pkt.getSSRC();
    int seqNo = pkt.getSequenceNumber();
    // 48-bit packet index: guessed ROC in the high bits, SEQ in the low 16.
    long index = (((long) guessedROC) << 16) | seqNo;

    // byte[] iv = new byte[16];
    // IV bytes 0-3: session salt as-is.
    ivStore[0] = saltKey[0];
    ivStore[1] = saltKey[1];
    ivStore[2] = saltKey[2];
    ivStore[3] = saltKey[3];

    int i;

    // IV bytes 4-7: salt XOR big-endian SSRC.
    for (i = 4; i < 8; i++)
    {
        ivStore[i] = (byte)
            (
                (0xFF & (ssrc >> ((7 - i) * 8)))
                ^
                saltKey[i]
            );
    }

    // IV bytes 8-13: salt XOR big-endian 48-bit packet index.
    for (i = 8; i < 14; i++)
    {
        ivStore[i] = (byte)
            (
                (0xFF & (byte) (index >> ((13 - i) * 8)))
                ^
                saltKey[i]
            );
    }

    // IV bytes 14-15: zeroed; used as the counter by the CTR-mode cipher.
    ivStore[14] = ivStore[15] = 0;

    // Only the payload is transformed; the RTP header stays in the clear.
    int payloadOffset = pkt.getHeaderLength();
    int payloadLength = pkt.getPayloadLength();

    cipherCtr.process(
            cipher,
            pkt.getBuffer(), pkt.getOffset() + payloadOffset, payloadLength,
            ivStore);
}
/**
* Performs F8 Mode AES encryption/decryption
*
* @param pkt the RTP packet to be encrypted/decrypted
*/
public void processPacketAESF8(RawPacket pkt)
{
    // 11 bytes of the RTP header are the 11 bytes of the iv
    // the first byte of the RTP header is not used.
    System.arraycopy(pkt.getBuffer(), pkt.getOffset(), ivStore, 0, 12);
    ivStore[0] = 0;

    // set the ROC in network order into IV
    int roc = guessedROC;

    ivStore[12] = (byte) (roc >> 24);
    ivStore[13] = (byte) (roc >> 16);
    ivStore[14] = (byte) (roc >> 8);
    ivStore[15] = (byte) roc;

    // Only the payload is transformed; the RTP header stays in the clear.
    int payloadOffset = pkt.getHeaderLength();
    int payloadLength = pkt.getPayloadLength();

    SRTPCipherF8.process(
            cipher,
            pkt.getBuffer(), pkt.getOffset() + payloadOffset, payloadLength,
            ivStore,
            cipherF8);
}
/**
* Transforms an SRTP packet into an RTP packet. The method is called when
* an SRTP packet is received. Operations done by the this operation
* include: authentication check, packet replay check and decryption. Both
* encryption and authentication functionality can be turned off as long as
* the SRTPPolicy used in this SRTPCryptoContext is requires no encryption
* and no authentication. Then the packet will be sent out untouched.
* However, this is not encouraged. If no SRTP feature is enabled, then we
* shall not use SRTP TransformConnector. We should use the original method
* (RTPManager managed transportation) instead.
*
* @param pkt the RTP packet that is just received
* @return <tt>true</tt> if the packet can be accepted; <tt>false</tt> if
* the packet failed authentication or failed replay check
*/
synchronized public boolean reverseTransformPacket(RawPacket pkt)
{
if (logger.isDebugEnabled())
{
logger.debug(
"Reverse transform for SSRC " + this.ssrc
+ " SeqNo=" + pkt.getSequenceNumber()
+ " s_l=" + s_l
+ " seqNumSet=" + seqNumSet
+ " guessedROC=" + guessedROC
+ " roc=" + roc);
}
int seqNo = pkt.getSequenceNumber();
if (!seqNumSet)
{
seqNumSet = true;
s_l = seqNo;
}
// Guess the SRTP index (48 bit), see RFC 3711, 3.3.1
// Stores the guessed rollover counter (ROC) in this.guessedROC.
long guessedIndex = guessIndex(seqNo);
boolean b = false;
// Replay control
if (checkReplay(seqNo, guessedIndex))
{
// Authenticate the packet.
if (authenticatePacket(pkt))
{
// If a RawPacket is flagged with Buffer.FLAG_DISCARD, then it
// should have been discarded earlier. Anyway, at least skip its
// decrypting. We flag a RawPacket with Buffer.FLAG_SILENCE when
// we want to ignore its payload. In the context of SRTP, we
// want to skip its decrypting.
if ((pkt.getFlags()
& (Buffer.FLAG_DISCARD | Buffer.FLAG_SILENCE))
== 0)
{
switch (policy.getEncType())
{
// Decrypt the packet using Counter Mode encryption.
case SRTPPolicy.AESCM_ENCRYPTION:
case SRTPPolicy.TWOFISH_ENCRYPTION:
processPacketAESCM(pkt);
break;
// Decrypt the packet using F8 Mode encryption.
case SRTPPolicy.AESF8_ENCRYPTION:
case SRTPPolicy.TWOFISHF8_ENCRYPTION:
processPacketAESF8(pkt);
break;
}
}
// Update the rollover counter and highest sequence number if
// necessary.
update(seqNo, guessedIndex);
b = true;
}
else if (logger.isDebugEnabled())
{
logger.debug("SRTP auth failed for SSRC " + ssrc);
}
}
return b;
}
/**
* Transforms an RTP packet into an SRTP packet. The method is called when a
* normal RTP packet ready to be sent. Operations done by the transformation
* may include: encryption, using either Counter Mode encryption, or F8 Mode
* encryption, adding authentication tag, currently HMC SHA1 method. Both
* encryption and authentication functionality can be turned off as long as
* the SRTPPolicy used in this SRTPCryptoContext is requires no encryption
* and no authentication. Then the packet will be sent out untouched.
* However, this is not encouraged. If no SRTP feature is enabled, then we
* shall not use SRTP TransformConnector. We should use the original method
* (RTPManager managed transportation) instead.
*
* @param pkt the RTP packet that is going to be sent out
*/
synchronized public boolean transformPacket(RawPacket pkt)
{
int seqNo = pkt.getSequenceNumber();
if (!seqNumSet)
{
seqNumSet = true;
s_l = seqNo;
}
// Guess the SRTP index (48 bit), see RFC 3711, 3.3.1
// Stores the guessed ROC in this.guessedROC
long guessedIndex = guessIndex(seqNo);
/*
* XXX The invocation of the checkReplay method here is not meant as
* replay protection but as a consistency check of our implementation.
*/
if (!checkReplay(seqNo, guessedIndex))
return false;
switch (policy.getEncType())
{
// Encrypt the packet using Counter Mode encryption.
case SRTPPolicy.AESCM_ENCRYPTION:
case SRTPPolicy.TWOFISH_ENCRYPTION:
processPacketAESCM(pkt);
break;
// Encrypt the packet using F8 Mode encryption.
case SRTPPolicy.AESF8_ENCRYPTION:
case SRTPPolicy.TWOFISHF8_ENCRYPTION:
processPacketAESF8(pkt);
break;
}
/* Authenticate the packet. */
if (policy.getAuthType() != SRTPPolicy.NULL_AUTHENTICATION)
{
authenticatePacketHMAC(pkt, guessedROC);
pkt.append(tagStore, policy.getAuthTagLength());
}
// Update the ROC if necessary.
update(seqNo, guessedIndex);
return true;
}
/**
* For the receiver only, updates the rollover counter (i.e. {@link #roc})
* and highest sequence number (i.e. {@link #s_l}) in this cryptographic
* context using the SRTP/packet index calculated by
* {@link #guessIndex(int)} and updates the replay list (i.e.
* {@link #replayWindow}). This method is called after all checks were
* successful.
*
* @param seqNo the sequence number of the accepted SRTP packet
* @param guessedIndex the SRTP index of the accepted SRTP packet calculated
* by <tt>guessIndex(int)</tt>
*/
private void update(int seqNo, long guessedIndex)
{
long delta = guessedIndex - ((((long) roc) << 16) | s_l);
/* Update the replay bit mask. */
if (delta > 0)
{
replayWindow <<= delta;
replayWindow |= 1;
}
else
{
replayWindow |= (1 << -delta);
}
if (guessedROC == roc)
{
if (seqNo > s_l)
s_l = seqNo & 0xffff;
}
else if (guessedROC == (roc + 1))
{
s_l = seqNo & 0xffff;
roc = guessedROC;
}
}
}
|
bgrozev/libjitsi
|
src/org/jitsi/impl/neomedia/transform/srtp/SRTPCryptoContext.java
|
Java
|
apache-2.0
| 26,112
|
---
title: AWS
---
{{site.prodname}} provides the following advantages when running in AWS:
- **Network Policy for Containers**: {{site.prodname}} provides fine-grained network security policy for individual containers.
- **No Overlays**: Within each VPC subnet {{site.prodname}} doesn't need an overlay, which means high performance networking for your containers.
- **No 50 Node Limit**: {{site.prodname}} allows you to surpass the 50 node limit, which exists as a consequence of the [AWS 50 route limit](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_Appendix_Limits.html#vpc-limits-route-tables) when using the VPC routing table.
## Requirements
To deploy {{site.prodname}} in AWS, you must ensure that the proper security group rules
have been made and that traffic between containers on different hosts is not
dropped by the VPC. There are a few different options for doing this depending
on your deployment.
#### Configure Security Groups
{{site.prodname}} requires the following security group exceptions to function properly
in AWS.
| Description | Type | Protocol | Port Range |
|:--------------|:----------------|:---------|:-----------|
| BGP | Custom TCP Rule | TCP | 179 |
| IPIP* | Custom Protocol | IPIP | all |
\* The IPIP exception is required only when using {{site.prodname}} with IPIP encapsulation. Keep reading
for information on when IPIP is required in AWS.
#### Routing Traffic Within a Single VPC Subnet
Since {{site.prodname}} assigns IP addresses outside the range used by AWS for EC2 instances, you must disable AWS src/dst
checks on each EC2 instance in your cluster
[as described in the AWS documentation](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_NAT_Instance.html#EIP_Disable_SrcDestCheck). This
allows {{site.prodname}} to route traffic natively within a single VPC subnet without using an overlay or any of the limited VPC routing table entries.
#### Routing Traffic Across Different VPC Subnets / VPCs
If you need to split your deployment across multiple AZs for high availability then each AZ will have its own VPC subnet. To
use {{site.prodname}} across multiple different VPC subnets or [peered VPCs](http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-peering.html),
in addition to disabling src/dst checks as described above you must also enable IPIP encapsulation and outgoing NAT on your {{site.prodname}} IP pools.
See the [IP pool configuration reference]({{site.baseurl}}/{{page.version}}/reference/calicoctl/resources/ippool)
for information on how to configure {{site.prodname}} IP pools.
By default, {{site.prodname}}'s IPIP encapsulation applies to all container-to-container traffic. However,
encapsulation is only required for container traffic that crosses a VPC subnet boundary. For better
performance, you can configure {{site.prodname}} to perform IPIP encapsulation only across VPC subnet boundaries.
To enable the "CrossSubnet" IPIP feature, configure your {{site.prodname}} IP pool resources
to enable IPIP and set the mode to "CrossSubnet".
> **Note**: This feature was introduced in Calico v2.1, if your deployment was created with
> an older version of Calico, or if you are unsure whether your deployment
> is configured correctly, follow the [Configuring IP-in-IP guide]({{site.baseurl}}/{{page.version}}/usage/configuration/ip-in-ip)
> which discusses this in more detail.
>
{: .alert .alert-info}
The following `calicoctl` command will create or modify an IPv4 pool with
CIDR 192.168.0.0/16 using IPIP mode `CrossSubnet`. Adjust the pool CIDR for your deployment.
```
$ calicoctl apply -f - << EOF
apiVersion: projectcalico.org/v3
kind: IPPool
metadata:
name: ippool-cs-1
spec:
cidr: 192.168.0.0/16
ipipMode: CrossSubnet
EOF
```
#### Enabling Workload-to-WAN Traffic
To allow {{site.prodname}} networked containers to reach resources outside of AWS,
you must configure outgoing NAT on your [{{site.prodname}} IP pool]({{site.baseurl}}/{{page.version}}/reference/calicoctl/resources/ippool).
AWS will perform outbound NAT on any traffic which has the source address of an EC2 virtual
machine instance. By enabling outgoing NAT on your {{site.prodname}} IP pool, {{site.prodname}} will
NAT any outbound traffic from the containers hosted on the EC2 virtual machine instances.
The following `calicoctl` command will create or modify an IPv4 pool with
CIDR 192.168.0.0/16 using IPIP mode `CrossSubnet` and enables outgoing NAT.
Adjust the pool CIDR for your deployment.
```
$ calicoctl apply -f - << EOF
apiVersion: projectcalico.org/v3
kind: IPPool
metadata:
name: ippool-1
spec:
cidr: 192.168.0.0/16
ipipMode: CrossSubnet
natOutgoing: true
EOF
```
|
gunjan5/calico
|
master/reference/public-cloud/aws.md
|
Markdown
|
apache-2.0
| 4,762
|
## Contribute
If you want to participate in the development, feel free to [contact us](team-list.html).
### Access to source code
You can also download the source code from our Git repository.
The following URLs give you read-only access to our server:
```xml
ssh://gitlab@build.se.informatik.uni-kiel.de:teetime/teetime.git
```
or
```xml
http://build.se.informatik.uni-kiel.de/gitlab/teetime/teetime.git
```
Last, but not least, TeeTime is also available on [GitHub](https://github.com/teetime-framework/teetime).
### Workflow
If you contribute, please follow our guidelines to comply to our quality standards.
#### Git guidelines
- To update a branch, use the Git commands ``fetch`` and ``rebase``, in this order.
- Create a new branch for every code modification you want to perform.
- Contribute your changes and fixes by using the [merge request](https://build.se.informatik.uni-kiel.de/gitlab/teetime/teetime/merge_requests) function in GitLab.
- Last, but not least, participate in discussions within our issue tracking system.
#### Issue tracking
- Create a ticket for every major bug.
- Comment on tickets you are working on and let others know what you are doing.
- Describe the issue in a short but understandable way.
- Use tags that fit the issue, and mark bugs as ``solved``.
### Support
If you are using TeeTime and run into problems, consider to report the issue or [contact us](team-list.html). Every reported issue can help to improve TeeTime.
We really appreciate any ideas or suggestions.
|
ChristianWulf/teetime
|
src/site/markdown/contribute.md
|
Markdown
|
apache-2.0
| 1,551
|
/**
*
*/
package edu.berkeley.nlp.tokenizer;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Iterator;
import java.util.List;
import edu.berkeley.nlp.util.StringUtils;
/**
 * Tokenizer implementation that conforms to the Penn Treebank tokenization
 * conventions. This tokenizer is a Java implementation of Professor Chris
 * Manning's Flex tokenizer, pgtt-treebank.l. It reads raw text and outputs
 * tokens as edu.stanford.nlp.trees.Words in the Penn treebank format. It can
 * optionally return carriage returns as tokens.
 *
 * @author Teg Grenager (grenager@stanford.edu)
 */
public class PTBTokenizer extends AbstractTokenizer {

    // whether carriage returns should be returned as tokens
    private boolean tokenizeCRs;

    // the underlying lexer that produces the raw token stream; null until
    // setSource(Reader) is called
    PTBLexer lexer;

    /**
     * Constructs a new PTBTokenizer that treats carriage returns as normal
     * whitespace. No source is specified, so hasNext() will return false.
     */
    public PTBTokenizer() {
        this(false);
    }

    /**
     * Constructs a new PTBTokenizer that optionally returns carriage returns
     * as their own token. CRs come back as Words whose text is the value of
     * <code>PTBLexer.cr</code>. No source is specified, so hasNext() will
     * return false until {@link #setSource(Reader)} is called.
     *
     * @param tokenizeCRs whether carriage returns should be emitted as tokens
     */
    public PTBTokenizer(boolean tokenizeCRs) {
        this.tokenizeCRs = tokenizeCRs;
    }

    /**
     * Constructs a new PTBTokenizer that treats carriage returns as normal
     * whitespace.
     *
     * @param r the source of raw text to tokenize
     */
    public PTBTokenizer(Reader r) {
        this(r, false);
    }

    /**
     * Constructs a new PTBTokenizer that optionally returns carriage returns
     * as their own token. CRs come back as Words whose text is the value of
     * <code>PTBLexer.cr</code>.
     *
     * @param r the source of raw text to tokenize
     * @param tokenizeCRs whether carriage returns should be emitted as tokens
     */
    public PTBTokenizer(Reader r, boolean tokenizeCRs) {
        this.tokenizeCRs = tokenizeCRs;
        setSource(r);
    }

    /**
     * Get the next valid Word from the lexer if possible. Returns null when
     * no lexer has been set or when the lexer is exhausted.
     */
    @Override
    protected Object getNext() {
        if (lexer == null) {
            return null;
        }
        Object token = null;
        try {
            token = lexer.next();
            // get rid of CRs if necessary
            while (!tokenizeCRs && PTBLexer.cr.equals(token))
                token = lexer.next();
        } catch (Exception e) {
            // NOTE(review): any lexer exception is silently swallowed here;
            // the inherited nextToken field is cleared but the token read so
            // far is still returned — confirm this is the intended recovery.
            nextToken = null;
        }
        return token;
    }

    /**
     * Reads a file from the argument and prints its tokens one per line. This
     * is mainly as a testing aid, but it can also be quite useful standalone to
     * turn a corpus into a one token per line file of tokens.
     * <p>
     * Usage: <code>java edu.stanford.nlp.process.PTBTokenizer filename
     * </code>
     *
     * @param args
     *            Command line arguments
     */
    public static void main(String[] args) throws IOException {
        if (args.length < 1) {
            System.err.println("usage: java edu.berkeley.nlp.io."
                    + "PTBTokenizer [-cr] filename");
            return;
        }
        // The last argument is the filename; an optional leading "-cr" flag
        // turns on carriage-return tokens.
        PTBTokenizer tokenizer = new PTBTokenizer(new FileReader(
                args[args.length - 1]), "-cr".equals(args[0]));
        List words = tokenizer.tokenize();
        for (int i = 0; i < words.size(); i++)
            System.out.println(words.get(i));
    }

    /**
     * Sets the source of this Tokenizer to be the Reader r.
     *
     * @param r the new source of raw text to tokenize
     */
    public void setSource(Reader r) {
        lexer = new PTBLexer(r);
    }

    /**
     * Returns a presentable version of the given PTB-tokenized text. PTB
     * tokenization splits up punctuation and does various other things that
     * makes simply joining the tokens with spaces look bad. So join the tokens
     * with space and run it through this method to produce nice looking text.
     * It's not perfect, but it works pretty well.
     *
     * @param ptbText space-joined PTB-tokenized text
     * @return a detokenized, human-readable rendering of the text
     */
    public static String ptb2Text(String ptbText) {
        StringBuffer sb = new StringBuffer(ptbText.length()); // probably an
        // overestimate
        PTB2TextLexer lexer = new PTB2TextLexer(new StringReader(ptbText));
        String token;
        try {
            while ((token = lexer.next()) != null)
                sb.append(token);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return (sb.toString());
    }

    /**
     * Returns a presentable version of the given PTB-tokenized words. Pass in a
     * List of Words or Strings, or a Document and this method will join the
     * words with spaces and call {@link #ptb2Text(String) } on the output. This
     * method will check if the elements in the list are subtypes of Word, and
     * if so, it will take the word() values to prevent additional text from
     * creeping in (e.g., POS tags). Otherwise the toString value will be used.
     *
     * @param ptbWords a list of PTB tokens (Words or Strings)
     * @return a detokenized, human-readable rendering of the tokens
     */
    public static String ptb2Text(List ptbWords) {
        // NOTE(review): despite the javadoc above, this loop replaces each
        // String element with itself (a no-op); no Word.word() extraction is
        // actually performed — confirm whether that behavior was lost.
        for (int i = 0; i < ptbWords.size(); i++)
            if (ptbWords.get(i) instanceof String)
                ptbWords.set(i, (ptbWords.get(i)));
        return (ptb2Text(StringUtils.join(ptbWords)));
    }

    /**
     * Returns a factory producing tokenizers that treat carriage returns as
     * normal whitespace.
     */
    public static TokenizerFactory factory() {
        return new PTBTokenizerFactory();
    }

    /**
     * Returns a factory producing tokenizers with the given carriage-return
     * behavior.
     *
     * @param tokenizeCRs whether carriage returns should be emitted as tokens
     */
    public static TokenizerFactory factory(boolean tokenizeCRs) {
        return new PTBTokenizerFactory(tokenizeCRs);
    }

    // Factory that captures the carriage-return option and stamps out
    // PTBTokenizer instances for arbitrary Readers.
    static class PTBTokenizerFactory implements TokenizerFactory {

        protected boolean tokenizeCRs;

        /**
         * Constructs a new PTBTokenizerFactory that treats carriage returns as
         * normal whitespace.
         */
        public PTBTokenizerFactory() {
            this(false);
        }

        /**
         * Constructs a new PTBTokenizer that optionally returns carriage
         * returns as their own token. CRs come back as Words whose text is the
         * value of <code>PTBLexer.cr</code>.
         */
        public PTBTokenizerFactory(boolean tokenizeCRs) {
            this.tokenizeCRs = tokenizeCRs;
        }

        public Iterator getIterator(Reader r) {
            return getTokenizer(r);
        }

        public Tokenizer getTokenizer(Reader r) {
            return new PTBTokenizer(r, tokenizeCRs);
        }
    }
}
|
text-machine-lab/CliRel
|
model/kim/berkeleyparser/src/edu/berkeley/nlp/tokenizer/PTBTokenizer.java
|
Java
|
apache-2.0
| 5,463
|
package eu.xenit.de.testing.greeting;
/**
 * Immutable holder that wraps a {@link GreetingService} and exposes it via a
 * getter — presumably used to verify bean wiring in tests; confirm with
 * callers.
 */
public class GreetingServiceWrapper {

    private final GreetingService greetingService;

    public GreetingServiceWrapper(GreetingService greetingService) {
        this.greetingService = greetingService;
    }

    /** Returns the wrapped {@link GreetingService}. */
    public GreetingService getGreetingService() {
        return greetingService;
    }
}
|
laurentvdl/dynamic-extensions-for-alfresco
|
integration-tests/test-bundle/src/main/java/eu/xenit/de/testing/greeting/GreetingServiceWrapper.java
|
Java
|
apache-2.0
| 344
|
package ctxlogrus_test
import (
"context"
"github.com/grpc-ecosystem/go-grpc-middleware/logging/logrus/ctxlogrus"
"github.com/grpc-ecosystem/go-grpc-middleware/tags"
)
// ExampleExtract_unary shows how to add custom fields to a request's context
// and then extract a request-scoped logger that will carry them on every log
// statement.
func ExampleExtract_unary() {
	ctx := context.Background()

	// Tags set on the context are added to the extracted logger as log fields.
	grpc_ctxtags.Extract(ctx).Set("custom_tags.string", "something").Set("custom_tags.int", 1337)

	// Extract a single request-scoped logrus.Logger and log messages.
	l := ctxlogrus.Extract(ctx)
	l.Info("some ping")
	l.Info("another ping")
}
|
grpc-ecosystem/go-grpc-middleware
|
logging/logrus/ctxlogrus/examples_test.go
|
GO
|
apache-2.0
| 649
|
#ifndef BOOST_MPL_ALWAYS_HPP_INCLUDED
#define BOOST_MPL_ALWAYS_HPP_INCLUDED
// Copyright Aleksey Gurtovoy 2001-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/mpl for documentation.
// $Source$
// $Date: 2004-09-02 11:41:37 -0400 (Thu, 02 Sep 2004) $
// $Revision: 24874 $
#include <boost/mpl/aux_/preprocessor/def_params_tail.hpp>
#include <boost/mpl/aux_/na.hpp>
#include <boost/mpl/aux_/arity_spec.hpp>
namespace boost { namespace mpl {

// Metafunction class that ignores its argument(s) and always yields 'Value'.
// The BOOST_MPL_PP_NESTED_DEF_PARAMS_TAIL macro expands additional defaulted
// (na) template parameters so 'apply' can be invoked with any arity the
// library is configured for.
template< typename Value > struct always
{
    template<
          typename T
        BOOST_MPL_PP_NESTED_DEF_PARAMS_TAIL(1, typename T, na)
        >
    struct apply
    {
        typedef Value type;
    };
};

// Registers 'always' with MPL's arity machinery as a unary metafunction.
BOOST_MPL_AUX_ARITY_SPEC(1, always)

}}
#endif // BOOST_MPL_ALWAYS_HPP_INCLUDED
|
jaredhoberock/gotham
|
windows/include/boost/mpl/always.hpp
|
C++
|
apache-2.0
| 933
|
/*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.savings.business;
import java.util.Date;
import org.mifos.accounts.business.AccountActionEntity;
import org.mifos.accounts.business.AccountPaymentEntity;
import org.mifos.accounts.util.helpers.AccountActionTypes;
import org.mifos.customers.business.CustomerBO;
import org.mifos.customers.personnel.business.PersonnelBO;
import org.mifos.framework.util.helpers.Money;
/**
 * Default {@link SavingsTransactionActivityHelper} implementation which builds
 * the activity and transaction-detail entities recorded against a savings
 * account for deposits and withdrawals.
 */
public class SavingsTransactionActivityHelperImpl implements SavingsTransactionActivityHelper {

    @Override
    public SavingsActivityEntity createSavingsActivityForDeposit(final PersonnelBO createdBy,
            final Money amountToDeposit, final Money savingsBalance, final Date createdDate,
            final SavingsBO savingsAccount) {
        return new SavingsActivityEntity(createdBy,
                new AccountActionEntity(AccountActionTypes.SAVINGS_DEPOSIT),
                amountToDeposit, savingsBalance, createdDate, savingsAccount);
    }

    @Override
    public SavingsActivityEntity createSavingsActivityForWithdrawal(final AccountPaymentEntity payment,
            final Money savingsBalance, final SavingsBO savingsBO) {
        // The actor, amount and date of the activity are all taken from the
        // payment that triggered the withdrawal.
        return new SavingsActivityEntity(payment.getCreatedByUser(),
                new AccountActionEntity(AccountActionTypes.SAVINGS_WITHDRAWAL),
                payment.getAmount(), savingsBalance, payment.getPaymentDate(), savingsBO);
    }

    @Override
    public SavingsTrxnDetailEntity createSavingsTrxnForDeposit(final AccountPaymentEntity payment, final Money amount,
            final CustomerBO payingCustomer, final SavingsScheduleEntity savingsInstallment, final Money savingsBalance) {
        final Date transactionDate = payment.getPaymentDate();

        // Unscheduled deposits fall back to the transaction date and carry no
        // installment number.
        Date dueDate = transactionDate;
        Short installmentNumber = null;
        if (savingsInstallment != null) {
            dueDate = savingsInstallment.getActionDate();
            installmentNumber = savingsInstallment.getInstallmentId();
        }

        return SavingsTrxnDetailEntity.savingsDeposit(payment, payingCustomer, savingsBalance, amount,
                payment.getCreatedByUser(), dueDate, dueDate, transactionDate, installmentNumber);
    }

    @Override
    public SavingsTrxnDetailEntity createSavingsTrxnForWithdrawal(final AccountPaymentEntity payment,
            final Money amountToWithdraw, final CustomerBO payingCustomer, final Money accountBalance) {
        final Date transactionDate = payment.getPaymentDate();
        return SavingsTrxnDetailEntity.savingsWithdrawal(payment, payingCustomer, accountBalance,
                amountToWithdraw, payment.getCreatedByUser(), transactionDate, transactionDate, transactionDate);
    }
}
|
maduhu/mifos-head
|
application/src/main/java/org/mifos/accounts/savings/business/SavingsTransactionActivityHelperImpl.java
|
Java
|
apache-2.0
| 4,159
|
/*
* MinIO .NET Library for Amazon S3 Compatible Cloud Storage, (C) 2017 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Minio.Exceptions
{
    /// <summary>
    /// Exception type carrying the error message for a failed delete-object
    /// operation.
    /// </summary>
    public class DeleteObjectException : MinioException
    {
        public DeleteObjectException(string message) : base(message)
        {
        }
    }
}
|
minio/minio-dotnet
|
Minio/Exceptions/DeleteObjectException.cs
|
C#
|
apache-2.0
| 837
|
// Copyright (c) 2017, Baidu.com, Inc. All Rights Reserved
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include <cstring>
#include <string>
#include "gen_cpp/PaloInternalService_types.h"
#include "olap_scanner.h"
#include "olap_scan_node.h"
#include "olap_utils.h"
#include "olap/olap_reader.h"
#include "olap/field.h"
#include "service/backend_options.h"
#include "runtime/descriptors.h"
#include "runtime/runtime_state.h"
#include "runtime/mem_pool.h"
#include "runtime/mem_tracker.h"
#include "util/mem_util.hpp"
#include "util/network_util.h"
#include "util/palo_metrics.h"
namespace palo {
// Names of the runtime-profile counters published by the scanner.
static const std::string SCANNER_THREAD_TOTAL_WALLCLOCK_TIME =
    "ScannerThreadsTotalWallClockTime";
static const std::string MATERIALIZE_TUPLE_TIMER =
    "MaterializeTupleTime(*)";
// Constructs a scanner bound to one tablet scan range. The heavy lifting is
// done in _prepare(); because a constructor cannot return a Status, its
// outcome is stashed in _ctor_status and re-checked in open().
OlapScanner::OlapScanner(
        RuntimeState* runtime_state,
        OlapScanNode* parent,
        bool aggregation,
        PaloScanRange* scan_range,
        const std::vector<OlapScanRange>& key_ranges)
            : _runtime_state(runtime_state),
            _parent(parent),
            _tuple_desc(parent->_tuple_desc),
            _profile(parent->runtime_profile()),
            _string_slots(parent->_string_slots),
            _is_open(false),
            _aggregation(aggregation),
            _tuple_idx(parent->_tuple_idx),
            _direct_conjunct_size(parent->_direct_conjunct_size) {
    _reader.reset(new Reader());
    DCHECK(_reader.get() != NULL);
    _ctor_status = _prepare(scan_range, key_ranges, parent->_olap_filter, parent->_is_null_vector);
    if (!_ctor_status.ok()) {
        // Fixed typo in log message: "preapre" -> "prepare".
        LOG(WARNING) << "OlapScanner prepare failed, status:" << _ctor_status.get_error_msg();
    }
    // Share the parent's counters so per-scanner work is aggregated there.
    _rows_read_counter = parent->rows_read_counter();
    _rows_pushed_cond_filtered_counter = parent->_rows_pushed_cond_filtered_counter;
}

OlapScanner::~OlapScanner() {
}
// Resolves the tablet referenced by the scan range, validates its latest
// version hash, and initializes the reader parameters. Invoked from the
// constructor; any error is surfaced via the returned Status (stored in
// _ctor_status by the caller).
Status OlapScanner::_prepare(
        PaloScanRange* scan_range, const std::vector<OlapScanRange>& key_ranges,
        const std::vector<TCondition>& filters, const std::vector<TCondition>& is_nulls) {
    // Get olap table: tablet id, schema hash, version and version hash are
    // transported as strings in the Thrift scan range and parsed here.
    TTabletId tablet_id = scan_range->scan_range().tablet_id;
    SchemaHash schema_hash =
        strtoul(scan_range->scan_range().schema_hash.c_str(), nullptr, 10);
    _version =
        strtoul(scan_range->scan_range().version.c_str(), nullptr, 10);
    VersionHash version_hash =
        strtoul(scan_range->scan_range().version_hash.c_str(), nullptr, 10);
    {
        _olap_table = OLAPEngine::get_instance()->get_table(tablet_id, schema_hash);
        if (_olap_table.get() == nullptr) {
            OLAP_LOG_WARNING("table does not exists. [tablet_id=%ld schema_hash=%d]",
                             tablet_id, schema_hash);
            return Status("table does not exists");
        }
        {
            // Read the header under its lock so the latest version message
            // cannot change while we compare hashes.
            AutoRWLock auto_lock(_olap_table->get_header_lock_ptr(), true);
            const FileVersionMessage* message = _olap_table->latest_version();
            if (message == NULL) {
                OLAP_LOG_WARNING("fail to get latest version. [tablet_id=%ld]", tablet_id);
                return Status("fail to get latest version");
            }
            // Only when the requested version is the tablet's latest do we
            // verify that the version hash matches the request.
            if (message->end_version() == _version
                    && message->version_hash() != version_hash) {
                OLAP_LOG_WARNING("fail to check latest version hash. "
                                 "[tablet_id=%ld version_hash=%ld request_version_hash=%ld]",
                                 tablet_id, message->version_hash(), version_hash);
                return Status("fail to check version hash");
            }
        }
    }
    // Initialize _params (conditions, key ranges, return columns).
    {
        RETURN_IF_ERROR(_init_params(key_ranges, filters, is_nulls));
    }
    return Status::OK;
}
// Opens the scanner: surfaces any error recorded during construction,
// decides whether pushdown conjuncts are in play, and initializes the
// underlying storage reader with the prepared parameters.
Status OlapScanner::open() {
    RETURN_IF_ERROR(_ctor_status);

    // Conjuncts beyond _direct_conjunct_size are the ones that were pushed
    // down; evaluate them only when they actually exist.
    if (_conjunct_ctxs.size() > _direct_conjunct_size) {
        _use_pushdown_conjuncts = true;
    }

    auto res = _reader->init(_params);
    if (res != OLAP_SUCCESS) {
        OLAP_LOG_WARNING("fail to init reader.[res=%d]", res);
        return Status("failed to initialize storage reader");
    }
    return Status::OK;
}
// Fills _params (the reader request) from the scan's key ranges, filter
// conditions and the materialized return columns, and initializes the row
// cursor used to receive rows from storage.
Status OlapScanner::_init_params(
        const std::vector<OlapScanRange>& key_ranges,
        const std::vector<TCondition>& filters,
        const std::vector<TCondition>& is_nulls) {
    RETURN_IF_ERROR(_init_return_columns());

    _params.olap_table = _olap_table;
    _params.reader_type = READER_FETCH;
    _params.aggregation = _aggregation;
    _params.version = Version(0, _version);

    // Condition: plain filters and IS NULL predicates are both passed to the
    // reader as conditions.
    for (auto& filter : filters) {
        _params.conditions.push_back(filter);
    }
    for (auto& is_null_str : is_nulls) {
        _params.conditions.push_back(is_null_str);
    }

    // Range: translate each key range into start/end keys with inclusive or
    // exclusive comparison operators. A range whose begin is the single
    // NEGATIVE_INFINITY marker is unbounded and contributes no keys.
    for (auto& key_range : key_ranges) {
        if (key_range.begin_scan_range.size() == 1 &&
                key_range.begin_scan_range[0] == NEGATIVE_INFINITY) {
            continue;
        }

        _params.range = (key_range.begin_include ? "ge" : "gt");
        _params.end_range = (key_range.end_include ? "le" : "lt");

        TFetchStartKey start_key;
        for (auto key : key_range.begin_scan_range) {
            start_key.key.push_back(key);
        }
        _params.start_key.push_back(start_key);

        TFetchEndKey end_key;
        for (auto key : key_range.end_scan_range) {
            end_key.key.push_back(key);
        }
        _params.end_key.push_back(end_key);
    }

    // TODO(zc)
    _params.profile = _profile;
    _params.runtime_state = _runtime_state;

    if (_aggregation) {
        _params.return_columns = _return_columns;
    } else {
        // Without aggregation, all key columns must come first (the reader
        // merge-sorts on them), followed by the requested non-key columns.
        for (size_t i = 0; i < _olap_table->num_key_fields(); ++i) {
            _params.return_columns.push_back(i);
        }
        for (auto index : _return_columns) {
            if (_olap_table->tablet_schema()[index].is_key) {
                continue;
            } else {
                _params.return_columns.push_back(index);
            }
        }
    }

    // use _params.return_columns, because reader use this to merge sort
    OLAPStatus res = _read_row_cursor.init(_olap_table->tablet_schema(), _params.return_columns);
    if (res != OLAP_SUCCESS) {
        OLAP_LOG_WARNING("fail to init row cursor.[res=%d]", res);
        return Status("failed to initialize storage read row cursor");
    }
    _read_row_cursor.allocate_memory_for_string_type(_olap_table->tablet_schema());

    // Cache the field accessors for the requested columns.
    for (auto cid : _return_columns) {
        _query_fields.push_back(_read_row_cursor.get_field_by_index(cid));
    }
    return Status::OK;
}
// Resolves each materialized slot of the tuple descriptor to a column index
// in the OLAP table schema, recording the per-column value capacity and the
// slot itself. Fails when a slot has no matching column or when nothing is
// materialized.
Status OlapScanner::_init_return_columns() {
    for (auto slot : _tuple_desc->slots()) {
        if (!slot->is_materialized()) {
            continue;
        }
        int32_t index = _olap_table->get_field_index(slot->col_name());
        if (index < 0) {
            std::stringstream ss;
            // Fixed typo in error message: "invalied" -> "invalid".
            ss << "field name is invalid. field=" << slot->col_name();
            LOG(WARNING) << ss.str();
            return Status(ss.str());
        }
        _return_columns.push_back(index);
        // For variable-length types the declared length includes the leading
        // length prefix; report only the payload capacity.
        if (_olap_table->tablet_schema()[index].type == OLAP_FIELD_TYPE_VARCHAR ||
                _olap_table->tablet_schema()[index].type == OLAP_FIELD_TYPE_HLL) {
            _request_columns_size.push_back(
                _olap_table->tablet_schema()[index].length - sizeof(StringLengthType));
        } else {
            _request_columns_size.push_back(_olap_table->tablet_schema()[index].length);
        }
        _query_slots.push_back(slot);
    }
    if (_return_columns.empty()) {
        return Status("failed to build storage scanner, no materialized slot!");
    }
    return Status::OK;
}
// Fills 'batch' with rows read from storage until the batch is full, the
// scanner is exhausted (*eof set by the reader), or the per-call raw-row
// threshold is reached.  Each row is filtered through direct conjuncts and,
// optionally, pushdown conjuncts before being committed to the batch.
Status OlapScanner::get_batch(
        RuntimeState* state, RowBatch* batch, bool* eof) {
    // 2. Allocate Row's Tuple buf
    uint8_t *tuple_buf = batch->tuple_data_pool()->allocate(
        state->batch_size() * _tuple_desc->byte_size());
    bzero(tuple_buf, state->batch_size() * _tuple_desc->byte_size());
    Tuple *tuple = reinterpret_cast<Tuple*>(tuple_buf);
    // Bound the amount of raw reading done by a single get_batch() call.
    int64_t raw_rows_threshold = raw_rows_read() + config::palo_scanner_row_num;
    {
        SCOPED_TIMER(_parent->_scan_timer);
        while (true) {
            // Batch is full, break
            if (batch->is_full()) {
                break;
            }
            // Read one row from reader
            auto res = _reader->next_row_with_aggregation(&_read_row_cursor, eof);
            if (res != OLAP_SUCCESS) {
                return Status("Internal Error: read storage fail.");
            }
            // If we reach end of this scanner, break
            if (UNLIKELY(*eof)) {
                break;
            }
            _num_rows_read++;
            _convert_row_to_tuple(tuple);
            if (VLOG_ROW_IS_ON) {
                VLOG_ROW << "OlapScanner input row: " << print_tuple(tuple, *_tuple_desc);
            }
            // 3.4 Set tuple to RowBatch (not committed yet)
            int row_idx = batch->add_row();
            TupleRow* row = batch->get_row(row_idx);
            row->set_tuple(_tuple_idx, tuple);
            // do/while(false) acts as a labelled "skip this row" scope: every
            // 'break' below abandons the current row without leaving the outer
            // read loop.
            do {
                // 3.5.1 Using direct conjuncts to filter data
                if (_eval_conjuncts_fn != nullptr) {
                    if (!_eval_conjuncts_fn(&_conjunct_ctxs[0], _direct_conjunct_size, row)) {
                        // check direct conjuncts fail then clear tuple for reuse
                        // make sure to reset null indicators since we're overwriting
                        // the tuple assembled for the previous row
                        tuple->init(_tuple_desc->byte_size());
                        break;
                    }
                } else {
                    if (!ExecNode::eval_conjuncts(&_conjunct_ctxs[0], _direct_conjunct_size, row)) {
                        // check direct conjuncts fail then clear tuple for reuse
                        // make sure to reset null indicators since we're overwriting
                        // the tuple assembled for the previous row
                        tuple->init(_tuple_desc->byte_size());
                        break;
                    }
                }
                // 3.5.2 Using pushdown conjuncts to filter data
                if (_use_pushdown_conjuncts) {
                    if (!ExecNode::eval_conjuncts(
                            &_conjunct_ctxs[_direct_conjunct_size],
                            _conjunct_ctxs.size() - _direct_conjunct_size, row)) {
                        // check pushdown conjuncts fail then clear tuple for reuse
                        // make sure to reset null indicators since we're overwriting
                        // the tuple assembled for the previous row
                        tuple->init(_tuple_desc->byte_size());
                        _num_rows_pushed_cond_filtered++;
                        break;
                    }
                }
                // Copy string slot payloads into the batch's memory pool so the
                // committed row does not dangle once the read cursor advances.
                for (auto desc : _string_slots) {
                    StringValue* slot = tuple->get_string_slot(desc->tuple_offset());
                    if (slot->len != 0) {
                        uint8_t* v = batch->tuple_data_pool()->allocate(slot->len);
                        memory_copy(v, slot->ptr, slot->len);
                        slot->ptr = reinterpret_cast<char*>(v);
                    }
                }
                if (VLOG_ROW_IS_ON) {
                    VLOG_ROW << "OlapScanner output row: " << print_tuple(tuple, *_tuple_desc);
                }
                // check direct && pushdown conjuncts success then commit tuple
                batch->commit_last_row();
                // Advance to the next tuple slot in the pre-allocated buffer.
                char* new_tuple = reinterpret_cast<char*>(tuple);
                new_tuple += _tuple_desc->byte_size();
                tuple = reinterpret_cast<Tuple*>(new_tuple);
                // compute pushdown conjuncts filter rate
                if (_use_pushdown_conjuncts) {
                    // Only evaluate the pushdown return rate after enough rows
                    // have been observed to make it meaningful.
                    if (_num_rows_read > 32768) {
                        int32_t pushdown_return_rate
                            = _num_rows_read * 100 / (_num_rows_read + _num_rows_pushed_cond_filtered);
                        if (pushdown_return_rate > config::palo_max_pushdown_conjuncts_return_rate) {
                            // Pushdown filtering is not paying off; disable it
                            // for the rest of this scan.
                            _use_pushdown_conjuncts = false;
                            VLOG(2) << "Stop Using PushDown Conjuncts. "
                                << "PushDownReturnRate: " << pushdown_return_rate << "%"
                                << " MaxPushDownReturnRate: "
                                << config::palo_max_pushdown_conjuncts_return_rate << "%";
                        }
                    }
                }
            } while (false);
            if (raw_rows_read() >= raw_rows_threshold) {
                break;
            }
        }
    }
    return Status::OK;
}
// Translates the storage-layer row currently held in _read_row_cursor into the
// execution-engine tuple layout.  _query_slots[i] and _query_fields[i] are
// parallel arrays describing the destination slot and the source field.
void OlapScanner::_convert_row_to_tuple(Tuple* tuple) {
    char* row = _read_row_cursor.get_buf();
    size_t slots_size = _query_slots.size();
    for (int i = 0; i < slots_size; ++i) {
        SlotDescriptor* slot_desc = _query_slots[i];
        const Field* field = _query_fields[i];
        if (field->is_null(row)) {
            tuple->set_null(slot_desc->null_indicator_offset());
            continue;
        }
        char* ptr = (char*)field->get_ptr(row);
        size_t len = field->size();
        switch (slot_desc->type().type) {
        case TYPE_CHAR: {
            // Zero-copy: point the StringValue at the slice's storage buffer.
            StringSlice* slice = reinterpret_cast<StringSlice*>(ptr);
            StringValue *slot = tuple->get_string_slot(slot_desc->tuple_offset());
            slot->ptr = slice->data;
            // strnlen stops at the first NUL, trimming trailing padding (if
            // any) without reading past the slice.
            slot->len = strnlen(slot->ptr, slice->size);
            break;
        }
        case TYPE_VARCHAR:
        case TYPE_HLL: {
            // VARCHAR/HLL carry an explicit length; no trimming needed.
            StringSlice* slice = reinterpret_cast<StringSlice*>(ptr);
            StringValue *slot = tuple->get_string_slot(slot_desc->tuple_offset());
            slot->ptr = slice->data;
            slot->len = slice->size;
            break;
        }
        case TYPE_DECIMAL: {
            DecimalValue *slot = tuple->get_decimal_slot(slot_desc->tuple_offset());
            // TODO(lingbin): should remove this assign, use set member function
            // Storage layout: int64 integer part followed by int32 fraction part.
            int64_t int_value = *(int64_t*)(ptr);
            int32_t frac_value = *(int32_t*)(ptr + sizeof(int64_t));
            *slot = DecimalValue(int_value, frac_value);
            break;
        }
        case TYPE_DATETIME: {
            DateTimeValue *slot = tuple->get_datetime_slot(slot_desc->tuple_offset());
            uint64_t value = *reinterpret_cast<uint64_t*>(ptr);
            // An unparsable storage value degrades to NULL rather than failing.
            if (!slot->from_olap_datetime(value)) {
                tuple->set_null(slot_desc->null_indicator_offset());
            }
            break;
        }
        case TYPE_DATE: {
            DateTimeValue *slot = tuple->get_datetime_slot(slot_desc->tuple_offset());
            // Reassemble the 3-byte little-endian olap date into a uint64:
            // ptr[2] is the most significant byte, ptr[0] the least.
            uint64_t value = 0;
            value = *(unsigned char*)(ptr + 2);
            value <<= 8;
            value |= *(unsigned char*)(ptr + 1);
            value <<= 8;
            value |= *(unsigned char*)(ptr);
            if (!slot->from_olap_date(value)) {
                tuple->set_null(slot_desc->null_indicator_offset());
            }
            break;
        }
        default: {
            // Fixed-width types: raw byte copy into the tuple slot.
            void *slot = tuple->get_slot(slot_desc->tuple_offset());
            memory_copy(slot, ptr, len);
            break;
        }
        }
    }
}
// Publishes this scanner's accumulated statistics to the parent scan node's
// profile counters and to the process-wide query metrics.  Idempotent: only
// the first call after construction has any effect (guarded by
// _has_update_counter).
void OlapScanner::update_counter() {
    if (_has_update_counter) {
        return;
    }
    COUNTER_UPDATE(_rows_read_counter, _num_rows_read);
    COUNTER_UPDATE(_rows_pushed_cond_filtered_counter, _num_rows_pushed_cond_filtered);
    COUNTER_UPDATE(_parent->_io_timer, _reader->stats().io_ns);
    COUNTER_UPDATE(_parent->_read_compressed_counter, _reader->stats().compressed_bytes_read);
    COUNTER_UPDATE(_parent->_decompressor_timer, _reader->stats().decompress_ns);
    COUNTER_UPDATE(_parent->_read_uncompressed_counter, _reader->stats().uncompressed_bytes_read);
    COUNTER_UPDATE(_parent->bytes_read_counter(), _reader->stats().bytes_read);
    COUNTER_UPDATE(_parent->_block_load_timer, _reader->stats().block_load_ns);
    COUNTER_UPDATE(_parent->_block_load_counter, _reader->stats().blocks_load);
    COUNTER_UPDATE(_parent->_block_fetch_timer, _reader->stats().block_fetch_ns);
    COUNTER_UPDATE(_parent->_raw_rows_counter, _reader->stats().raw_rows_read);
    // COUNTER_UPDATE(_parent->_filtered_rows_counter, _reader->stats().num_rows_filtered);
    COUNTER_UPDATE(_parent->_vec_cond_timer, _reader->stats().vec_cond_ns);
    COUNTER_UPDATE(_parent->_rows_vec_cond_counter, _reader->stats().rows_vec_cond_filtered);
    COUNTER_UPDATE(_parent->_stats_filtered_counter, _reader->stats().rows_stats_filtered);
    COUNTER_UPDATE(_parent->_del_filtered_counter, _reader->stats().rows_del_filtered);
    COUNTER_UPDATE(_parent->_index_load_timer, _reader->stats().index_load_ns);
    // Global (per-process) metrics, in addition to the per-node profile above.
    PaloMetrics::query_scan_bytes.increment(_reader->stats().compressed_bytes_read);
    PaloMetrics::query_scan_rows.increment(_reader->stats().raw_rows_read);
    _has_update_counter = true;
}
// Releases scanner resources.  Safe to call multiple times: only the first
// invocation performs any work.
Status OlapScanner::close(RuntimeState* state) {
    if (!_is_closed) {
        // Flush accumulated statistics before tearing anything down.
        update_counter();
        _reader.reset();
        Expr::close(_conjunct_ctxs, state);
        _is_closed = true;
    }
    return Status::OK;
}
} // namespace palo
|
morningman/palo
|
be/src/exec/olap_scanner.cpp
|
C++
|
apache-2.0
| 17,960
|
/**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {getMode} from '../mode';
import {urls} from '../config';
/**
 * Internal structure that maintains the state of an extension through loading.
 * Also the shape returned by `parseExtensionUrl`; both fields are undefined
 * when a URL does not look like an extension script.
 *
 * @typedef {{
 *   extensionId: (string|undefined),
 *   extensionVersion: (string|undefined),
 * }}
 * @private
 */
let ExtensionInfoDef;
/**
* Calculate the base url for any scripts.
* @param {!Location} location The window's location
* @param {boolean=} opt_isLocalDev
* @return {string}
*/
/**
 * Calculate the base url for any scripts: the CDN in production, a local
 * `/dist` path during local development.
 * @param {!Location} location The window's location
 * @param {boolean=} opt_isLocalDev
 * @return {string}
 */
export function calculateScriptBaseUrl(location, opt_isLocalDev) {
  if (!opt_isLocalDev) {
    return urls.cdn;
  }
  // These schemes have no meaningful host, so serve from a root-relative path.
  const hostlessProtocols = ['about:', 'blob:', 'data:'];
  const prefix = hostlessProtocols.includes(location.protocol)
    ? ''
    : `${location.protocol}//${location.host}`;
  return `${prefix}/dist`;
}
/**
* Calculate script url for an extension.
* @param {!Location} location The window's location
* @param {string} extensionId
* @param {string=} opt_extensionVersion
* @param {boolean=} opt_isLocalDev
* @return {string}
*/
/**
 * Calculate script url for an extension.  The version defaults to '0.1' when
 * not supplied; an explicitly empty version omits the version suffix.
 * @param {!Location} location The window's location
 * @param {string} extensionId
 * @param {string=} opt_extensionVersion
 * @param {boolean=} opt_isLocalDev
 * @return {string}
 */
export function calculateExtensionScriptUrl(
  location,
  extensionId,
  opt_extensionVersion,
  opt_isLocalDev
) {
  const base = calculateScriptBaseUrl(location, opt_isLocalDev);
  const fileExtension = getMode().esm ? '.mjs' : '.js';
  // Loose equality deliberately catches both null and undefined.
  const version = opt_extensionVersion == null ? '0.1' : opt_extensionVersion;
  const versionSuffix = version ? `-${version}` : '';
  const rtv = getMode().rtvVersion;
  return `${base}/rtv/${rtv}/v0/${extensionId}${versionSuffix}${fileExtension}`;
}
/**
* Calculate script url for an entry point.
* If `opt_rtv` is true, returns the URL matching the current RTV.
* @param {!Location} location The window's location
* @param {string} entryPoint
* @param {boolean=} isLocalDev
* @param {boolean=} opt_rtv
* @return {string}
*/
/**
 * Calculate script url for an entry point.
 * If `opt_rtv` is true (and not in local dev), returns the URL matching the
 * current RTV.
 * @param {!Location} location The window's location
 * @param {string} entryPoint
 * @param {boolean=} isLocalDev
 * @param {boolean=} opt_rtv
 * @return {string}
 */
export function calculateEntryPointScriptUrl(
  location,
  entryPoint,
  isLocalDev,
  opt_rtv
) {
  const base = calculateScriptBaseUrl(location, isLocalDev);
  const fileExtension = getMode().esm ? '.mjs' : '.js';
  // The RTV path segment only applies to non-local, RTV-pinned requests.
  const rtvSegment =
    !isLocalDev && opt_rtv ? `/rtv/${getMode().rtvVersion}` : '';
  return `${base}${rtvSegment}/${entryPoint}${fileExtension}`;
}
/**
* Parse the extension version from a given script URL.
* @param {string} scriptUrl
* @return {!ExtensionInfoDef}
*/
/**
 * Parse the extension id and version from a given script URL.
 * @param {string} scriptUrl
 * @return {!ExtensionInfoDef}
 */
export function parseExtensionUrl(scriptUrl) {
  // Matches e.g. ".../amp-list-0.1.js".  Note that the "(\.max)?" group only
  // applies to local dev builds.
  const extensionRe = /^(.*)\/(.*)-([0-9.]+|latest)(\.max)?\.(?:js|mjs)$/i;
  const matches = scriptUrl.match(extensionRe);
  if (!matches) {
    return {extensionId: undefined, extensionVersion: undefined};
  }
  return {extensionId: matches[2], extensionVersion: matches[3]};
}
|
zhouyx/amphtml
|
src/service/extension-location.js
|
JavaScript
|
apache-2.0
| 3,387
|
package net.vonbuchholtz.sbt.dependencycheck
import org.owasp.dependencycheck.Engine
import sbt.Logger
object DependencyCheckUpdateTask {
  /**
   * Triggers an update of the local vulnerability database.
   * Any failure is logged and then rethrown to the caller.
   */
  def update(engine: Engine, log: Logger): Unit =
    try engine.doUpdates()
    catch {
      case e: Exception =>
        log.error(s"An exception occurred connecting to the local database: ${e.getLocalizedMessage}")
        throw e
    }
}
|
albuch/dependency-check-sbt
|
src/main/scala/net/vonbuchholtz/sbt/dependencycheck/DependencyCheckUpdateTask.scala
|
Scala
|
apache-2.0
| 399
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import textwrap
from contextlib import closing
from xml.etree import ElementTree
from pants.backend.jvm.subsystems.scala_platform import ScalaPlatform
from pants.backend.jvm.subsystems.shader import Shader
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools
from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile
from pants.backend.jvm.tasks.jvm_compile.scala.zinc_analysis import ZincAnalysis
from pants.backend.jvm.tasks.jvm_compile.scala.zinc_analysis_parser import ZincAnalysisParser
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.hash_utils import hash_file
from pants.base.workunit import WorkUnitLabel
from pants.java.distribution.distribution import DistributionLocator
from pants.option.custom_types import dict_option
from pants.util.contextutil import open_zip
from pants.util.dirutil import relativize_paths, safe_open
# Well known metadata file required to register scalac plugins with nsc.
_PLUGIN_INFO_FILE = 'scalac-plugin.xml'
class ZincCompile(JvmCompile):
  """Compile Scala and Java code using Zinc."""
  _ZINC_MAIN = 'org.pantsbuild.zinc.Main'
  # Task name and concurrency capability advertised to the JvmCompile framework.
  _name = 'zinc'
  _supports_concurrent_execution = True
  @staticmethod
  def write_plugin_info(resources_dir, target):
    """Writes a scalac-plugin.xml descriptor for `target` under `resources_dir`.

    Returns a (root_dir, plugin_info_file) tuple.
    """
    root = os.path.join(resources_dir, target.id)
    plugin_info_file = os.path.join(root, _PLUGIN_INFO_FILE)
    with safe_open(plugin_info_file, 'w') as f:
      f.write(textwrap.dedent("""
        <plugin>
          <name>{}</name>
          <classname>{}</classname>
        </plugin>
      """.format(target.plugin, target.classname)).strip())
    return root, plugin_info_file
  @classmethod
  def subsystem_dependencies(cls):
    return super(ZincCompile, cls).subsystem_dependencies() + (ScalaPlatform, DistributionLocator)
  @classmethod
  def get_args_default(cls, bootstrap_option_values):
    """Default zinc args; '-S' prefixed args are forwarded to scalac."""
    return ('-S-encoding', '-SUTF-8', '-S-g:vars')
  @classmethod
  def get_warning_args_default(cls):
    return ('-S-deprecation', '-S-unchecked')
  @classmethod
  def get_no_warning_args_default(cls):
    return ('-S-nowarn',)
  @classmethod
  def register_options(cls, register):
    """Registers zinc-specific options and the zinc/sbt tool jars."""
    super(ZincCompile, cls).register_options(register)
    register('--plugins', advanced=True, action='append', fingerprint=True,
             help='Use these scalac plugins.')
    register('--plugin-args', advanced=True, type=dict_option, default={}, fingerprint=True,
             help='Map from plugin name to list of arguments for that plugin.')
    register('--name-hashing', advanced=True, action='store_true', default=False, fingerprint=True,
             help='Use zinc name hashing.')
    cls.register_jvm_tool(register,
                          'zinc',
                          classpath=[
                            JarDependency('org.pantsbuild', 'zinc', '1.0.8')
                          ],
                          main=cls._ZINC_MAIN,
                          custom_rules=[
                            # The compiler-interface and sbt-interface tool jars carry xsbt and
                            # xsbti interfaces that are used across the shaded tool jar boundary so
                            # we preserve these root packages wholesale along with the core scala
                            # APIs.
                            Shader.exclude_package('scala', recursive=True),
                            Shader.exclude_package('xsbt', recursive=True),
                            Shader.exclude_package('xsbti', recursive=True),
                          ])
    def sbt_jar(name, **kwargs):
      # Small local factory: all sbt tool jars share the same org and revision.
      return JarDependency(org='com.typesafe.sbt', name=name, rev='0.13.9', **kwargs)
    cls.register_jvm_tool(register,
                          'compiler-interface',
                          classpath=[
                            sbt_jar(name='compiler-interface',
                                    classifier='sources',
                                    # We just want the single compiler-interface jar and not its
                                    # dep on scala-lang
                                    intransitive=True)
                          ])
    cls.register_jvm_tool(register,
                          'sbt-interface',
                          classpath=[
                            sbt_jar(name='sbt-interface',
                                    # We just want the single sbt-interface jar and not its dep
                                    # on scala-lang
                                    intransitive=True)
                          ])
    # By default we expect no plugin-jars classpath_spec is filled in by the user, so we accept an
    # empty classpath.
    cls.register_jvm_tool(register, 'plugin-jars', classpath=[])
  def select(self, target):
    """Selects targets with Java or Scala sources (zinc compiles both)."""
    return target.has_sources('.java') or target.has_sources('.scala')
  def select_source(self, source_file_path):
    return source_file_path.endswith('.java') or source_file_path.endswith('.scala')
  def __init__(self, *args, **kwargs):
    super(ZincCompile, self).__init__(*args, **kwargs)
    # A directory independent of any other classpath which can contain per-target
    # plugin resource files.
    self._plugin_info_dir = os.path.join(self.workdir, 'scalac-plugin-info')
    # Lazily computed by plugin_args(); None means "not computed yet".
    self._lazy_plugin_args = None
  def create_analysis_tools(self):
    return AnalysisTools(DistributionLocator.cached().real_home, ZincAnalysisParser(), ZincAnalysis)
  def zinc_classpath(self):
    """Classpath for the zinc process itself (tool jars plus tools.jar if found)."""
    # Zinc takes advantage of tools.jar if it's presented in classpath.
    # For example com.sun.tools.javac.Main is used for in process java compilation.
    def locate_tools_jar():
      try:
        return DistributionLocator.cached(jdk=True).find_libs(['tools.jar'])
      except DistributionLocator.Error:
        # Missing tools.jar is a performance issue, not a failure.
        self.context.log.info('Failed to locate tools.jar. '
                              'Install a JDK to increase performance of Zinc.')
        return []
    return self.tool_classpath('zinc') + locate_tools_jar()
  def compiler_classpath(self):
    return ScalaPlatform.global_instance().compiler_classpath(self.context.products)
  def extra_compile_time_classpath_elements(self):
    # Classpath entries necessary for our compiler plugins.
    return self.plugin_jars()
  def plugin_jars(self):
    """The classpath entries for jars containing code for enabled plugins."""
    if self.get_options().plugins:
      return self.tool_classpath('plugin-jars')
    else:
      return []
  def plugin_args(self):
    """Memoized scalac plugin args; computed on first use."""
    if self._lazy_plugin_args is None:
      self._lazy_plugin_args = self._create_plugin_args()
    return self._lazy_plugin_args
  def _create_plugin_args(self):
    """Builds the -S-Xplugin/-S-P zinc args for every active scalac plugin."""
    if not self.get_options().plugins:
      return []
    plugin_args = self.get_options().plugin_args
    active_plugins = self._find_plugins()
    ret = []
    for name, jar in active_plugins.items():
      ret.append('-S-Xplugin:{}'.format(jar))
      for arg in plugin_args.get(name, []):
        ret.append('-S-P:{}:{}'.format(name, arg))
    return ret
  def _find_plugins(self):
    """Returns a map from plugin name to plugin jar."""
    # Allow multiple flags and also comma-separated values in a single flag.
    plugin_names = set([p for val in self.get_options().plugins for p in val.split(',')])
    plugins = {}
    buildroot = get_buildroot()
    for jar in self.plugin_jars():
      with open_zip(jar, 'r') as jarfile:
        try:
          with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file:
            plugin_info = ElementTree.parse(plugin_info_file).getroot()
          if plugin_info.tag != 'plugin':
            raise TaskError(
              'File {} in {} is not a valid scalac plugin descriptor'.format(_PLUGIN_INFO_FILE,
                                                                             jar))
          name = plugin_info.find('name').text
          if name in plugin_names:
            if name in plugins:
              raise TaskError('Plugin {} defined in {} and in {}'.format(name, plugins[name], jar))
            # It's important to use relative paths, as the compiler flags get embedded in the zinc
            # analysis file, and we port those between systems via the artifact cache.
            plugins[name] = os.path.relpath(jar, buildroot)
        except KeyError:
          # Jar has no scalac-plugin.xml entry: not a plugin jar, skip it.
          pass
    unresolved_plugins = plugin_names - set(plugins.keys())
    if unresolved_plugins:
      raise TaskError('Could not find requested plugins: {}'.format(list(unresolved_plugins)))
    return plugins
  def extra_products(self, target):
    """Override extra_products to produce a plugin information file."""
    ret = []
    if target.is_scalac_plugin and target.classname:
      # NB: We don't yet support explicit in-line compilation of scala compiler plugins from
      # the workspace to be used in subsequent compile rounds like we do for annotation processors
      # with javac. This would require another GroupTask similar to AptCompile, but for scala.
      root, plugin_info_file = self.write_plugin_info(self._plugin_info_dir, target)
      ret.append((root, [plugin_info_file]))
    return ret
  def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file,
              log_file, settings):
    """Assembles the zinc command line and runs it, raising TaskError on failure."""
    # We add compiler_classpath to ensure the scala-library jar is on the classpath.
    # TODO: This also adds the compiler jar to the classpath, which compiled code shouldn't
    # usually need. Be more selective?
    # TODO(John Sirois): Do we need to do this at all? If adding scala-library to the classpath is
    # only intended to allow target authors to omit a scala-library dependency, then ScalaLibrary
    # already overrides traversable_dependency_specs to achieve the same end; arguably at a more
    # appropriate level and certainly at a more appropriate granularity.
    relativized_classpath = relativize_paths(self.compiler_classpath() + classpath, get_buildroot())
    zinc_args = []
    zinc_args.extend([
      '-log-level', self.get_options().level,
      '-analysis-cache', analysis_file,
      '-classpath', ':'.join(relativized_classpath),
      '-d', classes_output_dir
    ])
    if not self.get_options().colors:
      zinc_args.append('-no-color')
    if not self.get_options().name_hashing:
      zinc_args.append('-no-name-hashing')
    if log_file:
      zinc_args.extend(['-capture-log', log_file])
    zinc_args.extend(['-compiler-interface', self.tool_jar('compiler-interface')])
    zinc_args.extend(['-sbt-interface', self.tool_jar('sbt-interface')])
    zinc_args.extend(['-scala-path', ':'.join(self.compiler_classpath())])
    zinc_args += self.plugin_args()
    if upstream_analysis:
      zinc_args.extend(['-analysis-map',
                        ','.join('{}:{}'.format(*kv) for kv in upstream_analysis.items())])
    zinc_args += args
    # '-C' prefixed args are forwarded to javac by zinc.
    zinc_args.extend([
      '-C-source', '-C{}'.format(settings.source_level),
      '-C-target', '-C{}'.format(settings.target_level),
    ])
    zinc_args.extend(settings.args)
    jvm_options = list(self._jvm_options)
    zinc_args.extend(sources)
    self.log_zinc_file(analysis_file)
    # runjava returns a truthy exit code on failure.
    if self.runjava(classpath=self.zinc_classpath(),
                    main=self._ZINC_MAIN,
                    jvm_options=jvm_options,
                    args=zinc_args,
                    workunit_name='zinc',
                    workunit_labels=[WorkUnitLabel.COMPILER]):
      raise TaskError('Zinc compile failed.')
  def log_zinc_file(self, analysis_file):
    """Debug-logs the analysis file path and its content hash (if it exists)."""
    self.context.log.debug('Calling zinc on: {} ({})'
                           .format(analysis_file,
                                   hash_file(analysis_file).upper()
                                   if os.path.exists(analysis_file)
                                   else 'nonexistent'))
|
scode/pants
|
src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_compile.py
|
Python
|
apache-2.0
| 12,212
|
<!-- Header -->
<!-- Banner: page title (Liquid variable, presumably from _data/vcf.yml — confirm)
     flanked by two institution/lab logos.  The Bootstrap push/pull classes
     reorder the columns on md+ screens so the title column renders between
     the two logo columns. -->
<!-- NOTE(review): both <img> tags lack alt attributes — accessibility gap. -->
<header>
    <div class="container">
        <div class="row">
            <div class="col-md-8 col-md-push-2">
                <div class="intro-text">
                    <span class="name">{{ site.data.vcf.title }} </span>
                </div>
                <br />
            </div>
            <div class="col-md-2 col-md-pull-8 col-xs-6">
                <img src="/img/mssm.png" style="height:100px;"/>
            </div>
            <div class="col-md-2 col-xs-6">
                <img src="/img/lablogo.jpg" style="height:100px;"/>
            </div>
        </div>
    </div>
</header>
|
stuartscott/scottlab.github.io
|
_includes/vcf_header.html
|
HTML
|
apache-2.0
| 692
|
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.cluster.zookeeper;
import com.navercorp.pinpoint.common.server.cluster.zookeeper.CreateNodeMessage;
import com.navercorp.pinpoint.common.server.cluster.zookeeper.ZookeeperClient;
import com.navercorp.pinpoint.common.util.Assert;
import com.navercorp.pinpoint.common.util.CollectionUtils;
import com.navercorp.pinpoint.common.util.MapUtils;
import org.apache.curator.utils.ZKPaths;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* @author Taejin Koo
*/
/**
 * Helper for the ZooKeeper cluster data manager: reads collector registration
 * data from ZooKeeper and registers znodes for this web instance.
 *
 * @author Taejin Koo
 */
public class ZookeeperClusterDataManagerHelper {
    private final Logger logger = LoggerFactory.getLogger(this.getClass());
    public ZookeeperClusterDataManagerHelper() {
    }
    /**
     * Reads the payload of every child znode under {@code parentPath}.
     *
     * @return map of full znode path to payload; empty (never {@code null})
     *         when there are no children or any ZooKeeper operation fails
     */
    Map<String, byte[]> getCollectorData(ZookeeperClient client, String parentPath) {
        try {
            List<String> collectorList = client.getChildNodeList(parentPath, true);
            if (CollectionUtils.isEmpty(collectorList)) {
                return Collections.emptyMap();
            }
            Map<String, byte[]> map = new HashMap<>();
            for (String collector : collectorList) {
                String fullPath = ZKPaths.makePath(parentPath, collector);
                byte[] data = client.getData(fullPath, true);
                map.put(fullPath, data);
            }
            return map;
        } catch (Exception e) {
            // Best effort: a ZooKeeper hiccup degrades to "no data" rather
            // than failing the caller.
            logger.warn(e.getMessage(), e);
        }
        return Collections.emptyMap();
    }
    /**
     * Extracts the collector cluster id that follows
     * {@code collectorClusterPath + "/"} within {@code path}.
     *
     * @return the id, or {@code null} when {@code path} does not contain
     *         {@code collectorClusterPath} or has no id segment after it
     */
    String extractCollectorClusterId(String path, String collectorClusterPath) {
        int index = path.indexOf(collectorClusterPath);
        if (index < 0) {
            // Bug fix: when indexOf() returned -1 the old code computed
            // startPosition = collectorClusterPath.length() and returned an
            // arbitrary substring instead of rejecting the path.
            return null;
        }
        int startPosition = index + collectorClusterPath.length() + 1;
        if (path.length() > startPosition) {
            String id = path.substring(startPosition);
            return id;
        }
        return null;
    }
    /**
     * Creates (or overwrites) the znode described by {@code createNodeMessage},
     * creating intermediate path elements as needed.
     *
     * @return {@code true} on success, {@code false} if any ZooKeeper operation failed
     * @throws NullPointerException if {@code createNodeMessage} is {@code null}
     */
    public boolean pushZnode(ZookeeperClient client, CreateNodeMessage createNodeMessage) {
        Objects.requireNonNull(createNodeMessage, "createNodeMessage");
        try {
            String nodePath = createNodeMessage.getNodePath();
            client.createPath(nodePath);
            client.createOrSetNode(createNodeMessage);
            logger.info("Register Zookeeper node UniqPath = {}.", nodePath);
            return true;
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
        return false;
    }
    /**
     * Synchronously pulls all collector data under {@code path}, re-keyed by
     * collector cluster id instead of the full znode path.  Entries whose path
     * cannot be parsed into an id are logged and skipped.
     */
    Map<String, byte[]> syncPullCollectorCluster(ZookeeperClient client, String path) {
        Map<String, byte[]> map = getCollectorData(client, path);
        if (MapUtils.isEmpty(map)) {
            return Collections.emptyMap();
        }
        Map<String, byte[]> result = new HashMap<>();
        for (Map.Entry<String, byte[]> entry : map.entrySet()) {
            String key = entry.getKey();
            byte[] value = entry.getValue();
            String id = extractCollectorClusterId(key, path);
            if (id == null) {
                logger.error("Illegal Collector Path({}) found.", key);
                continue;
            }
            result.put(id, value);
        }
        return result;
    }
}
|
Xylus/pinpoint
|
web/src/main/java/com/navercorp/pinpoint/web/cluster/zookeeper/ZookeeperClusterDataManagerHelper.java
|
Java
|
apache-2.0
| 3,837
|
"""
FormWizard class -- implements a multi-page form, validating between each
step and storing the form's state as HTML hidden fields so that no state is
stored on the server side.
"""
import cPickle as pickle
from django import forms
from django.conf import settings
from django.http import Http404
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils.hashcompat import md5_constructor
from django.utils.translation import ugettext_lazy as _
from django.contrib.formtools.utils import security_hash
class FormWizard(object):
# The HTML (and POST data) field name for the "step" variable.
step_field_name="wizard_step"
# METHODS SUBCLASSES SHOULDN'T OVERRIDE ###################################
def __init__(self, form_list, initial=None):
"""
Start a new wizard with a list of forms.
form_list should be a list of Form classes (not instances).
"""
self.form_list = form_list[:]
self.initial = initial or {}
# Dictionary of extra template context variables.
self.extra_context = {}
# A zero-based counter keeping track of which step we're in.
self.step = 0
def __repr__(self):
return "step: %d\nform_list: %s\ninitial_data: %s" % (self.step, self.form_list, self.initial)
def get_form(self, step, data=None):
"Helper method that returns the Form instance for the given step."
return self.form_list[step](data, prefix=self.prefix_for_step(step), initial=self.initial.get(step, None))
def num_steps(self):
"Helper method that returns the number of steps."
# You might think we should just set "self.form_list = len(form_list)"
# in __init__(), but this calculation needs to be dynamic, because some
# hook methods might alter self.form_list.
return len(self.form_list)
def __call__(self, request, *args, **kwargs):
"""
Main method that does all the hard work, conforming to the Django view
interface.
"""
if 'extra_context' in kwargs:
self.extra_context.update(kwargs['extra_context'])
current_step = self.determine_step(request, *args, **kwargs)
self.parse_params(request, *args, **kwargs)
# Sanity check.
if current_step >= self.num_steps():
raise Http404('Step %s does not exist' % current_step)
# For each previous step, verify the hash and process.
# TODO: Move "hash_%d" to a method to make it configurable.
for i in range(current_step):
form = self.get_form(i, request.POST)
if request.POST.get("hash_%d" % i, '') != self.security_hash(request, form):
return self.render_hash_failure(request, i)
self.process_step(request, form, i)
# Process the current step. If it's valid, go to the next step or call
# done(), depending on whether any steps remain.
if request.method == 'POST':
form = self.get_form(current_step, request.POST)
else:
form = self.get_form(current_step)
if form.is_valid():
self.process_step(request, form, current_step)
next_step = current_step + 1
# If this was the last step, validate all of the forms one more
# time, as a sanity check, and call done().
num = self.num_steps()
if next_step == num:
final_form_list = [self.get_form(i, request.POST) for i in range(num)]
# Validate all the forms. If any of them fail validation, that
# must mean the validator relied on some other input, such as
# an external Web site.
for i, f in enumerate(final_form_list):
if not f.is_valid():
return self.render_revalidation_failure(request, i, f)
return self.done(request, final_form_list)
# Otherwise, move along to the next step.
else:
form = self.get_form(next_step)
self.step = current_step = next_step
return self.render(form, request, current_step)
def render(self, form, request, step, context=None):
"Renders the given Form object, returning an HttpResponse."
old_data = request.POST
prev_fields = []
if old_data:
hidden = forms.HiddenInput()
# Collect all data from previous steps and render it as HTML hidden fields.
for i in range(step):
old_form = self.get_form(i, old_data)
hash_name = 'hash_%s' % i
prev_fields.extend([bf.as_hidden() for bf in old_form])
prev_fields.append(hidden.render(hash_name, old_data.get(hash_name, self.security_hash(request, old_form))))
return self.render_template(request, form, ''.join(prev_fields), step, context)
# METHODS SUBCLASSES MIGHT OVERRIDE IF APPROPRIATE ########################
def prefix_for_step(self, step):
"Given the step, returns a Form prefix to use."
return str(step)
def render_hash_failure(self, request, step):
"""
Hook for rendering a template if a hash check failed.
step is the step that failed. Any previous step is guaranteed to be
valid.
This default implementation simply renders the form for the given step,
but subclasses may want to display an error message, etc.
"""
return self.render(self.get_form(step), request, step, context={'wizard_error': _('We apologize, but your form has expired. Please continue filling out the form from this page.')})
def render_revalidation_failure(self, request, step, form):
"""
Hook for rendering a template if final revalidation failed.
It is highly unlikely that this point would ever be reached, but See
the comment in __call__() for an explanation.
"""
return self.render(form, request, step)
def security_hash(self, request, form):
"""
Calculates the security hash for the given HttpRequest and Form instances.
Subclasses may want to take into account request-specific information,
such as the IP address.
"""
return security_hash(request, form)
def determine_step(self, request, *args, **kwargs):
"""
Given the request object and whatever *args and **kwargs were passed to
__call__(), returns the current step (which is zero-based).
Note that the result should not be trusted. It may even be a completely
invalid number. It's not the job of this method to validate it.
"""
if not request.POST:
return 0
try:
step = int(request.POST.get(self.step_field_name, 0))
except ValueError:
return 0
return step
def parse_params(self, request, *args, **kwargs):
"""
Hook for setting some state, given the request object and whatever
*args and **kwargs were passed to __call__(), sets some state.
This is called at the beginning of __call__().
"""
pass
def get_template(self, step):
"""
Hook for specifying the name of the template to use for a given step.
Note that this can return a tuple of template names if you'd like to
use the template system's select_template() hook.
"""
return 'forms/wizard.html'
def render_template(self, request, form, previous_fields, step, context=None):
"""
Renders the template for the given step, returning an HttpResponse object.
Override this method if you want to add a custom context, return a
different MIME type, etc. If you only need to override the template
name, use get_template() instead.
The template will be rendered with the following context:
step_field -- The name of the hidden field containing the step.
step0 -- The current step (zero-based).
step -- The current step (one-based).
step_count -- The total number of steps.
form -- The Form instance for the current step (either empty
or with errors).
previous_fields -- A string representing every previous data field,
plus hashes for completed forms, all in the form of
hidden fields. Note that you'll need to run this
through the "safe" template filter, to prevent
auto-escaping, because it's raw HTML.
"""
context = context or {}
context.update(self.extra_context)
return render_to_response(self.get_template(step), dict(context,
step_field=self.step_field_name,
step0=step,
step=step + 1,
step_count=self.num_steps(),
form=form,
previous_fields=previous_fields
), context_instance=RequestContext(request))
def process_step(self, request, form, step):
    """
    Hook for modifying the FormWizard's internal state, given a fully
    validated Form object. The Form is guaranteed to have clean, valid
    data.

    This method should *not* modify any of that data. Rather, it might
    want to set self.extra_context or dynamically alter self.form_list,
    based on previously submitted forms.

    Note that this method is called every time a page is rendered, for
    *all* submitted steps. The default implementation does nothing.
    """
    pass
# METHODS SUBCLASSES MUST OVERRIDE ########################################
def done(self, request, form_list):
"""
Hook for doing something with the validated data. This is responsible
for the final processing.
form_list is a list of Form instances, each containing clean, valid
data.
"""
raise NotImplementedError("Your %s class has not defined a done() method, which is required." % self.__class__.__name__)
|
sanjuro/RCJK
|
vendor/django/contrib/formtools/wizard.py
|
Python
|
apache-2.0
| 10,209
|
/* main.c - Application main entry point */
/*
* Copyright (c) 2015 Intel Corporation
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <zephyr/types.h>
#include <stdbool.h>
#include <stddef.h>
#include <string.h>
#include <errno.h>
#include <linker/sections.h>
#include <ztest.h>
#include <net/net_if.h>
#include <net/net_pkt.h>
#include <net/net_ip.h>
#include <net/net_core.h>
#include <net/ethernet.h>
#include <net/net_mgmt.h>
#include <net/net_event.h>
#include "icmpv6.h"
#include "ipv6.h"
#define NET_LOG_ENABLED 1
#include "net_private.h"
#if defined(CONFIG_NET_DEBUG_MLD)
#define DBG(fmt, ...) printk(fmt, ##__VA_ARGS__)
#else
#define DBG(fmt, ...)
#endif
/* Unicast address of this node: 2001:db8::1. */
static struct in6_addr my_addr = { { { 0x20, 0x01, 0x0d, 0xb8, 0, 0, 0, 0,
				       0, 0, 0, 0, 0, 0, 0, 0x1 } } };
/* Address of the simulated remote peer: 2001:db8::2. */
static struct in6_addr peer_addr = { { { 0x20, 0x01, 0x0d, 0xb8, 0, 0, 0, 0,
					 0, 0, 0, 0, 0, 0, 0, 0x2 } } };
/* Multicast group address; overwritten to ff02::1 by join/leave helpers. */
static struct in6_addr mcast_addr = { { { 0x20, 0x01, 0x0d, 0xb8, 0, 0, 0, 0,
					  0, 0, 0, 0, 0, 0, 0, 0x1 } } };
/* Default network interface under test, resolved in mld_setup(). */
static struct net_if *iface;
/* Flags set from event/packet callbacks and checked by the test cases. */
static bool is_group_joined;
static bool is_group_left;
static bool is_join_msg_ok;
static bool is_leave_msg_ok;
static bool is_query_received;
static bool is_report_sent;
/* Given by callbacks when the event a test is blocking on has happened. */
static struct k_sem wait_data;
#define WAIT_TIME 500
#define WAIT_TIME_LONG MSEC_PER_SEC
#define MY_PORT 1969
#define PEER_PORT 13856
/* Per-device context of the fake network driver used by this test. */
struct net_test_mld {
	u8_t mac_addr[sizeof(struct net_eth_addr)];
	struct net_linkaddr ll_addr;
};
/* Driver init hook of the dummy test device; nothing to set up. */
int net_test_dev_init(struct device *dev)
{
	return 0;
}
/* Return the device's MAC address, lazily generating one in the
 * documentation range (00-00-5E-00-53-xx, RFC 7042) on first use.
 */
static u8_t *net_test_get_mac(struct device *dev)
{
	struct net_test_mld *context = dev->driver_data;

	/* Byte 2 is 0x5E once initialized, so 0x00 means "not yet set". */
	if (context->mac_addr[2] == 0x00) {
		/* 00-00-5E-00-53-xx Documentation RFC 7042 */
		context->mac_addr[0] = 0x00;
		context->mac_addr[1] = 0x00;
		context->mac_addr[2] = 0x5E;
		context->mac_addr[3] = 0x00;
		context->mac_addr[4] = 0x53;
		context->mac_addr[5] = sys_rand32_get();
	}

	return context->mac_addr;
}
/* Interface init hook: assign the (possibly generated) MAC address as
 * the interface link-layer address.
 */
static void net_test_iface_init(struct net_if *iface)
{
	u8_t *mac = net_test_get_mac(net_if_get_device(iface));

	net_if_set_link_addr(iface, mac, sizeof(struct net_eth_addr),
			     NET_LINK_ETHERNET);
}
#define NET_ICMP_HDR(pkt) ((struct net_icmp_hdr *)net_pkt_icmp_data(pkt))

/* Fake driver send hook: instead of transmitting, inspect outgoing
 * packets. Any MLDv2 report flips the join/leave/report flags and wakes
 * the waiting test case.
 */
static int tester_send(struct net_if *iface, struct net_pkt *pkt)
{
	struct net_icmp_hdr *icmp = NET_ICMP_HDR(pkt);

	if (!pkt->frags) {
		TC_ERROR("No data to send!\n");
		return -ENODATA;
	}

	if (icmp->type == NET_ICMPV6_MLDv2) {
		/* FIXME, add more checks here */

		NET_DBG("Received something....");

		/* All flags are set because this hook cannot tell a join
		 * report from a leave report; the test that triggered the
		 * send checks the flag it cares about.
		 */
		is_join_msg_ok = true;
		is_leave_msg_ok = true;
		is_report_sent = true;
		k_sem_give(&wait_data);
	}

	net_pkt_unref(pkt);

	return 0;
}
/* Context instance for the single fake device. */
struct net_test_mld net_test_data;

static struct net_if_api net_test_if_api = {
	.init = net_test_iface_init,
	.send = tester_send,
};

/* The fake device sits on the dummy L2 so no real link layer is needed. */
#define _ETH_L2_LAYER DUMMY_L2
#define _ETH_L2_CTX_TYPE NET_L2_GET_CTX_TYPE(DUMMY_L2)

NET_DEVICE_INIT(net_test_mld, "net_test_mld",
		net_test_dev_init, &net_test_data, NULL,
		CONFIG_KERNEL_INIT_PRIORITY_DEFAULT,
		&net_test_if_api, _ETH_L2_LAYER, _ETH_L2_CTX_TYPE,
		127);
/* net_mgmt callback: a multicast group was joined on the interface. */
static void group_joined(struct net_mgmt_event_callback *cb,
			 u32_t nm_event, struct net_if *iface)
{
	is_group_joined = true;

	k_sem_give(&wait_data);
}
/* net_mgmt callback: a multicast group was left on the interface. */
static void group_left(struct net_mgmt_event_callback *cb,
		       u32_t nm_event, struct net_if *iface)
{
	is_group_left = true;

	k_sem_give(&wait_data);
}
/* Table of network management events the test subscribes to; the list
 * is terminated by a zero event entry.
 */
static struct mgmt_events {
	u32_t event;
	net_mgmt_event_handler_t handler;
	struct net_mgmt_event_callback cb;
} mgmt_events[] = {
	{ .event = NET_EVENT_IPV6_MCAST_JOIN, .handler = group_joined },
	{ .event = NET_EVENT_IPV6_MCAST_LEAVE, .handler = group_left },
	{ 0 }
};
/* Register every callback from the mgmt_events table with net_mgmt. */
static void setup_mgmt_events(void)
{
	int i;

	for (i = 0; mgmt_events[i].event; i++) {
		net_mgmt_init_event_callback(&mgmt_events[i].cb,
					     mgmt_events[i].handler,
					     mgmt_events[i].event);

		net_mgmt_add_event_callback(&mgmt_events[i].cb);
	}
}
/* Common fixture: subscribe to mgmt events, resolve the default
 * interface, give it our unicast address and init the wait semaphore.
 */
static void mld_setup(void)
{
	struct net_if_addr *ifaddr;

	setup_mgmt_events();

	iface = net_if_get_default();

	zassert_not_null(iface, "Interface is NULL");

	ifaddr = net_if_ipv6_addr_add(iface, &my_addr,
				      NET_ADDR_MANUAL, 0);

	zassert_not_null(ifaddr, "Cannot add IPv6 address");

	/* The semaphore is there to wait the data to be received. */
	k_sem_init(&wait_data, 0, UINT_MAX);
}
/* Join the all-nodes group ff02::1 and yield so the stack can react. */
static void join_group(void)
{
	int ret;

	net_ipv6_addr_create(&mcast_addr, 0xff02, 0, 0, 0, 0, 0, 0, 0x0001);

	ret = net_ipv6_mld_join(iface, &mcast_addr);

	zassert_equal(ret, 0, "Cannot join IPv6 multicast group");

	k_yield();
}
/* Leave the all-nodes group ff02::1 and yield so the stack can react. */
static void leave_group(void)
{
	int ret;

	net_ipv6_addr_create(&mcast_addr, 0xff02, 0, 0, 0, 0, 0, 0, 0x0001);

	ret = net_ipv6_mld_leave(iface, &mcast_addr);

	zassert_equal(ret, 0, "Cannot leave IPv6 multicast group");

	k_yield();
}
/* Verify that joining a group raises the MCAST_JOIN mgmt event. */
static void catch_join_group(void)
{
	is_group_joined = false;

	join_group();

	/* group_joined() gives the semaphore once the event fires. */
	if (k_sem_take(&wait_data, WAIT_TIME)) {
		zassert_true(0, "Timeout while waiting join event");
	}

	if (!is_group_joined) {
		zassert_true(0, "Did not catch join event");
	}

	is_group_joined = false;
}
/* Verify that leaving a group raises the MCAST_LEAVE mgmt event. */
static void catch_leave_group(void)
{
	/* Reset the flag this test actually checks. The previous code
	 * cleared is_group_joined here (copy-paste from
	 * catch_join_group), so a stale is_group_left=true from an
	 * earlier run could mask a missing leave event.
	 */
	is_group_left = false;

	leave_group();

	/* group_left() gives the semaphore once the event fires. */
	if (k_sem_take(&wait_data, WAIT_TIME)) {
		zassert_true(0, "Timeout while waiting leave event");
	}

	if (!is_group_left) {
		zassert_true(0, "Did not catch leave event");
	}

	is_group_left = false;
}
/* Verify that joining a group makes the stack transmit an MLDv2 report
 * (observed by tester_send(), which sets is_join_msg_ok).
 */
static void verify_join_group(void)
{
	is_join_msg_ok = false;

	join_group();

	if (k_sem_take(&wait_data, WAIT_TIME)) {
		zassert_true(0, "Timeout while waiting join event");
	}

	if (!is_join_msg_ok) {
		zassert_true(0, "Join msg invalid");
	}

	is_join_msg_ok = false;
}
/* Verify that leaving a group makes the stack transmit an MLDv2 report
 * (observed by tester_send(), which sets is_leave_msg_ok).
 */
static void verify_leave_group(void)
{
	is_leave_msg_ok = false;

	leave_group();

	if (k_sem_take(&wait_data, WAIT_TIME)) {
		zassert_true(0, "Timeout while waiting leave event");
	}

	if (!is_leave_msg_ok) {
		zassert_true(0, "Leave msg invalid");
	}

	is_leave_msg_ok = false;
}
/* Hand-build an MLDv2 query (as if sent by peer_addr) and feed it into
 * the RX path of the given interface via net_recv_data().
 *
 * Layout: IPv6 header with hop-by-hop extension header carrying the
 * router alert option (RFC 2711), then the ICMPv6 MLD query body.
 */
static void send_query(struct net_if *iface)
{
	struct net_pkt *pkt;
	struct in6_addr dst;
	u16_t pos;

	/* Sent to all MLDv2-capable routers */
	net_ipv6_addr_create(&dst, 0xff02, 0, 0, 0, 0, 0, 0, 0x0016);

	pkt = net_pkt_get_reserve_tx(net_if_get_ll_reserve(iface, &dst),
				     K_FOREVER);

	pkt = net_ipv6_create_raw(pkt,
				  &peer_addr,
				  &dst,
				  iface,
				  NET_IPV6_NEXTHDR_HBHO);

	NET_IPV6_HDR(pkt)->hop_limit = 1; /* RFC 3810 ch 7.4 */

	/* Add hop-by-hop option and router alert option, RFC 3810 ch 5. */
	net_pkt_append_u8(pkt, IPPROTO_ICMPV6);
	net_pkt_append_u8(pkt, 0); /* length (0 means 8 bytes) */

#define ROUTER_ALERT_LEN 8

	/* IPv6 router alert option is described in RFC 2711. */
	net_pkt_append_be16(pkt, 0x0502); /* RFC 2711 ch 2.1 */
	net_pkt_append_be16(pkt, 0); /* pkt contains MLD msg */

	net_pkt_append_u8(pkt, 1); /* padn */
	net_pkt_append_u8(pkt, 0); /* padn len */

	/* ICMPv6 header */
	net_pkt_append_u8(pkt, NET_ICMPV6_MLD_QUERY); /* type */
	net_pkt_append_u8(pkt, 0); /* code */
	net_pkt_append_be16(pkt, 0); /* chksum */

	net_pkt_append_be16(pkt, 3); /* maximum response code */
	net_pkt_append_be16(pkt, 0); /* reserved field */

	/* Unspecified multicast address == general query for all groups. */
	net_pkt_append_all(pkt, sizeof(struct in6_addr),
			   (const u8_t *)net_ipv6_unspecified_address(),
			   K_FOREVER); /* multicast address */

	net_pkt_append_be16(pkt, 0); /* Resv, S, QRV and QQIC */
	net_pkt_append_be16(pkt, 0); /* number of addresses */

	net_ipv6_finalize_raw(pkt, NET_IPV6_NEXTHDR_HBHO);

	net_pkt_set_iface(pkt, iface);

	/* Patch the ICMPv6 checksum in place now that the body is final. */
	net_pkt_write_be16(pkt, pkt->frags,
			   NET_IPV6H_LEN + ROUTER_ALERT_LEN + 2,
			   &pos, ntohs(~net_calc_chksum_icmpv6(pkt)));

	net_recv_data(iface, pkt);
}
/* We are not really interested to parse the query at this point */
static enum net_verdict handle_mld_query(struct net_pkt *pkt)
{
is_query_received = true;
return NET_DROP;
}
static struct net_icmpv6_handler mld_query_input_handler = {
.type = NET_ICMPV6_MLD_QUERY,
.code = 0,
.handler = handle_mld_query,
};
/* Verify that an incoming MLD query reaches the registered ICMPv6
 * handler (handle_mld_query sets is_query_received).
 */
static void catch_query(void)
{
	is_query_received = false;

	net_icmpv6_register_handler(&mld_query_input_handler);

	send_query(net_if_get_default());

	k_yield();

	if (k_sem_take(&wait_data, WAIT_TIME)) {
		zassert_true(0, "Timeout while waiting query event");
	}

	if (!is_query_received) {
		zassert_true(0, "Query msg invalid");
	}

	is_query_received = false;
}
/* Verify that the stack answers an MLD query with a report: join a
 * group, inject a query and wait for tester_send() to see the report.
 */
static void verify_send_report(void)
{
	/* We need to remove our temporary handler so that the
	 * stack handler is called instead.
	 */
	net_icmpv6_unregister_handler(&mld_query_input_handler);

	is_query_received = false;
	is_report_sent = false;

	join_group();

	send_query(net_if_get_default());

	k_yield();

	/* Did we send a report? */
	if (k_sem_take(&wait_data, WAIT_TIME)) {
		zassert_true(0, "Timeout while waiting report");
	}

	if (!is_report_sent) {
		zassert_true(0, "Report not sent");
	}
}
/* This value should be longer than the one in net_if.c when DAD timeouts */
#define DAD_TIMEOUT (MSEC_PER_SEC / 5)

/* After DAD completes, the interface must be a member of the link-local
 * all-nodes multicast group (ff02::1).
 */
static void test_allnodes(void)
{
	struct net_if *iface = NULL;
	struct net_if_mcast_addr *ifmaddr;
	struct in6_addr addr;

	net_ipv6_addr_create_ll_allnodes_mcast(&addr);

	/* Let the DAD succeed so that the multicast address will be there */
	k_sleep(DAD_TIMEOUT);

	ifmaddr = net_if_ipv6_maddr_lookup(&addr, &iface);

	zassert_not_null(ifmaddr, "Interface does not contain "
			 "allnodes multicast address");
}
/* The interface must be a member of the solicited-node multicast group
 * derived from the unicast address added in mld_setup().
 */
static void test_solicit_node(void)
{
	struct net_if *iface = NULL;
	struct net_if_mcast_addr *ifmaddr;
	struct in6_addr addr;

	net_ipv6_addr_create_solicited_node(&my_addr, &addr);

	ifmaddr = net_if_ipv6_maddr_lookup(&addr, &iface);

	zassert_not_null(ifmaddr, "Interface does not contain "
			 "solicit node multicast address");
}
/* Test entry point; order matters: mld_setup() must run first, and
 * verify_send_report depends on catch_query having registered (and then
 * unregistering) the temporary query handler.
 */
void test_main(void)
{
	ztest_test_suite(net_mld_test,
			 ztest_unit_test(mld_setup),
			 ztest_unit_test(join_group),
			 ztest_unit_test(leave_group),
			 ztest_unit_test(catch_join_group),
			 ztest_unit_test(catch_leave_group),
			 ztest_unit_test(verify_join_group),
			 ztest_unit_test(verify_leave_group),
			 ztest_unit_test(catch_query),
			 ztest_unit_test(verify_send_report),
			 ztest_unit_test(test_allnodes),
			 ztest_unit_test(test_solicit_node)
			 );

	ztest_run_test_suite(net_mld_test);
}
|
rsalveti/zephyr
|
tests/net/mld/src/main.c
|
C
|
apache-2.0
| 10,236
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agimatec.validation.jsr303;
/**
 * Adapter that funnels {@link AppendValidation} callbacks into an
 * {@link AnnotationConstraintBuilder}: each appended validation is
 * registered on the builder as a composed constraint.
 * <br/>
 * User: roman <br/>
 * Date: 01.02.2010 <br/>
 * Time: 12:28:56 <br/>
 * Copyright: Agimatec GmbH
 */
public class AppendValidationToBuilder implements AppendValidation {
    /** Builder that receives every appended validation. */
    private final AnnotationConstraintBuilder target;

    /**
     * @param builder builder to which validations are delegated
     */
    public AppendValidationToBuilder(AnnotationConstraintBuilder builder) {
        this.target = builder;
    }

    /** Registers the given validation on the builder as a composed constraint. */
    public void append(ConstraintValidation validation) {
        target.addComposed(validation);
    }
}
|
google-code/agimatec-validation
|
agimatec-jsr303/src/main/java/com/agimatec/validation/jsr303/AppendValidationToBuilder.java
|
Java
|
apache-2.0
| 1,331
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.editor.impl;
import com.intellij.openapi.editor.ex.LineIterator;
import com.intellij.openapi.util.text.LineTokenizer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.BitUtil;
import com.intellij.util.text.CharArrayUtil;
import com.intellij.util.text.MergingCharSequence;
import it.unimi.dsi.fastutil.bytes.ByteArrayList;
import it.unimi.dsi.fastutil.bytes.ByteList;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
/**
 * Data structure specialized for working with document text lines, i.e. stores information about line mapping to document
 * offsets and provides convenient ways to work with that information like retrieving target line by document offset etc.
 * <p/>
 * Immutable.
 */
public final class LineSet {
  // Flag bit: line was modified since the flags were last cleared.
  private static final int MODIFIED_MASK = 0x4;
  // Low two flag bits: length of the line separator (0..2 chars).
  private static final int SEPARATOR_MASK = 0x3;

  // Start offset of each line, ascending.
  private final int[] myStarts;
  private final byte[] myFlags; // MODIFIED_MASK bit is for is/setModified(line); SEPARATOR_MASK 2 bits stores line separator length: 0..2
  // Total text length this line set was built for.
  private final int myLength;

  private LineSet(int[] starts, byte[] flags, int length) {
    myStarts = starts;
    myFlags = flags;
    myLength = length;
  }

  public static LineSet createLineSet(CharSequence text) {
    return createLineSet(text, false);
  }

  /**
   * Builds a LineSet for {@code text}; when {@code markModified} is true every
   * line is created with the MODIFIED flag set.
   */
  @NotNull
  private static LineSet createLineSet(@NotNull CharSequence text, boolean markModified) {
    IntList starts = new IntArrayList();
    ByteList flags = new ByteArrayList();

    LineTokenizer lineTokenizer = new LineTokenizer(text);
    while (!lineTokenizer.atEnd()) {
      starts.add(lineTokenizer.getOffset());
      flags.add((byte) (lineTokenizer.getLineSeparatorLength() | (markModified ? MODIFIED_MASK : 0)));
      lineTokenizer.advance();
    }
    return new LineSet(starts.toIntArray(), flags.toByteArray(), text.length());
  }

  /**
   * Returns a new LineSet reflecting replacement of {@code prevText[start, end)}
   * with {@code replacement}. {@code wholeTextReplaced} clears modification flags
   * on the result.
   */
  @NotNull
  LineSet update(@NotNull CharSequence prevText, int start, int end, @NotNull CharSequence replacement, boolean wholeTextReplaced) {
    if (myLength == 0) {
      return createLineSet(replacement, !wholeTextReplaced);
    }

    // if we're breaking or creating a '\r\n' pair, expand the changed range to include it fully
    CharSequence newText = StringUtil.replaceSubSequence(prevText, start, end, replacement);
    if (hasChar(prevText, start - 1, '\r') &&
        (hasChar(prevText, start, '\n') || hasChar(newText, start, '\n'))) {
      replacement = new MergingCharSequence("\r", replacement);
      start--;
    }

    if (hasChar(prevText, end, '\n') &&
        (hasChar(prevText, end - 1, '\r') || hasChar(newText, start + replacement.length() - 1, '\r'))) {
      replacement = new MergingCharSequence(replacement, "\n");
      end++;
    }

    // Fast path when the whole change stays within one line.
    LineSet result = isSingleLineChange(start, end, replacement)
                     ? updateInsideOneLine(findLineIndex(start), replacement.length() - (end - start))
                     : genericUpdate(start, end, replacement);

    return wholeTextReplaced ? result.clearModificationFlags() : result;
  }

  private static boolean hasChar(CharSequence s, int index, char c) {
    return index >= 0 && index < s.length() && s.charAt(index) == c;
  }

  private boolean isSingleLineChange(int start, int end, @NotNull CharSequence replacement) {
    if (start == 0 && end == myLength && replacement.length() == 0) return false;

    int startLine = findLineIndex(start);
    return startLine == findLineIndex(end) && !CharArrayUtil.containLineBreaks(replacement) && !isLastEmptyLine(startLine);
  }

  // Shift all subsequent line starts by the delta and mark the line modified.
  @NotNull
  private LineSet updateInsideOneLine(int line, int lengthDelta) {
    int[] starts = myStarts.clone();
    for (int i = line + 1; i < starts.length; i++) {
      starts[i] += lengthDelta;
    }

    byte[] flags = myFlags.clone();
    flags[line] |= MODIFIED_MASK;
    return new LineSet(starts, flags, myLength + lengthDelta);
  }

  // General case: splice the line structure of the replacement ("patch")
  // into this one, shifting the tail by the length delta.
  private LineSet genericUpdate(int startOffset, int endOffset, CharSequence replacement) {
    int startLine = findLineIndex(startOffset);
    int endLine = findLineIndex(endOffset);

    LineSet patch = createLineSet(replacement, true);

    int lengthShift = patch.myLength - (endOffset - startOffset);

    int startLineStart = getLineStart(startLine);
    boolean addStartLine = startOffset - startLineStart > 0 || patch.myStarts.length > 0 || endOffset < myLength;
    boolean addEndLine = endOffset < myLength && patch.myLength > 0 && patch.getSeparatorLength(patch.myStarts.length - 1) > 0;
    int newLineCount = startLine + (addStartLine ? 1 : 0) +
                       Math.max(patch.myStarts.length - 1, 0) +
                       (addEndLine ? 1 : 0) + Math.max(myStarts.length - endLine - 1, 0);

    int[] starts = new int[newLineCount];
    byte[] flags = new byte[newLineCount];

    if (startLine > 0) {
      System.arraycopy(myStarts, 0, starts, 0, startLine);
      System.arraycopy(myFlags, 0, flags, 0, startLine);
    }

    int toIndex = startLine;
    if (addStartLine) {
      starts[toIndex] = startLineStart;
      flags[toIndex] = patch.myStarts.length > 0 ? patch.myFlags[0] : MODIFIED_MASK;
      toIndex++;
    }

    toIndex = patch.shiftData(starts, flags, 1, toIndex, patch.myStarts.length - 1, startOffset);

    if (endOffset < myLength) {
      if (addEndLine) {
        starts[toIndex] = endOffset + lengthShift;
        flags[toIndex] = (byte) (myFlags[endLine] | MODIFIED_MASK);
        toIndex++;
      } else if (toIndex > 0) {
        flags[toIndex - 1] = (byte) (myFlags[endLine] | MODIFIED_MASK);
      }
    }

    shiftData(starts, flags, endLine + 1, toIndex, myStarts.length - (endLine + 1), lengthShift);

    return new LineSet(starts, flags, myLength + lengthShift);
  }

  // Copies `count` lines (starts + flags) from this set into the
  // destination arrays, adding `offsetDelta` to each start offset.
  private int shiftData(int[] dstStarts, byte[] dstFlags, int srcOffset, int dstOffset, int count, int offsetDelta) {
    if (count < 0) return dstOffset;

    System.arraycopy(myFlags, srcOffset, dstFlags, dstOffset, count);

    for (int i = 0; i < count; i++) {
      dstStarts[dstOffset + i] = myStarts[srcOffset + i] + offsetDelta;
    }

    return dstOffset + count;
  }

  /** Returns the index of the line containing {@code offset} (binary search over line starts). */
  public int findLineIndex(int offset) {
    if (offset < 0 || offset > myLength) {
      throw new IndexOutOfBoundsException("Wrong offset: " + offset + ". Should be in range: [0, " + myLength + "]");
    }
    if (myLength == 0) return 0;
    if (offset == myLength) return getLineCount() - 1;

    int bsResult = Arrays.binarySearch(myStarts, offset);
    // Negative result encodes the insertion point; -bsResult - 2 is the line before it.
    return bsResult >= 0 ? bsResult : -bsResult - 2;
  }

  @NotNull
  public LineIterator createIterator() {
    return new LineIteratorImpl(this);
  }

  public final int getLineStart(int index) {
    checkLineIndex(index);
    return isLastEmptyLine(index) ? myLength : myStarts[index];
  }

  // A trailing empty line exists when the text ends with a separator; it has
  // no entry in myStarts/myFlags.
  private boolean isLastEmptyLine(int index) {
    return index == myFlags.length && hasEol(index - 1);
  }

  private boolean hasEol(int lineIndex) {
    return lineIndex >= 0 && getSeparatorLengthUnsafe(lineIndex) > 0;
  }

  public final int getLineEnd(int index) {
    checkLineIndex(index);
    return index >= myStarts.length - 1 ? myLength : myStarts[index + 1];
  }

  private void checkLineIndex(int index) {
    if (index < 0 || index >= getLineCount()) {
      throw new IndexOutOfBoundsException("Wrong line: " + index + ". Available lines count: " + getLineCount());
    }
  }

  final boolean isModified(int index) {
    checkLineIndex(index);
    return !isLastEmptyLine(index) && BitUtil.isSet(myFlags[index], MODIFIED_MASK);
  }

  /** Returns a LineSet with the MODIFIED flag set on every given line (may return {@code this} if unchanged). */
  @NotNull
  final LineSet setModified(@NotNull IntList indices) {
    if (indices.isEmpty()) {
      return this;
    }
    if (indices.size() == 1) {
      int index = indices.getInt(0);
      if (isLastEmptyLine(index) || isModified(index)) return this;
    }

    byte[] flags = myFlags.clone();
    for (int i = 0; i < indices.size(); i++) {
      int index = indices.getInt(i);
      flags[index] |= MODIFIED_MASK;
    }
    return new LineSet(myStarts, flags, myLength);
  }

  /** Clears the MODIFIED flag on lines [startLine, endLine). */
  @NotNull
  LineSet clearModificationFlags(int startLine, int endLine) {
    if (startLine > endLine) {
      throw new IllegalArgumentException("endLine < startLine: " + endLine + " < " + startLine + "; lineCount: " + getLineCount());
    }
    checkLineIndex(startLine);
    checkLineIndex(endLine - 1);

    // The synthetic trailing empty line has no flag entry to clear.
    if (isLastEmptyLine(endLine - 1)) endLine--;
    if (startLine >= endLine) return this;

    byte[] flags = myFlags.clone();
    for (int i = startLine; i < endLine; i++) {
      flags[i] &= ~MODIFIED_MASK;
    }
    return new LineSet(myStarts, flags, myLength);
  }

  @NotNull
  LineSet clearModificationFlags() {
    return getLineCount() == 0 ? this : clearModificationFlags(0, getLineCount());
  }

  final int getSeparatorLength(int index) {
    checkLineIndex(index);
    return getSeparatorLengthUnsafe(index);
  }

  private int getSeparatorLengthUnsafe(int index) {
    return index < myFlags.length ? myFlags[index] & SEPARATOR_MASK : 0;
  }

  final int getLineCount() {
    return myStarts.length + (isLastEmptyLine(myStarts.length) ? 1 : 0);
  }

  int getLength() {
    return myLength;
  }
}
|
allotria/intellij-community
|
platform/core-impl/src/com/intellij/openapi/editor/impl/LineSet.java
|
Java
|
apache-2.0
| 9,330
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.jmeter.extractor;
import java.util.List;
import org.apache.jmeter.threads.JMeterContextService;
import org.apache.jorphan.util.JOrphanUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
/**
 * JSoup based CSS/JQuery extractor.
 * see http://jsoup.org/cookbook/extracting-data/selector-syntax
 * @since 2.9
 */
public class JSoupExtractor implements Extractor {

    private static final long serialVersionUID = -6308012192067714191L;

    /** Prefix of the sampler-context key under which the parsed document is cached. */
    private static final String CACHE_KEY_PREFIX = JSoupExtractor.class.getName() + "_PARSED_BODY";

    public JSoupExtractor() {
        super();
    }

    /**
     * Selects elements matching {@code expression} and appends their values to
     * {@code result} until {@code matchNumber} matches have been collected
     * ({@code matchNumber <= 0} means collect all).
     *
     * @see org.apache.jmeter.extractor.Extractor#extract(String, String, int, String, List, int, String)
     */
    @Override
    public int extract(String expression, String attribute, int matchNumber,
            String inputString, List<String> result, int found,
            String cacheKey) {
        Document document = obtainDocument(inputString, cacheKey);
        for (Element element : document.select(expression)) {
            if (matchNumber > 0 && found == matchNumber) {
                break; // collected the requested number of matches
            }
            result.add(extractValue(attribute, element));
            found++;
        }
        return found;
    }

    /**
     * Parses the input, reusing a previously parsed Document cached in the
     * sampler context when a cache key is supplied.
     *
     * @param inputString HTML to parse
     * @param cacheKey cache discriminator, or {@code null} to skip caching
     * @return parsed Document
     */
    private Document obtainDocument(String inputString, String cacheKey) {
        if (cacheKey == null) {
            return Jsoup.parse(inputString);
        }
        String fullKey = CACHE_KEY_PREFIX + cacheKey;
        Document cached = (Document)
                JMeterContextService.getContext().getSamplerContext().get(fullKey);
        if (cached == null) {
            cached = Jsoup.parse(inputString);
            JMeterContextService.getContext().getSamplerContext().put(fullKey, cached);
        }
        return cached;
    }

    /**
     * @param attribute Attribute to extract; blank means element text
     * @param element Element
     * @return String value
     */
    private String extractValue(String attribute, Element element) {
        return JOrphanUtils.isBlank(attribute)
                ? element.text().trim()
                : element.attr(attribute);
    }
}
|
ra0077/jmeter
|
src/components/org/apache/jmeter/extractor/JSoupExtractor.java
|
Java
|
apache-2.0
| 3,051
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.instructions.mr;
import java.util.HashMap;
import org.apache.sysml.lops.Ctable;
import org.apache.sysml.lops.Ctable.OperationTypes;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.matrix.data.CTableMap;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixValue;
import org.apache.sysml.runtime.matrix.data.OperationsOnMatrixValues;
import org.apache.sysml.runtime.matrix.mapred.CachedValueMap;
import org.apache.sysml.runtime.matrix.mapred.IndexedMatrixValue;
/**
 * MR instruction for ctable (contingency table) operations. Depending on the
 * operation type, the second and third operands are either matrix inputs
 * (byte indexes) or scalar values.
 */
public class CtableInstruction extends MRInstruction {
	// Concrete ctable variant (transform / histogram / expand ...).
	private OperationTypes _op;

	// Matrix input indexes; which of these are valid depends on _op.
	public byte input1;
	public byte input2;
	public byte input3;
	// Scalar replacements for input2/input3 in the scalar variants.
	public double scalar_input2;
	public double scalar_input3;
	// Known output dimensions, or negative when unknown at compile time.
	private long _outputDim1, _outputDim2;

	// Variant: matrix input1, scalar input2 and input3.
	private CtableInstruction(MRType type, OperationTypes op, byte in1, double scalar_in2, double scalar_in3, byte out,
			long outputDim1, long outputDim2, String istr) {
		super(type, null, out);
		_op = op;
		input1 = in1;
		scalar_input2 = scalar_in2;
		scalar_input3 = scalar_in3;
		_outputDim1 = outputDim1;
		_outputDim2 = outputDim2;
		instString = istr;
	}

	// Variant: matrix input1 and input2, scalar input3.
	private CtableInstruction(MRType type, OperationTypes op, byte in1, byte in2, double scalar_in3, byte out, long outputDim1,
			long outputDim2, String istr) {
		super(type, null, out);
		_op = op;
		input1 = in1;
		input2 = in2;
		scalar_input3 = scalar_in3;
		_outputDim1 = outputDim1;
		_outputDim2 = outputDim2;
		instString = istr;
	}

	// Variant: matrix input1 and input3, scalar input2.
	private CtableInstruction(MRType type, OperationTypes op, byte in1, double scalar_in2, byte in3, byte out, long outputDim1,
			long outputDim2, String istr) {
		super(type, null, out);
		_op = op;
		input1 = in1;
		scalar_input2 = scalar_in2;
		input3 = in3;
		_outputDim1 = outputDim1;
		_outputDim2 = outputDim2;
		instString = istr;
	}

	// Variant: all three inputs are matrices.
	protected CtableInstruction(MRType type, OperationTypes op, byte in1, byte in2, byte in3, byte out, long outputDim1,
			long outputDim2, String istr) {
		super(type, null, out);
		_op = op;
		input1 = in1;
		input2 = in2;
		input3 = in3;
		_outputDim1 = outputDim1;
		_outputDim2 = outputDim2;
		instString = istr;
	}

	public long getOutputDim1() {
		return _outputDim1;
	}

	public long getOutputDim2() {
		return _outputDim2;
	}

	/** True when both output dimensions were known at compile time. */
	public boolean knownOutputDims() {
		return (_outputDim1 >= 0 && _outputDim2 >= 0);
	}

	/**
	 * Parses a serialized ctable instruction and dispatches to the constructor
	 * matching the operation type's matrix/scalar operand combination.
	 */
	public static CtableInstruction parseInstruction ( String str )
		throws DMLRuntimeException
	{
		// example instruction string
		// - ctabletransform:::0:DOUBLE:::1:DOUBLE:::2:DOUBLE:::3:DOUBLE
		// - ctabletransformscalarweight:::0:DOUBLE:::1:DOUBLE:::1.0:DOUBLE:::3:DOUBLE
		// - ctabletransformhistogram:::0:DOUBLE:::1.0:DOUBLE:::1.0:DOUBLE:::3:DOUBLE
		// - ctabletransformweightedhistogram:::0:DOUBLE:::1:INT:::1:DOUBLE:::2:DOUBLE

		//check number of fields
		InstructionUtils.checkNumFields ( str, 6 );

		//common setup
		byte in1, in2, in3, out;
		String[] parts = InstructionUtils.getInstructionParts ( str );
		String opcode = parts[0];
		in1 = Byte.parseByte(parts[1]);
		// Dims are serialized as doubles; truncate to long.
		long outputDim1 = (long) Double.parseDouble(parts[4]);
		long outputDim2 = (long) Double.parseDouble(parts[5]);
		out = Byte.parseByte(parts[6]);

		OperationTypes op = Ctable.getOperationType(opcode);
		switch( op )
		{
			case CTABLE_TRANSFORM: {
				in2 = Byte.parseByte(parts[2]);
				in3 = Byte.parseByte(parts[3]);
				return new CtableInstruction(MRType.Ctable, op, in1, in2, in3, out, outputDim1, outputDim2, str);
			}
			case CTABLE_TRANSFORM_SCALAR_WEIGHT: {
				in2 = Byte.parseByte(parts[2]);
				double scalar_in3 = Double.parseDouble(parts[3]);
				return new CtableInstruction(MRType.Ctable, op, in1, in2, scalar_in3, out, outputDim1, outputDim2, str);
			}
			case CTABLE_EXPAND_SCALAR_WEIGHT: {
				double scalar_in2 = Double.parseDouble(parts[2]);
				double type = Double.parseDouble(parts[3]); //used as type (1 left, 0 right)
				return new CtableInstruction(MRType.Ctable, op, in1, scalar_in2, type, out, outputDim1, outputDim2, str);
			}
			case CTABLE_TRANSFORM_HISTOGRAM: {
				double scalar_in2 = Double.parseDouble(parts[2]);
				double scalar_in3 = Double.parseDouble(parts[3]);
				return new CtableInstruction(MRType.Ctable, op, in1, scalar_in2, scalar_in3, out, outputDim1, outputDim2, str);
			}
			case CTABLE_TRANSFORM_WEIGHTED_HISTOGRAM: {
				double scalar_in2 = Double.parseDouble(parts[2]);
				in3 = Byte.parseByte(parts[3]);
				return new CtableInstruction(MRType.Ctable, op, in1, scalar_in2, in3, out, outputDim1, outputDim2, str);
			}
			default:
				throw new DMLRuntimeException("Unrecognized opcode in Ternary Instruction: " + op);
		}
	}

	/**
	 * Executes the ctable operation: resolves the cached matrix inputs
	 * required by the operation type and aggregates the result either into a
	 * dense result block (when output dims are known) or a CTableMap.
	 */
	public void processInstruction(Class<? extends MatrixValue> valueClass, CachedValueMap cachedValues,
			IndexedMatrixValue zeroInput, HashMap<Byte, CTableMap> resultMaps, HashMap<Byte, MatrixBlock> resultBlocks,
			int blockRowFactor, int blockColFactor)
		throws DMLRuntimeException
	{
		IndexedMatrixValue in1, in2, in3 = null;
		in1 = cachedValues.getFirst(input1);

		CTableMap ctableResult = null;
		MatrixBlock ctableResultBlock = null;

		if ( knownOutputDims() ) {
			if ( resultBlocks != null ) {
				ctableResultBlock = resultBlocks.get(output);
				if ( ctableResultBlock == null ) {
					// From MR, output of ctable is set to be sparse since it is built from a single input block.
					ctableResultBlock = new MatrixBlock((int)_outputDim1, (int)_outputDim2, true);
					resultBlocks.put(output, ctableResultBlock);
				}
			}
			else {
				throw new DMLRuntimeException("Unexpected error in processing table instruction.");
			}
		}
		else {
			//prepare aggregation maps
			ctableResult = resultMaps.get(output);
			if(ctableResult == null)
			{
				ctableResult = new CTableMap();
				resultMaps.put(output, ctableResult);
			}
		}

		//get inputs and process instruction
		// Missing inputs silently skip the block (other blocks may carry them).
		switch( _op )
		{
			case CTABLE_TRANSFORM: {
				in2 = cachedValues.getFirst(input2);
				in3 = cachedValues.getFirst(input3);
				if(in1 == null || in2 == null || in3 == null )
					return;
				OperationsOnMatrixValues.performCtable(in1.getIndexes(), in1.getValue(), in2.getIndexes(), in2.getValue(),
						in3.getIndexes(), in3.getValue(), ctableResult, ctableResultBlock, optr);
				break;
			}
			case CTABLE_TRANSFORM_SCALAR_WEIGHT: {
				// 3rd input is a scalar
				in2 = cachedValues.getFirst(input2);
				if(in1 == null || in2 == null )
					return;
				OperationsOnMatrixValues.performCtable(in1.getIndexes(), in1.getValue(), in2.getIndexes(), in2.getValue(),
						scalar_input3, ctableResult, ctableResultBlock, optr);
				break;
			}
			case CTABLE_EXPAND_SCALAR_WEIGHT: {
				// 2nd and 3rd input is a scalar
				if(in1 == null )
					return;
				// scalar_input3 encodes the expand direction (1 left, 0 right).
				OperationsOnMatrixValues.performCtable(in1.getIndexes(), in1.getValue(), scalar_input2, (scalar_input3 == 1),
						blockRowFactor, ctableResult, ctableResultBlock, optr);
				break;
			}
			case CTABLE_TRANSFORM_HISTOGRAM: {
				// 2nd and 3rd inputs are scalars
				if(in1 == null )
					return;
				OperationsOnMatrixValues.performCtable(in1.getIndexes(), in1.getValue(), scalar_input2, scalar_input3, ctableResult, ctableResultBlock, optr);
				break;
			}
			case CTABLE_TRANSFORM_WEIGHTED_HISTOGRAM: {
				// 2nd input is a scalar, 3rd is a matrix (weights)
				in3 = cachedValues.getFirst(input3);
				if(in1 == null || in3 == null)
					return;
				OperationsOnMatrixValues.performCtable(in1.getIndexes(), in1.getValue(), scalar_input2,
						in3.getIndexes(), in3.getValue(), ctableResult, ctableResultBlock, optr);
				break;
			}
			default:
				throw new DMLRuntimeException("Unrecognized opcode in Tertiary Instruction: " + instString);
		}
	}

	// Base-class entry point is unsupported; callers must use the
	// result-map/result-block overload above.
	@Override
	public void processInstruction(Class<? extends MatrixValue> valueClass,
			CachedValueMap cachedValues, IndexedMatrixValue tempValue, IndexedMatrixValue zeroInput,
			int blockRowFactor, int blockColFactor)
		throws DMLRuntimeException
	{
		throw new DMLRuntimeException("This function should not be called!");
	}

	@Override
	public byte[] getAllIndexes() throws DMLRuntimeException {
		return new byte[]{input1, input2, input3, output};
	}

	@Override
	public byte[] getInputIndexes() throws DMLRuntimeException {
		return new byte[]{input1, input2, input3};
	}
}
|
dusenberrymw/systemml
|
src/main/java/org/apache/sysml/runtime/instructions/mr/CtableInstruction.java
|
Java
|
apache-2.0
| 9,168
|
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import json
from .common import BaseTest, functional
from c7n.resources.aws import shape_validate
from c7n.utils import yaml_load
class TestSNS(BaseTest):
    """Tests for the aws.sns topic resource: policy statement add/remove,
    KMS encryption, tagging, deletion, Security Hub finding formatting,
    and config-source loading.

    Fix: three assertions previously used ``assertTrue(a, b)``, which treats
    the second argument as a failure *message* and passes for any truthy
    ``a``; they are now ``assertEqual`` so the values are actually compared.
    """

    @functional
    def test_sns_remove_matched(self):
        """remove-statements with statement_ids=matched removes only the
        statements matched by the cross-account filter."""
        session_factory = self.replay_flight_data("test_sns_remove_matched")
        client = session_factory().client("sns")
        name = "test-sns-remove-matched"
        topic_arn = client.create_topic(Name=name)["TopicArn"]
        self.addCleanup(client.delete_topic, TopicArn=topic_arn)
        client.set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName="Policy",
            AttributeValue=json.dumps(
                {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Sid": "SpecificAllow",
                            "Effect": "Allow",
                            "Principal": {"AWS": "arn:aws:iam::644160558196:root"},
                            "Action": ["SNS:Subscribe"],
                            "Resource": topic_arn,
                        },
                        {
                            "Sid": "Public",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:GetTopicAttributes"],
                            "Resource": topic_arn,
                        },
                    ],
                }
            ),
        )
        p = self.load_policy(
            {
                "name": "sns-rm-matched",
                "resource": "sns",
                "filters": [
                    {"TopicArn": topic_arn},
                    {"type": "cross-account", "whitelist": ["123456789012"]},
                ],
                "actions": [{"type": "remove-statements", "statement_ids": "matched"}],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual([r["TopicArn"] for r in resources], [topic_arn])
        data = json.loads(
            client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
                "Attributes"
            ][
                "Policy"
            ]
        )
        # only the non-matched statement should remain
        self.assertEqual(
            [s["Sid"] for s in data.get("Statement", ())], ["SpecificAllow"]
        )

    @functional
    def test_sns_remove_named(self):
        """remove-statements with an explicit Sid list removes that statement."""
        session_factory = self.replay_flight_data("test_sns_remove_named")
        client = session_factory().client("sns")
        name = "test-sns-remove-named"
        topic_arn = client.create_topic(Name=name)["TopicArn"]
        self.addCleanup(client.delete_topic, TopicArn=topic_arn)
        client.set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName="Policy",
            AttributeValue=json.dumps(
                {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Sid": "SpecificAllow",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:Subscribe"],
                            "Resource": topic_arn,
                        },
                        {
                            "Sid": "RemoveMe",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:GetTopicAttributes"],
                            "Resource": topic_arn,
                        },
                    ],
                }
            ),
        )
        p = self.load_policy(
            {
                "name": "sns-rm-named",
                "resource": "sns",
                "filters": [{"TopicArn": topic_arn}],
                "actions": [
                    {"type": "remove-statements", "statement_ids": ["RemoveMe"]}
                ],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        data = json.loads(
            client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
                "Attributes"
            ][
                "Policy"
            ]
        )
        self.assertTrue("RemoveMe" not in [s["Sid"] for s in data.get("Statement", ())])

    @functional
    def test_sns_modify_replace_policy(self):
        """modify-policy with remove-statements '*' replaces the whole policy."""
        session_factory = self.replay_flight_data("test_sns_modify_replace_policy")
        client = session_factory().client("sns")
        name = "test_sns_modify_replace_policy"
        topic_arn = client.create_topic(Name=name)["TopicArn"]
        self.addCleanup(client.delete_topic, TopicArn=topic_arn)
        client.set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName="Policy",
            AttributeValue=json.dumps(
                {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Sid": "SpecificAllow",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:Subscribe"],
                            "Resource": topic_arn,
                        }
                    ],
                }
            ),
        )
        p = self.load_policy(
            {
                "name": "sns-modify-replace-policy",
                "resource": "sns",
                "filters": [{"TopicArn": topic_arn}],
                "actions": [
                    {
                        "type": "modify-policy",
                        "add-statements": [
                            {
                                "Sid": "ReplaceWithMe",
                                "Effect": "Allow",
                                "Principal": "*",
                                "Action": ["SNS:GetTopicAttributes"],
                                "Resource": topic_arn,
                            }
                        ],
                        "remove-statements": "*",
                    }
                ],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        data = json.loads(
            client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
                "Attributes"
            ][
                "Policy"
            ]
        )
        self.assertTrue(
            "ReplaceWithMe" in [s["Sid"] for s in data.get("Statement", ())]
        )

    @functional
    def test_sns_account_id_template(self):
        """{account_id} placeholders in added statements are interpolated."""
        session_factory = self.replay_flight_data("test_sns_account_id_template")
        client = session_factory().client("sns")
        name = "test_sns_account_id_template"
        topic_arn = client.create_topic(Name=name)["TopicArn"]
        self.addCleanup(client.delete_topic, TopicArn=topic_arn)
        client.set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName="Policy",
            AttributeValue=json.dumps(
                {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Sid": "SpecificAllow",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:Subscribe"],
                            "Resource": topic_arn,
                        }
                    ],
                }
            ),
        )
        p = self.load_policy(
            {
                "name": "sns-modify-replace-policy",
                "resource": "sns",
                "filters": [{"TopicArn": topic_arn}],
                "actions": [
                    {
                        "type": "modify-policy",
                        "add-statements": [
                            {
                                "Sid": "__default_statement_ID_{account_id}",
                                "Effect": "Allow",
                                "Principal": {"Service": "s3.amazonaws.com"},
                                "Action": "SNS:Publish",
                                "Resource": topic_arn,
                                "Condition": {
                                    "StringEquals": {
                                        "AWS:SourceAccount": "{account_id}"
                                    },
                                    "ArnLike": {"aws:SourceArn": "arn:aws:s3:*:*:*"},
                                },
                            }
                        ],
                        "remove-statements": "*",
                    }
                ],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        data = json.loads(
            client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
                "Attributes"
            ][
                "Policy"
            ]
        )
        self.assertTrue(
            "__default_statement_ID_" +
            self.account_id in [s["Sid"] for s in data.get("Statement", ())]
        )

    @functional
    def test_sns_modify_remove_policy(self):
        """modify-policy with only remove-statements drops the named Sid."""
        session_factory = self.replay_flight_data("test_sns_modify_remove_policy")
        client = session_factory().client("sns")
        name = "test_sns_modify_remove_policy"
        topic_arn = client.create_topic(Name=name)["TopicArn"]
        self.addCleanup(client.delete_topic, TopicArn=topic_arn)
        client.set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName="Policy",
            AttributeValue=json.dumps(
                {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Sid": "SpecificAllow",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:Subscribe"],
                            "Resource": topic_arn,
                        },
                        {
                            "Sid": "RemoveMe",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:GetTopicAttributes"],
                            "Resource": topic_arn,
                        },
                    ],
                }
            ),
        )
        p = self.load_policy(
            {
                "name": "sns-modify-remove-policy",
                "resource": "sns",
                "filters": [{"TopicArn": topic_arn}],
                "actions": [
                    {
                        "type": "modify-policy",
                        "add-statements": [],
                        "remove-statements": ["RemoveMe"],
                    }
                ],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        data = json.loads(
            client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
                "Attributes"
            ][
                "Policy"
            ]
        )
        self.assertTrue("RemoveMe" not in [s["Sid"] for s in data.get("Statement", ())])

    @functional
    def test_sns_modify_add_policy(self):
        """modify-policy with only add-statements appends the new statement."""
        session_factory = self.replay_flight_data("test_sns_modify_add_policy")
        client = session_factory().client("sns")
        name = "test_sns_modify_add_policy"
        topic_arn = client.create_topic(Name=name)["TopicArn"]
        self.addCleanup(client.delete_topic, TopicArn=topic_arn)
        client.set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName="Policy",
            AttributeValue=json.dumps(
                {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Sid": "SpecificAllow",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:Subscribe"],
                            "Resource": topic_arn,
                        }
                    ],
                }
            ),
        )
        p = self.load_policy(
            {
                "name": "sns-modify-add-policy",
                "resource": "sns",
                "filters": [{"TopicArn": topic_arn}],
                "actions": [
                    {
                        "type": "modify-policy",
                        "add-statements": [
                            {
                                "Sid": "AddMe",
                                "Effect": "Allow",
                                "Principal": "*",
                                "Action": ["SNS:GetTopicAttributes"],
                                "Resource": topic_arn,
                            }
                        ],
                        "remove-statements": [],
                    }
                ],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        data = json.loads(
            client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
                "Attributes"
            ][
                "Policy"
            ]
        )
        self.assertTrue("AddMe" in [s["Sid"] for s in data.get("Statement", ())])

    @functional
    def test_sns_modify_add_and_remove_policy(self):
        """modify-policy can add and remove statements in a single action."""
        session_factory = self.replay_flight_data(
            "test_sns_modify_add_and_remove_policy"
        )
        client = session_factory().client("sns")
        name = "test_sns_modify_add_and_remove_policy"
        topic_arn = client.create_topic(Name=name)["TopicArn"]
        self.addCleanup(client.delete_topic, TopicArn=topic_arn)
        client.set_topic_attributes(
            TopicArn=topic_arn,
            AttributeName="Policy",
            AttributeValue=json.dumps(
                {
                    "Version": "2012-10-17",
                    "Statement": [
                        {
                            "Sid": "SpecificAllow",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:Subscribe"],
                            "Resource": topic_arn,
                        },
                        {
                            "Sid": "RemoveMe",
                            "Effect": "Allow",
                            "Principal": "*",
                            "Action": ["SNS:GetTopicAttributes"],
                            "Resource": topic_arn,
                        },
                    ],
                }
            ),
        )
        p = self.load_policy(
            {
                "name": "sns-modify-add-and-remove-policy",
                "resource": "sns",
                "filters": [{"TopicArn": topic_arn}],
                "actions": [
                    {
                        "type": "modify-policy",
                        "add-statements": [
                            {
                                "Sid": "AddMe",
                                "Effect": "Allow",
                                "Principal": "*",
                                "Action": ["SNS:GetTopicAttributes"],
                                "Resource": topic_arn,
                            }
                        ],
                        "remove-statements": ["RemoveMe"],
                    }
                ],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        data = json.loads(
            client.get_topic_attributes(TopicArn=resources[0]["TopicArn"])[
                "Attributes"
            ][
                "Policy"
            ]
        )
        statement_ids = {s["Sid"] for s in data.get("Statement", ())}
        self.assertTrue("AddMe" in statement_ids)
        self.assertTrue("RemoveMe" not in statement_ids)
        self.assertTrue("SpecificAllow" in statement_ids)

    def test_sns_topic_encryption(self):
        """kms-key related filter matches topics by key alias."""
        session_factory = self.replay_flight_data('test_sns_kms_related_filter_test')
        kms = session_factory().client('kms')
        p = self.load_policy(
            {
                'name': 'test-sns-kms-related-filter',
                'resource': 'sns',
                'filters': [
                    {
                        'TopicArn': 'arn:aws:sns:us-east-1:644160558196:test'
                    },
                    {
                        'type': 'kms-key',
                        'key': 'c7n:AliasName',
                        'value': 'alias/skunk/trails'
                    }
                ]
            },
            session_factory=session_factory
        )
        resources = p.run()
        # was assertTrue(len(resources), 1), which never fails; compare for real
        self.assertEqual(len(resources), 1)
        aliases = kms.list_aliases(KeyId=resources[0]['KmsMasterKeyId'])
        self.assertEqual(aliases['Aliases'][0]['AliasName'], 'alias/skunk/trails')

    def test_set_sns_topic_encryption(self):
        """set-encryption without a key enables the default alias/aws/sns key."""
        session_factory = self.replay_flight_data('test_sns_set_encryption')
        topic = 'arn:aws:sns:us-west-1:644160558196:test'
        p = self.load_policy(
            {
                'name': 'test-sns-kms-related-filter',
                'resource': 'sns',
                'filters': [
                    {
                        'TopicArn': topic
                    },
                    {
                        'KmsMasterKeyId': 'absent'
                    }
                ],
                'actions': [
                    {
                        'type': 'set-encryption'
                    }
                ]
            },
            session_factory=session_factory
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        sns = session_factory().client('sns')
        attributes = sns.get_topic_attributes(TopicArn=topic)
        # was assertTrue(value, msg), which never fails; compare for real
        self.assertEqual(attributes['Attributes']['KmsMasterKeyId'], 'alias/aws/sns')

    def test_sns_disable_encryption(self):
        """set-encryption with enabled: false clears the KMS key."""
        session_factory = self.replay_flight_data('test_sns_unset_encryption')
        topic = 'arn:aws:sns:us-west-1:644160558196:test'
        p = self.load_policy(
            {
                'name': 'test-sns-kms-related-filter',
                'resource': 'sns',
                'filters': [
                    {
                        'TopicArn': topic
                    },
                    {
                        'KmsMasterKeyId': 'alias/aws/sns'
                    }
                ],
                'actions': [
                    {
                        'type': 'set-encryption',
                        'enabled': False
                    }
                ]
            },
            session_factory=session_factory
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        sns = session_factory().client('sns')
        attributes = sns.get_topic_attributes(TopicArn=topic)['Attributes']
        self.assertFalse(attributes.get('KmsMasterKeyId'))

    def test_sns_set_encryption_custom_key(self):
        """set-encryption with an explicit key sets that key on the topic."""
        session_factory = self.replay_flight_data('test_sns_set_encryption_custom_key')
        topic = 'arn:aws:sns:us-west-1:644160558196:test'
        key_alias = 'alias/alias/test/key'
        sns = session_factory().client('sns')
        p = self.load_policy(
            {
                'name': 'test-sns-kms-related-filter-alias',
                'resource': 'sns',
                'filters': [
                    {
                        'TopicArn': topic
                    },
                    {
                        'KmsMasterKeyId': 'absent'
                    }
                ],
                'actions': [
                    {
                        'type': 'set-encryption',
                        'key': key_alias
                    }
                ]
            },
            session_factory=session_factory
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        attributes = sns.get_topic_attributes(TopicArn=topic)['Attributes']
        self.assertEqual(attributes.get('KmsMasterKeyId'), key_alias)

    def test_sns_delete(self):
        """delete action removes the matched topic."""
        session_factory = self.replay_flight_data('test_sns_delete_topic')
        policy = """
        name: delete-sns
        resource: aws.sns
        filters:
          - TopicArn: arn:aws:sns:us-west-1:644160558196:test
        actions:
          - type: delete
        """
        p = self.load_policy(yaml_load(policy), session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        client = session_factory().client('sns')
        resources = client.list_topics()['Topics']
        self.assertEqual(len(resources), 0)

    def test_sns_tag(self):
        """tag action adds the requested tag to untagged topics."""
        session_factory = self.replay_flight_data("test_sns_tag")
        p = self.load_policy(
            {
                "name": "tag-sns",
                "resource": "sns",
                "filters": [{"tag:Tagging": "absent"}],
                "actions": [{"type": "tag", "key": "Tagging", "value": "added"}],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        client = session_factory().client("sns")
        tags = client.list_tags_for_resource(ResourceArn=resources[0]["TopicArn"])["Tags"]
        self.assertEqual(tags[0]["Value"], "added")

    def test_sns_remove_tag(self):
        """remove-tag action clears the mark-for-op tag."""
        session_factory = self.replay_flight_data(
            "test_sns_remove_tag")
        p = self.load_policy(
            {
                "name": "untag-sns",
                "resource": "sns",
                "filters": [
                    {
                        "type": "marked-for-op",
                        "tag": "custodian_cleanup",
                        "op": "delete",
                    }
                ],
                "actions": [{"type": "remove-tag", "tags": ["custodian_cleanup"]}],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        client = session_factory().client("sns")
        tags = client.list_tags_for_resource(ResourceArn=resources[0]["TopicArn"])["Tags"]
        self.assertEqual(len(tags), 0)

    def test_sns_mark_for_op(self):
        """mark-for-op tags untagged topics for later deletion."""
        session_factory = self.replay_flight_data(
            "test_sns_mark_for_op"
        )
        p = self.load_policy(
            {
                "name": "sns-untagged-delete",
                "resource": "sns",
                "filters": [
                    {"tag:Tagging": "absent"},
                    {"tag:custodian_cleanup": "absent"},
                ],
                "actions": [
                    {
                        "type": "mark-for-op",
                        "tag": "custodian_cleanup",
                        "op": "delete",
                        "days": 1,
                    }
                ],
            },
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        client = session_factory().client("sns")
        tags = client.list_tags_for_resource(ResourceArn=resources[0]["TopicArn"])["Tags"]
        # was assertTrue(value, msg), which never fails; compare for real
        self.assertEqual(tags[0]["Key"], "custodian_cleanup")

    def test_sns_post_finding(self):
        """post-finding formats a topic into a valid AwsSnsTopic finding."""
        factory = self.replay_flight_data('test_sns_post_finding')
        p = self.load_policy({
            'name': 'sns',
            'resource': 'aws.sns',
            'actions': [
                {'type': 'post-finding',
                 'types': [
                     'Software and Configuration Checks/OrgStandard/abc-123']}]},
            session_factory=factory, config={'region': 'us-west-2'})
        resources = p.resource_manager.get_resources([
            'arn:aws:sns:us-west-2:644160558196:config-topic'])
        rfinding = p.resource_manager.actions[0].format_resource(
            resources[0])
        self.assertEqual(
            rfinding,
            {'Details': {'AwsSnsTopic': {
                'Owner': '644160558196',
                'TopicName': 'config-topic'}},
             'Id': 'arn:aws:sns:us-west-2:644160558196:config-topic',
             'Partition': 'aws',
             'Region': 'us-west-2',
             'Type': 'AwsSnsTopic'})
        shape_validate(
            rfinding['Details']['AwsSnsTopic'],
            'AwsSnsTopicDetails', 'securityhub')

    def test_sns_config(self):
        """topics load correctly from the config source, including tags."""
        session_factory = self.replay_flight_data("test_sns_config")
        p = self.load_policy(
            {"name": "sns-config",
             "source": "config",
             "resource": "sns"},
            session_factory=session_factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 2)
        self.assertEqual(resources[0]['Tags'][0]['Value'], 'false')
class TestSubscription(BaseTest):
    """Tests for the sns-subscription resource."""

    def test_subscription_delete(self):
        """Subscriptions whose Owner differs from the given account are
        deleted; afterwards only same-owner subscriptions remain."""
        session_factory = self.replay_flight_data("test_subscription_delete")
        policy = {
            "name": "external-owner-delete",
            "resource": "sns-subscription",
            "filters": [
                {
                    "type": "value",
                    "key": "Owner",
                    "value": "123456789099",
                    "op": "ne",
                }
            ],
            "actions": [{"type": "delete"}],
        }
        p = self.load_policy(policy, session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
        self.assertNotEqual(resources[0]["Owner"], "123456789099")
        client = session_factory().client("sns")
        subs = client.list_subscriptions()
        for s in subs.get("Subscriptions", []):
            self.assertTrue("123456789099" == s.get("Owner"))
|
thisisshi/cloud-custodian
|
tests/test_sns.py
|
Python
|
apache-2.0
| 26,225
|
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DeleteNodeTxn_h__
#define DeleteNodeTxn_h__
#include "EditTxn.h"
#include "nsCOMPtr.h"
#include "nsCycleCollectionParticipant.h"
#include "nsIContent.h"
#include "nsINode.h"
#include "nsISupportsImpl.h"
#include "nscore.h"
class nsEditor;
class nsRangeUpdater;
/**
 * A transaction that deletes a single node from the DOM tree.
 * Remembers the node's parent and next sibling so the deletion can be
 * undone (re-insertion at the original position) and redone.
 */
class DeleteNodeTxn : public EditTxn
{
public:
  /** initialize the transaction.
    * @param aEditor        the editor issuing the deletion
    * @param aNode          the node to delete
    * @param aRangeUpdater  notified of the removal so tracked ranges can be
    *                       adjusted (ownership not transferred — TODO confirm
    *                       the updater outlives this transaction)
    */
  nsresult Init(nsEditor* aEditor, nsINode* aNode,
                nsRangeUpdater* aRangeUpdater);
  DeleteNodeTxn();
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DeleteNodeTxn, EditTxn)
  NS_DECL_EDITTXN
  // Re-applies the deletion after an undo.
  NS_IMETHOD RedoTransaction();
protected:
  /** the element to delete */
  nsCOMPtr<nsINode> mNode;
  /** parent of node to delete; kept so undo can re-insert mNode */
  nsCOMPtr<nsIContent> mRefNode; // placeholder comment removed below; see real fields
  /** next sibling to remember for undo/redo purposes */
  nsCOMPtr<nsINode> mParent;
  /** the editor for this transaction (raw pointer; presumably non-owning — verify) */
  nsEditor* mEditor;
  /** range updater object (raw pointer; presumably non-owning — verify) */
  nsRangeUpdater* mRangeUpdater;
};
#endif
|
sergecodd/FireFox-OS
|
B2G/gecko/editor/libeditor/base/DeleteNodeTxn.h
|
C
|
apache-2.0
| 1,375
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (9-Debian) on Thu Sep 28 23:13:22 GMT 2017 -->
<title>ErrorHandlerFactory (dollar-script 0.4.5195 API)</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="date" content="2017-09-28">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<link rel="stylesheet" type="text/css" href="../../../../jquery/jquery-ui.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
<script type="text/javascript" src="../../../../jquery/jszip/dist/jszip.min.js"></script>
<script type="text/javascript" src="../../../../jquery/jszip-utils/dist/jszip-utils.min.js"></script>
<!--[if IE]>
<script type="text/javascript" src="../../../../jquery/jszip-utils/dist/jszip-utils-ie.min.js"></script>
<![endif]-->
<script type="text/javascript" src="../../../../jquery/jquery-1.10.2.js"></script>
<script type="text/javascript" src="../../../../jquery/jquery-ui.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="ErrorHandlerFactory (dollar-script 0.4.5195 API)";
}
}
catch(err) {
}
//-->
var methods = {"i0":10,"i1":10,"i2":10,"i3":10,"i4":9};
var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],8:["t4","Concrete Methods"]};
var altColor = "altColor";
var rowColor = "rowColor";
var tableTab = "tableTab";
var activeTableTab = "activeTableTab";
var pathtoroot = "../../../../";loadScripts(document, 'script');</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="fixedNav">
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/ErrorHandlerFactory.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../dollar/internal/runtime/script/DollarUtilFactory.html" title="class in dollar.internal.runtime.script"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../dollar/internal/runtime/script/NameType.html" title="class in dollar.internal.runtime.script"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?dollar/internal/runtime/script/ErrorHandlerFactory.html" target="_top">Frames</a></li>
<li><a href="ErrorHandlerFactory.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<ul class="navListSearch">
<li><span>SEARCH: </span>
<input type="text" id="search" value=" " disabled="disabled">
<input type="reset" id="reset" value=" " disabled="disabled">
</li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
</div>
<div class="navPadding"> </div>
<script type="text/javascript"><!--
$('.navPadding').css('padding-top', $('.fixedNav').css("height"));
//-->
</script>
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle"><span class="packageLabelInType">Package</span> <a href="../../../../dollar/internal/runtime/script/package-summary.html">dollar.internal.runtime.script</a></div>
<h2 title="Class ErrorHandlerFactory" class="title">Class ErrorHandlerFactory</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>dollar.internal.runtime.script.ErrorHandlerFactory</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd><code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a></code></dd>
</dl>
<hr>
<br>
<pre>public class <span class="typeNameLabel">ErrorHandlerFactory</span>
extends java.lang.Object
implements <a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="memberSummary" summary="Method Summary table, listing methods, and an explanation">
<caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd"> </span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd"> </span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd"> </span></span><span id="t4" class="tableTab"><span><a href="javascript:show(8);">Concrete Methods</a></span><span class="tabEnd"> </span></span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colSecond" scope="col">Method</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tr id="i0" class="altColor">
<td class="colFirst"><code>@NotNull dollar.api.Value</code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="../../../../dollar/internal/runtime/script/ErrorHandlerFactory.html#handle-dollar.api.Scope-dollar.api.script.Source-dollar.api.DollarException-">handle</a></span>​(@NotNull dollar.api.Scope scope,
@Nullable dollar.api.script.Source source,
@NotNull dollar.api.DollarException e)</code></th>
<td class="colLast"> </td>
</tr>
<tr id="i1" class="rowColor">
<td class="colFirst"><code>@NotNull dollar.api.Value</code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="../../../../dollar/internal/runtime/script/ErrorHandlerFactory.html#handle-dollar.api.Scope-dollar.api.script.Source-dollar.internal.runtime.script.api.exceptions.DollarAssertionException-">handle</a></span>​(@NotNull dollar.api.Scope scope,
@Nullable dollar.api.script.Source source,
@NotNull <a href="../../../../dollar/internal/runtime/script/api/exceptions/DollarAssertionException.html" title="class in dollar.internal.runtime.script.api.exceptions">DollarAssertionException</a> e)</code></th>
<td class="colLast"> </td>
</tr>
<tr id="i2" class="altColor">
<td class="colFirst"><code>@NotNull dollar.api.Value</code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="../../../../dollar/internal/runtime/script/ErrorHandlerFactory.html#handle-dollar.api.Scope-dollar.api.script.Source-java.lang.Exception-">handle</a></span>​(@NotNull dollar.api.Scope scope,
@Nullable dollar.api.script.Source source,
@NotNull java.lang.Exception e)</code></th>
<td class="colLast"> </td>
</tr>
<tr id="i3" class="rowColor">
<td class="colFirst"><code>void</code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="../../../../dollar/internal/runtime/script/ErrorHandlerFactory.html#handleTopLevel-java.lang.Exception-java.lang.String-java.io.File-">handleTopLevel</a></span>​(@NotNull java.lang.Exception t,
@Nullable java.lang.String name,
@Nullable java.io.File file)</code></th>
<td class="colLast"> </td>
</tr>
<tr id="i4" class="altColor">
<td class="colFirst"><code>static <a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a></code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="../../../../dollar/internal/runtime/script/ErrorHandlerFactory.html#instance--">instance</a></span>​()</code></th>
<td class="colLast"> </td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method.detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="instance--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>instance</h4>
<pre>public static <a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a> instance​()</pre>
</li>
</ul>
<a name="handle-dollar.api.Scope-dollar.api.script.Source-dollar.internal.runtime.script.api.exceptions.DollarAssertionException-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>handle</h4>
<pre>@NotNull
public @NotNull dollar.api.Value handle​(@NotNull
@NotNull dollar.api.Scope scope,
@Nullable
@Nullable dollar.api.script.Source source,
@NotNull
@NotNull <a href="../../../../dollar/internal/runtime/script/api/exceptions/DollarAssertionException.html" title="class in dollar.internal.runtime.script.api.exceptions">DollarAssertionException</a> e)</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html#handle-dollar.api.Scope-dollar.api.script.Source-dollar.internal.runtime.script.api.exceptions.DollarAssertionException-">handle</a></code> in interface <code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a></code></dd>
</dl>
</li>
</ul>
<a name="handle-dollar.api.Scope-dollar.api.script.Source-dollar.api.DollarException-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>handle</h4>
<pre>@NotNull
public @NotNull dollar.api.Value handle​(@NotNull
@NotNull dollar.api.Scope scope,
@Nullable
@Nullable dollar.api.script.Source source,
@NotNull
@NotNull dollar.api.DollarException e)</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html#handle-dollar.api.Scope-dollar.api.script.Source-dollar.api.DollarException-">handle</a></code> in interface <code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a></code></dd>
</dl>
</li>
</ul>
<a name="handle-dollar.api.Scope-dollar.api.script.Source-java.lang.Exception-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>handle</h4>
<pre>@NotNull
public @NotNull dollar.api.Value handle​(@NotNull
@NotNull dollar.api.Scope scope,
@Nullable
@Nullable dollar.api.script.Source source,
@NotNull
@NotNull java.lang.Exception e)</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html#handle-dollar.api.Scope-dollar.api.script.Source-java.lang.Exception-">handle</a></code> in interface <code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a></code></dd>
</dl>
</li>
</ul>
<a name="handleTopLevel-java.lang.Exception-java.lang.String-java.io.File-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>handleTopLevel</h4>
<pre>public void handleTopLevel​(@NotNull
@NotNull java.lang.Exception t,
@Nullable
@Nullable java.lang.String name,
@Nullable
@Nullable java.io.File file)
throws java.lang.RuntimeException</pre>
<dl>
<dt><span class="overrideSpecifyLabel">Specified by:</span></dt>
<dd><code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html#handleTopLevel-T-java.lang.String-java.io.File-">handleTopLevel</a></code> in interface <code><a href="../../../../dollar/internal/runtime/script/api/ParserErrorHandler.html" title="interface in dollar.internal.runtime.script.api">ParserErrorHandler</a></code></dd>
<dt><span class="throwsLabel">Throws:</span></dt>
<dd><code>java.lang.RuntimeException</code></dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="class-use/ErrorHandlerFactory.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../dollar/internal/runtime/script/DollarUtilFactory.html" title="class in dollar.internal.runtime.script"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../dollar/internal/runtime/script/NameType.html" title="class in dollar.internal.runtime.script"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?dollar/internal/runtime/script/ErrorHandlerFactory.html" target="_top">Frames</a></li>
<li><a href="ErrorHandlerFactory.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li>Constr | </li>
<li><a href="#method.detail">Method</a></li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2017. All rights reserved.</small></p>
</body>
</html>
|
neilellis/dollar
|
docs/dev/dollar-script/apidocs/dollar/internal/runtime/script/ErrorHandlerFactory.html
|
HTML
|
apache-2.0
| 17,413
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.commons.bcel6.verifier;
import java.awt.Dimension;
import java.awt.Toolkit;
import javax.swing.UIManager;
import org.apache.commons.bcel6.generic.Type;
/**
* A graphical user interface application demonstrating JustIce.
*
* @version $Id$
*/
public class GraphicalVerifier {
    // When true, pack() the frame to its preferred layout size; when false,
    // validate() it at its preset size. Hard-coded to false in this demo.
    private boolean packFrame = false;

    /**
     * Builds the verifier GUI frame, centers it on screen, shows it, and
     * pre-populates the class list with java.lang.Object (selected by default).
     */
    private GraphicalVerifier() {
        VerifierAppFrame frame = new VerifierAppFrame();
        // Validate frames that have a preset size; pack frames that carry
        // usable preferred-size information, e.g. from their layout.
        // (Translated from the original German comments.)
        if (packFrame) {
            frame.pack();
        } else {
            frame.validate();
        }
        // Center the window, clamping the frame to the screen dimensions.
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        Dimension frameSize = frame.getSize();
        if (frameSize.height > screenSize.height) {
            frameSize.height = screenSize.height;
        }
        if (frameSize.width > screenSize.width) {
            frameSize.width = screenSize.width;
        }
        frame.setLocation((screenSize.width - frameSize.width) / 2,
                (screenSize.height - frameSize.height) / 2);
        frame.setVisible(true);
        frame.getClassNamesJList().setModel(new VerifierFactoryListModel());
        VerifierFactory.getVerifier(Type.OBJECT.getClassName()); // Fill list with java.lang.Object
        frame.getClassNamesJList().setSelectedIndex(0); // default, will verify java.lang.Object
    }

    /**
     * Entry point: switches to the system look-and-feel (logging and falling
     * back to the default L&amp;F if that fails) and opens the verifier window.
     */
    public static void main( String[] args ) {
        try {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (Exception e) {
            e.printStackTrace();
        }
        new GraphicalVerifier();
    }
}
|
mohanaraosv/commons-bcel
|
src/main/java/org/apache/commons/bcel6/verifier/GraphicalVerifier.java
|
Java
|
apache-2.0
| 2,652
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.machinelearning.model;
import java.io.Serializable;
import java.util.Objects;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
*/
public class CreateDataSourceFromRDSRequest extends AmazonWebServiceRequest
implements Serializable, Cloneable {
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>DataSource</code>.
* Typically, an Amazon Resource Number (ARN) becomes the ID for a
* <code>DataSource</code>.
* </p>
*/
private String dataSourceId;
/**
* <p>
* A user-supplied name or description of the <code>DataSource</code>.
* </p>
*/
private String dataSourceName;
/**
* <p>
* The data specification of an Amazon RDS <code>DataSource</code>:
* </p>
* <ul>
* <li>
* <p>
* DatabaseInformation -
* <ul>
* <li> <code>DatabaseName </code> - Name of the Amazon RDS database.</li>
* <li> <code> InstanceIdentifier </code> - Unique identifier for the Amazon
* RDS database instance.</li>
* </ul>
* </p>
* </li>
* <li>
* <p>
* DatabaseCredentials - AWS Identity and Access Management (IAM)
* credentials that are used to connect to the Amazon RDS database.
* </p>
* </li>
* <li>
* <p>
* ResourceRole - Role (DataPipelineDefaultResourceRole) assumed by an
* Amazon Elastic Compute Cloud (EC2) instance to carry out the copy task
* from Amazon RDS to Amazon S3. For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* ServiceRole - Role (DataPipelineDefaultRole) assumed by the AWS Data
* Pipeline service to monitor the progress of the copy task from Amazon RDS
* to Amazon Simple Storage Service (S3). For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* SecurityInfo - Security information to use to access an Amazon RDS
* instance. You need to set up appropriate ingress rules for the security
* entity IDs provided to allow access to the Amazon RDS instance. Specify a
* [<code>SubnetId</code>, <code>SecurityGroupIds</code>] pair for a
* VPC-based Amazon RDS instance.
* </p>
* </li>
* <li>
* <p>
* SelectSqlQuery - Query that is used to retrieve the observation data for
* the <code>Datasource</code>.
* </p>
* </li>
* <li>
* <p>
* S3StagingLocation - Amazon S3 location for staging RDS data. The data
* retrieved from Amazon RDS using <code>SelectSqlQuery</code> is stored in
* this location.
* </p>
* </li>
* <li>
* <p>
* DataSchemaUri - Amazon S3 location of the <code>DataSchema</code>.
* </p>
* </li>
* <li>
* <p>
* DataSchema - A JSON string representing the schema. This is not required
* if <code>DataSchemaUri</code> is specified.
* </p>
* </li>
* <li>
* <p>
* DataRearrangement - A JSON string representing the splitting requirement
* of a <code>Datasource</code>.
* </p>
* <br>
* <p>
* Sample -
* <code> "{\"splitting\":{\"percentBegin\":10,\"percentEnd\":60}}"</code>
* </p>
* </li>
* </ul>
*/
private RDSDataSpec rDSData;
/**
* <p>
* The role that Amazon ML assumes on behalf of the user to create and
* activate a data pipeline in the user’s account and copy data (using the
* <code>SelectSqlQuery</code>) query from Amazon RDS to Amazon S3.
* </p>
* <p>
* </p>
*/
private String roleARN;
/**
* <p>
* The compute statistics for a <code>DataSource</code>. The statistics are
* generated from the observation data referenced by a
* <code>DataSource</code>. Amazon ML uses the statistics internally during
* an <code>MLModel</code> training. This parameter must be set to
* <code>true</code> if the <code></code>DataSource<code></code> needs to be
* used for <code>MLModel</code> training.
* </p>
*/
private Boolean computeStatistics;
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>DataSource</code>.
* Typically, an Amazon Resource Number (ARN) becomes the ID for a
* <code>DataSource</code>.
* </p>
*
* @param dataSourceId
* A user-supplied ID that uniquely identifies the
* <code>DataSource</code>. Typically, an Amazon Resource Number
* (ARN) becomes the ID for a <code>DataSource</code>.
*/
public void setDataSourceId(String dataSourceId) {
this.dataSourceId = dataSourceId;
}
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>DataSource</code>.
* Typically, an Amazon Resource Number (ARN) becomes the ID for a
* <code>DataSource</code>.
* </p>
*
* @return A user-supplied ID that uniquely identifies the
* <code>DataSource</code>. Typically, an Amazon Resource Number
* (ARN) becomes the ID for a <code>DataSource</code>.
*/
public String getDataSourceId() {
return this.dataSourceId;
}
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>DataSource</code>.
* Typically, an Amazon Resource Number (ARN) becomes the ID for a
* <code>DataSource</code>.
* </p>
*
* @param dataSourceId
* A user-supplied ID that uniquely identifies the
* <code>DataSource</code>. Typically, an Amazon Resource Number
* (ARN) becomes the ID for a <code>DataSource</code>.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateDataSourceFromRDSRequest withDataSourceId(String dataSourceId) {
setDataSourceId(dataSourceId);
return this;
}
/**
* <p>
* A user-supplied name or description of the <code>DataSource</code>.
* </p>
*
* @param dataSourceName
* A user-supplied name or description of the <code>DataSource</code>
* .
*/
public void setDataSourceName(String dataSourceName) {
this.dataSourceName = dataSourceName;
}
/**
* <p>
* A user-supplied name or description of the <code>DataSource</code>.
* </p>
*
* @return A user-supplied name or description of the
* <code>DataSource</code>.
*/
public String getDataSourceName() {
return this.dataSourceName;
}
/**
* <p>
* A user-supplied name or description of the <code>DataSource</code>.
* </p>
*
* @param dataSourceName
* A user-supplied name or description of the <code>DataSource</code>
* .
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateDataSourceFromRDSRequest withDataSourceName(
String dataSourceName) {
setDataSourceName(dataSourceName);
return this;
}
/**
* <p>
* The data specification of an Amazon RDS <code>DataSource</code>:
* </p>
* <ul>
* <li>
* <p>
* DatabaseInformation -
* <ul>
* <li> <code>DatabaseName </code> - Name of the Amazon RDS database.</li>
* <li> <code> InstanceIdentifier </code> - Unique identifier for the Amazon
* RDS database instance.</li>
* </ul>
* </p>
* </li>
* <li>
* <p>
* DatabaseCredentials - AWS Identity and Access Management (IAM)
* credentials that are used to connect to the Amazon RDS database.
* </p>
* </li>
* <li>
* <p>
* ResourceRole - Role (DataPipelineDefaultResourceRole) assumed by an
* Amazon Elastic Compute Cloud (EC2) instance to carry out the copy task
* from Amazon RDS to Amazon S3. For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* ServiceRole - Role (DataPipelineDefaultRole) assumed by the AWS Data
* Pipeline service to monitor the progress of the copy task from Amazon RDS
* to Amazon Simple Storage Service (S3). For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* SecurityInfo - Security information to use to access an Amazon RDS
* instance. You need to set up appropriate ingress rules for the security
* entity IDs provided to allow access to the Amazon RDS instance. Specify a
* [<code>SubnetId</code>, <code>SecurityGroupIds</code>] pair for a
* VPC-based Amazon RDS instance.
* </p>
* </li>
* <li>
* <p>
* SelectSqlQuery - Query that is used to retrieve the observation data for
* the <code>Datasource</code>.
* </p>
* </li>
* <li>
* <p>
* S3StagingLocation - Amazon S3 location for staging RDS data. The data
* retrieved from Amazon RDS using <code>SelectSqlQuery</code> is stored in
* this location.
* </p>
* </li>
* <li>
* <p>
* DataSchemaUri - Amazon S3 location of the <code>DataSchema</code>.
* </p>
* </li>
* <li>
* <p>
* DataSchema - A JSON string representing the schema. This is not required
* if <code>DataSchemaUri</code> is specified.
* </p>
* </li>
* <li>
* <p>
* DataRearrangement - A JSON string representing the splitting requirement
* of a <code>Datasource</code>.
* </p>
* <br>
* <p>
* Sample -
* <code> "{\"splitting\":{\"percentBegin\":10,\"percentEnd\":60}}"</code>
* </p>
* </li>
* </ul>
*
* @param rDSData
* The data specification of an Amazon RDS <code>DataSource</code>
* :</p>
* <ul>
* <li>
* <p>
* DatabaseInformation -
* <ul>
* <li> <code>DatabaseName </code> - Name of the Amazon RDS database.</li>
* <li> <code> InstanceIdentifier </code> - Unique identifier for the
* Amazon RDS database instance.</li>
* </ul>
* </p>
* </li>
* <li>
* <p>
* DatabaseCredentials - AWS Identity and Access Management (IAM)
* credentials that are used to connect to the Amazon RDS database.
* </p>
* </li>
* <li>
* <p>
* ResourceRole - Role (DataPipelineDefaultResourceRole) assumed by
* an Amazon Elastic Compute Cloud (EC2) instance to carry out the
* copy task from Amazon RDS to Amazon S3. For more information, see
* <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* ServiceRole - Role (DataPipelineDefaultRole) assumed by the AWS
* Data Pipeline service to monitor the progress of the copy task
* from Amazon RDS to Amazon Simple Storage Service (S3). For more
* information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* SecurityInfo - Security information to use to access an Amazon RDS
* instance. You need to set up appropriate ingress rules for the
* security entity IDs provided to allow access to the Amazon RDS
* instance. Specify a [<code>SubnetId</code>,
* <code>SecurityGroupIds</code>] pair for a VPC-based Amazon RDS
* instance.
* </p>
* </li>
* <li>
* <p>
* SelectSqlQuery - Query that is used to retrieve the observation
* data for the <code>Datasource</code>.
* </p>
* </li>
* <li>
* <p>
* S3StagingLocation - Amazon S3 location for staging RDS data. The
* data retrieved from Amazon RDS using <code>SelectSqlQuery</code>
* is stored in this location.
* </p>
* </li>
* <li>
* <p>
* DataSchemaUri - Amazon S3 location of the <code>DataSchema</code>.
* </p>
* </li>
* <li>
* <p>
* DataSchema - A JSON string representing the schema. This is not
* required if <code>DataSchemaUri</code> is specified.
* </p>
* </li>
* <li>
* <p>
* DataRearrangement - A JSON string representing the splitting
* requirement of a <code>Datasource</code>.
* </p>
* <br>
* <p>
* Sample -
* <code> "{\"splitting\":{\"percentBegin\":10,\"percentEnd\":60}}"</code>
* </p>
* </li>
*/
public void setRDSData(RDSDataSpec rDSData) {
this.rDSData = rDSData;
}
/**
* <p>
* The data specification of an Amazon RDS <code>DataSource</code>:
* </p>
* <ul>
* <li>
* <p>
* DatabaseInformation -
* <ul>
* <li> <code>DatabaseName </code> - Name of the Amazon RDS database.</li>
* <li> <code> InstanceIdentifier </code> - Unique identifier for the Amazon
* RDS database instance.</li>
* </ul>
* </p>
* </li>
* <li>
* <p>
* DatabaseCredentials - AWS Identity and Access Management (IAM)
* credentials that are used to connect to the Amazon RDS database.
* </p>
* </li>
* <li>
* <p>
* ResourceRole - Role (DataPipelineDefaultResourceRole) assumed by an
* Amazon Elastic Compute Cloud (EC2) instance to carry out the copy task
* from Amazon RDS to Amazon S3. For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* ServiceRole - Role (DataPipelineDefaultRole) assumed by the AWS Data
* Pipeline service to monitor the progress of the copy task from Amazon RDS
* to Amazon Simple Storage Service (S3). For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* SecurityInfo - Security information to use to access an Amazon RDS
* instance. You need to set up appropriate ingress rules for the security
* entity IDs provided to allow access to the Amazon RDS instance. Specify a
* [<code>SubnetId</code>, <code>SecurityGroupIds</code>] pair for a
* VPC-based Amazon RDS instance.
* </p>
* </li>
* <li>
* <p>
* SelectSqlQuery - Query that is used to retrieve the observation data for
* the <code>Datasource</code>.
* </p>
* </li>
* <li>
* <p>
* S3StagingLocation - Amazon S3 location for staging RDS data. The data
* retrieved from Amazon RDS using <code>SelectSqlQuery</code> is stored in
* this location.
* </p>
* </li>
* <li>
* <p>
* DataSchemaUri - Amazon S3 location of the <code>DataSchema</code>.
* </p>
* </li>
* <li>
* <p>
* DataSchema - A JSON string representing the schema. This is not required
* if <code>DataSchemaUri</code> is specified.
* </p>
* </li>
* <li>
* <p>
* DataRearrangement - A JSON string representing the splitting requirement
* of a <code>Datasource</code>.
* </p>
* <br>
* <p>
* Sample -
* <code> "{\"splitting\":{\"percentBegin\":10,\"percentEnd\":60}}"</code>
* </p>
* </li>
* </ul>
*
* @return The data specification of an Amazon RDS <code>DataSource</code>
* :</p>
* <ul>
* <li>
* <p>
* DatabaseInformation -
* <ul>
* <li> <code>DatabaseName </code> - Name of the Amazon RDS database.
* </li>
* <li> <code> InstanceIdentifier </code> - Unique identifier for the
* Amazon RDS database instance.</li>
* </ul>
* </p>
* </li>
* <li>
* <p>
* DatabaseCredentials - AWS Identity and Access Management (IAM)
* credentials that are used to connect to the Amazon RDS database.
* </p>
* </li>
* <li>
* <p>
* ResourceRole - Role (DataPipelineDefaultResourceRole) assumed by
* an Amazon Elastic Compute Cloud (EC2) instance to carry out the
* copy task from Amazon RDS to Amazon S3. For more information, see
* <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* ServiceRole - Role (DataPipelineDefaultRole) assumed by the AWS
* Data Pipeline service to monitor the progress of the copy task
* from Amazon RDS to Amazon Simple Storage Service (S3). For more
* information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* SecurityInfo - Security information to use to access an Amazon
* RDS instance. You need to set up appropriate ingress rules for
* the security entity IDs provided to allow access to the Amazon
* RDS instance. Specify a [<code>SubnetId</code>,
* <code>SecurityGroupIds</code>] pair for a VPC-based Amazon RDS
* instance.
* </p>
* </li>
* <li>
* <p>
* SelectSqlQuery - Query that is used to retrieve the observation
* data for the <code>Datasource</code>.
* </p>
* </li>
* <li>
* <p>
* S3StagingLocation - Amazon S3 location for staging RDS data. The
* data retrieved from Amazon RDS using <code>SelectSqlQuery</code>
* is stored in this location.
* </p>
* </li>
* <li>
* <p>
* DataSchemaUri - Amazon S3 location of the <code>DataSchema</code>
* .
* </p>
* </li>
* <li>
* <p>
* DataSchema - A JSON string representing the schema. This is not
* required if <code>DataSchemaUri</code> is specified.
* </p>
* </li>
* <li>
* <p>
* DataRearrangement - A JSON string representing the splitting
* requirement of a <code>Datasource</code>.
* </p>
* <br>
* <p>
* Sample -
* <code> "{\"splitting\":{\"percentBegin\":10,\"percentEnd\":60}}"</code>
* </p>
* </li>
*/
public RDSDataSpec getRDSData() {
return this.rDSData;
}
/**
* <p>
* The data specification of an Amazon RDS <code>DataSource</code>:
* </p>
* <ul>
* <li>
* <p>
* DatabaseInformation -
* <ul>
* <li> <code>DatabaseName </code> - Name of the Amazon RDS database.</li>
* <li> <code> InstanceIdentifier </code> - Unique identifier for the Amazon
* RDS database instance.</li>
* </ul>
* </p>
* </li>
* <li>
* <p>
* DatabaseCredentials - AWS Identity and Access Management (IAM)
* credentials that are used to connect to the Amazon RDS database.
* </p>
* </li>
* <li>
* <p>
* ResourceRole - Role (DataPipelineDefaultResourceRole) assumed by an
* Amazon Elastic Compute Cloud (EC2) instance to carry out the copy task
* from Amazon RDS to Amazon S3. For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* ServiceRole - Role (DataPipelineDefaultRole) assumed by the AWS Data
* Pipeline service to monitor the progress of the copy task from Amazon RDS
* to Amazon Simple Storage Service (S3). For more information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* SecurityInfo - Security information to use to access an Amazon RDS
* instance. You need to set up appropriate ingress rules for the security
* entity IDs provided to allow access to the Amazon RDS instance. Specify a
* [<code>SubnetId</code>, <code>SecurityGroupIds</code>] pair for a
* VPC-based Amazon RDS instance.
* </p>
* </li>
* <li>
* <p>
* SelectSqlQuery - Query that is used to retrieve the observation data for
* the <code>Datasource</code>.
* </p>
* </li>
* <li>
* <p>
* S3StagingLocation - Amazon S3 location for staging RDS data. The data
* retrieved from Amazon RDS using <code>SelectSqlQuery</code> is stored in
* this location.
* </p>
* </li>
* <li>
* <p>
* DataSchemaUri - Amazon S3 location of the <code>DataSchema</code>.
* </p>
* </li>
* <li>
* <p>
* DataSchema - A JSON string representing the schema. This is not required
* if <code>DataSchemaUri</code> is specified.
* </p>
* </li>
* <li>
* <p>
* DataRearrangement - A JSON string representing the splitting requirement
* of a <code>Datasource</code>.
* </p>
* <br>
* <p>
* Sample -
* <code> "{\"splitting\":{\"percentBegin\":10,\"percentEnd\":60}}"</code>
* </p>
* </li>
* </ul>
*
* @param rDSData
* The data specification of an Amazon RDS <code>DataSource</code>
* :</p>
* <ul>
* <li>
* <p>
* DatabaseInformation -
* <ul>
* <li> <code>DatabaseName </code> - Name of the Amazon RDS database.</li>
* <li> <code> InstanceIdentifier </code> - Unique identifier for the
* Amazon RDS database instance.</li>
* </ul>
* </p>
* </li>
* <li>
* <p>
* DatabaseCredentials - AWS Identity and Access Management (IAM)
* credentials that are used to connect to the Amazon RDS database.
* </p>
* </li>
* <li>
* <p>
* ResourceRole - Role (DataPipelineDefaultResourceRole) assumed by
* an Amazon Elastic Compute Cloud (EC2) instance to carry out the
* copy task from Amazon RDS to Amazon S3. For more information, see
* <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* ServiceRole - Role (DataPipelineDefaultRole) assumed by the AWS
* Data Pipeline service to monitor the progress of the copy task
* from Amazon RDS to Amazon Simple Storage Service (S3). For more
* information, see <a href=
* "http://docs.aws.amazon.com/datapipeline/latest/DeveloperGuide/dp-iam-roles.html"
* >Role templates</a> for data pipelines.
* </p>
* </li>
* <li>
* <p>
* SecurityInfo - Security information to use to access an Amazon RDS
* instance. You need to set up appropriate ingress rules for the
* security entity IDs provided to allow access to the Amazon RDS
* instance. Specify a [<code>SubnetId</code>,
* <code>SecurityGroupIds</code>] pair for a VPC-based Amazon RDS
* instance.
* </p>
* </li>
* <li>
* <p>
* SelectSqlQuery - Query that is used to retrieve the observation
* data for the <code>Datasource</code>.
* </p>
* </li>
* <li>
* <p>
* S3StagingLocation - Amazon S3 location for staging RDS data. The
* data retrieved from Amazon RDS using <code>SelectSqlQuery</code>
* is stored in this location.
* </p>
* </li>
* <li>
* <p>
* DataSchemaUri - Amazon S3 location of the <code>DataSchema</code>.
* </p>
* </li>
* <li>
* <p>
* DataSchema - A JSON string representing the schema. This is not
* required if <code>DataSchemaUri</code> is specified.
* </p>
* </li>
* <li>
* <p>
* DataRearrangement - A JSON string representing the splitting
* requirement of a <code>Datasource</code>.
* </p>
* <br>
* <p>
* Sample -
* <code> "{\"splitting\":{\"percentBegin\":10,\"percentEnd\":60}}"</code>
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateDataSourceFromRDSRequest withRDSData(RDSDataSpec rDSData) {
setRDSData(rDSData);
return this;
}
/**
* <p>
* The role that Amazon ML assumes on behalf of the user to create and
* activate a data pipeline in the user’s account and copy data (using the
* <code>SelectSqlQuery</code>) query from Amazon RDS to Amazon S3.
* </p>
* <p>
* </p>
*
* @param roleARN
* The role that Amazon ML assumes on behalf of the user to create
* and activate a data pipeline in the user’s account and copy data
* (using the <code>SelectSqlQuery</code>) query from Amazon RDS to
* Amazon S3.</p>
* <p>
*/
public void setRoleARN(String roleARN) {
this.roleARN = roleARN;
}
/**
* <p>
* The role that Amazon ML assumes on behalf of the user to create and
* activate a data pipeline in the user’s account and copy data (using the
* <code>SelectSqlQuery</code>) query from Amazon RDS to Amazon S3.
* </p>
* <p>
* </p>
*
* @return The role that Amazon ML assumes on behalf of the user to create
* and activate a data pipeline in the user’s account and copy data
* (using the <code>SelectSqlQuery</code>) query from Amazon RDS to
* Amazon S3.</p>
* <p>
*/
public String getRoleARN() {
return this.roleARN;
}
/**
* <p>
* The role that Amazon ML assumes on behalf of the user to create and
* activate a data pipeline in the user’s account and copy data (using the
* <code>SelectSqlQuery</code>) query from Amazon RDS to Amazon S3.
* </p>
* <p>
* </p>
*
* @param roleARN
* The role that Amazon ML assumes on behalf of the user to create
* and activate a data pipeline in the user’s account and copy data
* (using the <code>SelectSqlQuery</code>) query from Amazon RDS to
* Amazon S3.</p>
* <p>
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateDataSourceFromRDSRequest withRoleARN(String roleARN) {
setRoleARN(roleARN);
return this;
}
/**
* <p>
* The compute statistics for a <code>DataSource</code>. The statistics are
* generated from the observation data referenced by a
* <code>DataSource</code>. Amazon ML uses the statistics internally during
* an <code>MLModel</code> training. This parameter must be set to
* <code>true</code> if the <code></code>DataSource<code></code> needs to be
* used for <code>MLModel</code> training.
* </p>
*
* @param computeStatistics
* The compute statistics for a <code>DataSource</code>. The
* statistics are generated from the observation data referenced by a
* <code>DataSource</code>. Amazon ML uses the statistics internally
* during an <code>MLModel</code> training. This parameter must be
* set to <code>true</code> if the <code></code>DataSource
* <code></code> needs to be used for <code>MLModel</code> training.
*/
public void setComputeStatistics(Boolean computeStatistics) {
this.computeStatistics = computeStatistics;
}
/**
* <p>
* The compute statistics for a <code>DataSource</code>. The statistics are
* generated from the observation data referenced by a
* <code>DataSource</code>. Amazon ML uses the statistics internally during
* an <code>MLModel</code> training. This parameter must be set to
* <code>true</code> if the <code></code>DataSource<code></code> needs to be
* used for <code>MLModel</code> training.
* </p>
*
* @return The compute statistics for a <code>DataSource</code>. The
* statistics are generated from the observation data referenced by
* a <code>DataSource</code>. Amazon ML uses the statistics
* internally during an <code>MLModel</code> training. This
* parameter must be set to <code>true</code> if the <code></code>
* DataSource<code></code> needs to be used for <code>MLModel</code>
* training.
*/
public Boolean getComputeStatistics() {
return this.computeStatistics;
}
/**
* <p>
* The compute statistics for a <code>DataSource</code>. The statistics are
* generated from the observation data referenced by a
* <code>DataSource</code>. Amazon ML uses the statistics internally during
* an <code>MLModel</code> training. This parameter must be set to
* <code>true</code> if the <code></code>DataSource<code></code> needs to be
* used for <code>MLModel</code> training.
* </p>
*
* @param computeStatistics
* The compute statistics for a <code>DataSource</code>. The
* statistics are generated from the observation data referenced by a
* <code>DataSource</code>. Amazon ML uses the statistics internally
* during an <code>MLModel</code> training. This parameter must be
* set to <code>true</code> if the <code></code>DataSource
* <code></code> needs to be used for <code>MLModel</code> training.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public CreateDataSourceFromRDSRequest withComputeStatistics(
Boolean computeStatistics) {
setComputeStatistics(computeStatistics);
return this;
}
/**
* <p>
* The compute statistics for a <code>DataSource</code>. The statistics are
* generated from the observation data referenced by a
* <code>DataSource</code>. Amazon ML uses the statistics internally during
* an <code>MLModel</code> training. This parameter must be set to
* <code>true</code> if the <code></code>DataSource<code></code> needs to be
* used for <code>MLModel</code> training.
* </p>
*
* @return The compute statistics for a <code>DataSource</code>. The
* statistics are generated from the observation data referenced by
* a <code>DataSource</code>. Amazon ML uses the statistics
* internally during an <code>MLModel</code> training. This
* parameter must be set to <code>true</code> if the <code></code>
* DataSource<code></code> needs to be used for <code>MLModel</code>
* training.
*/
public Boolean isComputeStatistics() {
return this.computeStatistics;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDataSourceId() != null)
sb.append("DataSourceId: " + getDataSourceId() + ",");
if (getDataSourceName() != null)
sb.append("DataSourceName: " + getDataSourceName() + ",");
if (getRDSData() != null)
sb.append("RDSData: " + getRDSData() + ",");
if (getRoleARN() != null)
sb.append("RoleARN: " + getRoleARN() + ",");
if (getComputeStatistics() != null)
sb.append("ComputeStatistics: " + getComputeStatistics());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateDataSourceFromRDSRequest == false)
return false;
CreateDataSourceFromRDSRequest other = (CreateDataSourceFromRDSRequest) obj;
if (other.getDataSourceId() == null ^ this.getDataSourceId() == null)
return false;
if (other.getDataSourceId() != null
&& other.getDataSourceId().equals(this.getDataSourceId()) == false)
return false;
if (other.getDataSourceName() == null
^ this.getDataSourceName() == null)
return false;
if (other.getDataSourceName() != null
&& other.getDataSourceName().equals(this.getDataSourceName()) == false)
return false;
if (other.getRDSData() == null ^ this.getRDSData() == null)
return false;
if (other.getRDSData() != null
&& other.getRDSData().equals(this.getRDSData()) == false)
return false;
if (other.getRoleARN() == null ^ this.getRoleARN() == null)
return false;
if (other.getRoleARN() != null
&& other.getRoleARN().equals(this.getRoleARN()) == false)
return false;
if (other.getComputeStatistics() == null
^ this.getComputeStatistics() == null)
return false;
if (other.getComputeStatistics() != null
&& other.getComputeStatistics().equals(
this.getComputeStatistics()) == false)
return false;
return true;
}
/**
 * Hash code consistent with {@link #equals(Object)}.
 *
 * @return hash over all five members, with null members contributing 0
 */
@Override
public int hashCode() {
    // java.util.Objects.hash implements exactly the algorithm the generated
    // code hand-rolled: accumulator starts at 1, each member contributes
    // 31 * acc + (member == null ? 0 : member.hashCode()). The values
    // produced are therefore identical to the previous implementation.
    return java.util.Objects.hash(getDataSourceId(), getDataSourceName(),
            getRDSData(), getRoleARN(), getComputeStatistics());
}
/**
 * Returns a shallow copy of this request.
 * <p>
 * Delegates entirely to the superclass clone support — presumably the AWS
 * base request class implements {@code Cloneable}; confirm against the
 * superclass, which is outside this view.
 */
@Override
public CreateDataSourceFromRDSRequest clone() {
    return (CreateDataSourceFromRDSRequest) super.clone();
}
}
|
mhurne/aws-sdk-java
|
aws-java-sdk-machinelearning/src/main/java/com/amazonaws/services/machinelearning/model/CreateDataSourceFromRDSRequest.java
|
Java
|
apache-2.0
| 37,444
|
/* Page canvas: fixed 1198px-wide column, centered via auto side margins.
   NOTE(review): this stylesheet is Axure-generated wireframe output — the
   absolute-positioned #uNN rules below mirror the wireframe layout and are
   not intended for hand editing. */
body {
margin:0px;
background-image:none;
position:relative;
left:-0px;
width:1198px;
margin-left:auto;
margin-right:auto;
text-align:left;
}
/* Stacking root for all absolutely positioned wireframe elements. */
#base {
position:absolute;
z-index:0;
}
#u0 {
position:absolute;
left:54px;
top:356px;
width:185px;
height:25px;
}
#u0_input {
position:absolute;
left:0px;
top:0px;
width:185px;
height:25px;
font-family:'Arial Regular', 'Arial';
font-weight:700;
font-style:normal;
font-size:13px;
text-decoration:none;
color:#000000;
text-align:center;
}
#u1 {
position:absolute;
left:39px;
top:256px;
width:1081px;
height:60px;
}
#u1_img {
position:absolute;
left:0px;
top:0px;
width:1081px;
height:60px;
}
#u2 {
position:absolute;
left:2px;
top:22px;
width:1077px;
visibility:hidden;
word-wrap:break-word;
}
#u3 {
position:absolute;
left:60px;
top:279px;
width:1040px;
height:16px;
}
#u3_img {
position:absolute;
left:0px;
top:0px;
width:1040px;
height:16px;
}
#u4 {
position:absolute;
left:0px;
top:0px;
width:1040px;
word-wrap:break-word;
}
#u5 {
position:absolute;
left:265px;
top:356px;
width:135px;
height:25px;
}
#u5_input {
position:absolute;
left:0px;
top:0px;
width:135px;
height:25px;
font-family:'Arial Regular', 'Arial';
font-weight:700;
font-style:normal;
font-size:13px;
text-decoration:none;
color:#000000;
text-align:center;
}
#u7 {
position:absolute;
left:0px;
top:100px;
width:1200px;
height:30px;
}
#u7_img {
position:absolute;
left:0px;
top:0px;
width:1200px;
height:30px;
}
#u8 {
position:absolute;
left:2px;
top:7px;
width:1196px;
visibility:hidden;
word-wrap:break-word;
}
#u9 {
position:absolute;
left:3px;
top:102px;
width:1136px;
height:28px;
}
#u9_menu {
position:absolute;
left:-3px;
top:-3px;
width:1142px;
height:34px;
}
#u10 {
position:absolute;
left:0px;
top:0px;
width:1141px;
height:33px;
}
#u11 {
position:absolute;
left:0px;
top:0px;
width:56px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u11_img {
position:absolute;
left:0px;
top:0px;
width:56px;
height:28px;
}
#u12 {
position:absolute;
left:2px;
top:6px;
width:52px;
word-wrap:break-word;
}
#u13 {
position:absolute;
left:56px;
top:0px;
width:72px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u13_img {
position:absolute;
left:0px;
top:0px;
width:72px;
height:28px;
}
#u14 {
position:absolute;
left:2px;
top:6px;
width:68px;
word-wrap:break-word;
}
#u15 {
position:absolute;
left:128px;
top:0px;
width:61px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u15_img {
position:absolute;
left:0px;
top:0px;
width:61px;
height:28px;
}
#u16 {
position:absolute;
left:2px;
top:6px;
width:57px;
word-wrap:break-word;
}
#u17 {
position:absolute;
left:189px;
top:0px;
width:73px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u17_img {
position:absolute;
left:0px;
top:0px;
width:73px;
height:28px;
}
#u18 {
position:absolute;
left:2px;
top:6px;
width:69px;
word-wrap:break-word;
}
#u19 {
position:absolute;
left:262px;
top:0px;
width:132px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u19_img {
position:absolute;
left:0px;
top:0px;
width:132px;
height:28px;
}
#u20 {
position:absolute;
left:2px;
top:6px;
width:128px;
word-wrap:break-word;
}
#u21 {
position:absolute;
left:394px;
top:0px;
width:112px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u21_img {
position:absolute;
left:0px;
top:0px;
width:112px;
height:28px;
}
#u22 {
position:absolute;
left:2px;
top:6px;
width:108px;
word-wrap:break-word;
}
#u23 {
position:absolute;
left:506px;
top:0px;
width:89px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u23_img {
position:absolute;
left:0px;
top:0px;
width:89px;
height:28px;
}
#u24 {
position:absolute;
left:2px;
top:6px;
width:85px;
word-wrap:break-word;
}
#u25 {
position:absolute;
left:595px;
top:0px;
width:111px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u25_img {
position:absolute;
left:0px;
top:0px;
width:111px;
height:28px;
}
#u25_img.selected {
}
#u25.selected {
}
#u25_img.mouseOver {
}
#u25.mouseOver {
}
#u26 {
position:absolute;
left:2px;
top:6px;
width:107px;
word-wrap:break-word;
}
#u27 {
position:absolute;
left:706px;
top:0px;
width:86px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u27_img {
position:absolute;
left:0px;
top:0px;
width:86px;
height:28px;
}
#u27_img.selected {
}
#u27.selected {
}
#u27_img.mouseOver {
}
#u27.mouseOver {
}
#u28 {
position:absolute;
left:2px;
top:6px;
width:82px;
word-wrap:break-word;
}
#u29 {
position:absolute;
left:792px;
top:0px;
width:156px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u29_img {
position:absolute;
left:0px;
top:0px;
width:156px;
height:28px;
}
#u30 {
position:absolute;
left:2px;
top:6px;
width:152px;
word-wrap:break-word;
}
#u31 {
position:absolute;
left:948px;
top:0px;
width:103px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u31_img {
position:absolute;
left:0px;
top:0px;
width:103px;
height:28px;
}
#u32 {
position:absolute;
left:2px;
top:6px;
width:99px;
word-wrap:break-word;
}
#u33 {
position:absolute;
left:1051px;
top:0px;
width:85px;
height:28px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
color:#FFFFFF;
text-align:center;
}
#u33_img {
position:absolute;
left:0px;
top:0px;
width:85px;
height:28px;
}
#u34 {
position:absolute;
left:2px;
top:6px;
width:81px;
word-wrap:break-word;
}
#u35 {
position:absolute;
left:0px;
top:130px;
width:1200px;
height:31px;
}
#u35_img {
position:absolute;
left:0px;
top:0px;
width:1200px;
height:31px;
}
#u36 {
position:absolute;
left:2px;
top:8px;
width:1196px;
visibility:hidden;
word-wrap:break-word;
}
#u37 {
position:absolute;
left:20px;
top:131px;
width:650px;
height:26px;
}
#u37_menu {
position:absolute;
left:-3px;
top:-3px;
width:656px;
height:32px;
}
#u38 {
position:absolute;
left:0px;
top:0px;
width:655px;
height:31px;
}
#u39 {
position:absolute;
left:0px;
top:0px;
width:150px;
height:26px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u39_img {
position:absolute;
left:0px;
top:0px;
width:150px;
height:26px;
}
#u40 {
position:absolute;
left:2px;
top:6px;
width:146px;
word-wrap:break-word;
}
#u41 {
position:absolute;
left:150px;
top:0px;
width:174px;
height:26px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u41_img {
position:absolute;
left:0px;
top:0px;
width:174px;
height:26px;
}
#u42 {
position:absolute;
left:2px;
top:6px;
width:170px;
word-wrap:break-word;
}
#u43 {
position:absolute;
left:324px;
top:0px;
width:164px;
height:26px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u43_img {
position:absolute;
left:0px;
top:0px;
width:164px;
height:26px;
}
#u44 {
position:absolute;
left:2px;
top:6px;
width:160px;
word-wrap:break-word;
}
#u45 {
position:absolute;
left:488px;
top:0px;
width:162px;
height:26px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
color:#FFFFFF;
text-align:center;
}
#u45_img {
position:absolute;
left:0px;
top:0px;
width:162px;
height:26px;
}
#u46 {
position:absolute;
left:2px;
top:6px;
width:158px;
word-wrap:break-word;
}
#u47 {
position:absolute;
left:1137px;
top:106px;
width:60px;
height:20px;
color:#FFFFFF;
}
#u47_img {
position:absolute;
left:0px;
top:0px;
width:60px;
height:20px;
}
#u48 {
position:absolute;
left:2px;
top:2px;
width:56px;
word-wrap:break-word;
}
#u50 {
position:absolute;
left:0px;
top:0px;
width:1200px;
height:100px;
}
#u50_img {
position:absolute;
left:0px;
top:0px;
width:1200px;
height:100px;
}
#u51 {
position:absolute;
left:2px;
top:42px;
width:1196px;
visibility:hidden;
word-wrap:break-word;
}
#u52 {
position:absolute;
left:880px;
top:14px;
width:298px;
height:15px;
font-size:12px;
color:#FFFFFF;
text-align:right;
}
#u52_img {
position:absolute;
left:0px;
top:0px;
width:298px;
height:15px;
}
#u53 {
position:absolute;
left:0px;
top:0px;
width:298px;
word-wrap:break-word;
}
#u55 {
position:absolute;
left:26px;
top:14px;
width:271px;
height:70px;
}
#u55_img {
position:absolute;
left:0px;
top:0px;
width:271px;
height:70px;
}
#u56 {
position:absolute;
left:2px;
top:27px;
width:267px;
visibility:hidden;
word-wrap:break-word;
}
#u57 {
position:absolute;
left:838px;
top:39px;
}
#u57_state0 {
position:relative;
left:0px;
top:0px;
background-image:none;
}
#u57_state0_content {
position:absolute;
left:0px;
top:0px;
width:1px;
height:1px;
}
#u58 {
position:absolute;
left:0px;
top:23px;
visibility:hidden;
}
#u58_state0 {
position:relative;
left:0px;
top:0px;
background-image:none;
}
#u58_state0_content {
position:absolute;
left:0px;
top:0px;
width:1px;
height:1px;
}
#u59 {
position:absolute;
left:0px;
top:0px;
width:340px;
height:140px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:12px;
text-align:left;
}
#u59_img {
position:absolute;
left:0px;
top:0px;
width:350px;
height:150px;
}
#u60 {
position:absolute;
left:10px;
top:10px;
width:320px;
word-wrap:break-word;
}
#u61 {
position:absolute;
left:120px;
top:0px;
width:220px;
height:30px;
font-family:'Arial Bold', 'Arial';
font-weight:700;
font-style:normal;
font-size:11px;
color:#FF0000;
text-align:left;
}
#u61_img {
position:absolute;
left:0px;
top:0px;
width:230px;
height:40px;
}
#u62 {
position:absolute;
left:10px;
top:8px;
width:200px;
word-wrap:break-word;
}
#u63 {
position:absolute;
left:37px;
top:190px;
width:218px;
height:37px;
}
#u63_img {
position:absolute;
left:0px;
top:0px;
width:218px;
height:37px;
}
#u64 {
position:absolute;
left:0px;
top:0px;
width:218px;
white-space:nowrap;
}
#u66 {
position:absolute;
left:0px;
top:550px;
width:1200px;
height:50px;
}
#u66_img {
position:absolute;
left:0px;
top:0px;
width:1200px;
height:50px;
}
#u67 {
position:absolute;
left:2px;
top:17px;
width:1196px;
visibility:hidden;
word-wrap:break-word;
}
#u68 {
position:absolute;
left:9px;
top:568px;
width:521px;
height:13px;
font-size:10px;
color:#FFFFFF;
}
#u68_img {
position:absolute;
left:0px;
top:0px;
width:521px;
height:13px;
}
#u69 {
position:absolute;
left:0px;
top:0px;
width:521px;
word-wrap:break-word;
}
#u70 {
position:absolute;
left:-5px;
top:154px;
width:10px;
height:446px;
}
#u70_start {
position:absolute;
left:-5px;
top:0px;
width:20px;
height:18px;
}
#u70_end {
position:absolute;
left:-5px;
top:429px;
width:20px;
height:18px;
}
#u70_line {
position:absolute;
left:5px;
top:0px;
width:1px;
height:446px;
}
#u71 {
position:absolute;
left:1193px;
top:154px;
width:10px;
height:446px;
}
#u71_start {
position:absolute;
left:-5px;
top:0px;
width:20px;
height:18px;
}
#u71_end {
position:absolute;
left:-5px;
top:429px;
width:20px;
height:18px;
}
#u71_line {
position:absolute;
left:5px;
top:0px;
width:1px;
height:446px;
}
|
VHAINNOVATIONS/Mental-Health-eScreening
|
UIResources/DashboardWireframes/files/battery_delete/styles.css
|
CSS
|
apache-2.0
| 12,985
|
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Compute.V1.Snippets
{
// [START compute_v1_generated_RegionCommitments_Insert_async]
using Google.Cloud.Compute.V1;
using System.Threading.Tasks;
using lro = Google.LongRunning;
public sealed partial class GeneratedRegionCommitmentsClientSnippets
{
    /// <summary>Snippet for InsertAsync</summary>
    /// <remarks>
    /// This snippet has been automatically generated for illustrative purposes only.
    /// It may require modifications to work in your environment.
    /// </remarks>
    public async Task InsertRequestObjectAsync()
    {
        // Create client
        RegionCommitmentsClient regionCommitmentsClient = await RegionCommitmentsClient.CreateAsync();
        // Initialize request argument(s)
        // NOTE(review): the empty-string values below are presumably
        // placeholders — supply real RequestId/Region/Project values before
        // running this snippet.
        InsertRegionCommitmentRequest request = new InsertRegionCommitmentRequest
        {
            RequestId = "",
            Region = "",
            Project = "",
            CommitmentResource = new Commitment(),
        };
        // Make the request
        lro::Operation<Operation, Operation> response = await regionCommitmentsClient.InsertAsync(request);
        // Poll until the returned long-running operation is complete
        lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
        // Retrieve the operation result
        Operation result = completedResponse.Result;
        // Or get the name of the operation
        string operationName = response.Name;
        // This name can be stored, then the long-running operation retrieved later by name
        lro::Operation<Operation, Operation> retrievedResponse = await regionCommitmentsClient.PollOnceInsertAsync(operationName);
        // Check if the retrieved long-running operation has completed
        if (retrievedResponse.IsCompleted)
        {
            // If it has completed, then access the result
            Operation retrievedResult = retrievedResponse.Result;
        }
    }
}
// [END compute_v1_generated_RegionCommitments_Insert_async]
}
|
googleapis/google-cloud-dotnet
|
apis/Google.Cloud.Compute.V1/Google.Cloud.Compute.V1.GeneratedSnippets/RegionCommitmentsClient.InsertRequestObjectAsyncSnippet.g.cs
|
C#
|
apache-2.0
| 2,813
|
# FuncLog
## Description
FuncLog is part of the [_General Inductive Logic Programming System_ (GILPS)](http://www.doc.ic.ac.uk/~jcs06/GILPS/)
developed by [José Carlos Almeida Santos](http://www.doc.ic.ac.uk/~jcs06/).
GILPS is written in Prolog and requires at least [YAP 6.0](http://www.dcc.fc.up.pt/~vsc/Yap/downloads.html).
FuncLog was designed to learn on [Head Output Connected learning problems](http://www.doc.ic.ac.uk/~shm/Papers/EPIA09.pdf).
## Installation
To use GILPS, an installed [YAP](http://www.dcc.fc.up.pt/~vsc/Yap/downloads.html) system is required.
There are YAP software packages for the most common Linux distributions as well as binary and source distributions
on the [YAP web page](http://www.dcc.fc.up.pt/~vsc/Yap/downloads.html). In any case you should make sure that the YAP
executable is in your `$PATH`.
Having installed YAP, the actual GILPS can be downloaded at http://www.doc.ic.ac.uk/~jcs06/GILPS/GILPS.tar.bz2 .
The tar archive contains the GILPS Prolog sources which need to be extracted to `SML-Bench/learningsystems/funclog`.
The directory should then contain at least the following files:
```bash
SML-Bench/learningsystems/funclog $ ls -F
funclog.patch Makefile README.md run* source/ validate*
```
To make FuncLog work correctly you will first have to apply the patch file `funclog.patch` (residing in the
funclog directory) as follows:
```bash
SML-Bench/learningsystems/funclog $ patch source/engine/funclog.pl funclog.patch
patching file source/engine/funclog.pl
```
No further steps are required. In particular there is **no** need to call `make`.
|
AKSW/SML-Bench
|
learningsystems/funclog/README.md
|
Markdown
|
apache-2.0
| 1,652
|
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.autoconfigure.web.servlet;
import java.util.concurrent.Executors;
import com.gargoylesoftware.htmlunit.BrowserVersion;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.test.web.htmlunit.webdriver.LocalHostWebConnectionHtmlUnitDriver;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.security.concurrent.DelegatingSecurityContextExecutor;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.htmlunit.webdriver.MockMvcHtmlUnitDriverBuilder;
import org.springframework.util.ClassUtils;
/**
 * Auto-configuration for Selenium {@link WebDriver} MockMVC integration.
 *
 * @author Phillip Webb
 * @since 1.4.0
 */
@Configuration
@ConditionalOnClass(HtmlUnitDriver.class)
@AutoConfigureAfter(MockMvcAutoConfiguration.class)
@ConditionalOnProperty(prefix = "spring.test.mockmvc.webdriver", name = "enabled", matchIfMissing = true)
public class MockMvcWebDriverAutoConfiguration {

	// Referenced by name (not class literal) so Spring Security stays an
	// optional dependency: ClassUtils.isPresent probes the classpath below.
	private static final String SECURITY_CONTEXT_EXECUTOR = "org.springframework.security.concurrent.DelegatingSecurityContextExecutor";

	// Builder that backs an HtmlUnit driver onto the MockMvc instance; only
	// registered when the user has not supplied their own WebDriver/builder.
	@Bean
	@ConditionalOnMissingBean({ WebDriver.class, MockMvcHtmlUnitDriverBuilder.class })
	@ConditionalOnBean(MockMvc.class)
	public MockMvcHtmlUnitDriverBuilder mockMvcHtmlUnitDriverBuilder(MockMvc mockMvc,
			Environment environment) {
		// LocalHostWebConnectionHtmlUnitDriver resolves localhost URLs using
		// the given Environment; browser is emulated as Chrome.
		return MockMvcHtmlUnitDriverBuilder.mockMvcSetup(mockMvc)
				.withDelegate(new LocalHostWebConnectionHtmlUnitDriver(environment,
						BrowserVersion.CHROME));
	}

	@Bean
	@ConditionalOnMissingBean(WebDriver.class)
	@ConditionalOnBean(MockMvcHtmlUnitDriverBuilder.class)
	public HtmlUnitDriver htmlUnitDriver(MockMvcHtmlUnitDriverBuilder builder) {
		HtmlUnitDriver driver = builder.build();
		if (ClassUtils.isPresent(SECURITY_CONTEXT_EXECUTOR,
				getClass().getClassLoader())) {
			// When Spring Security is on the classpath, wrap the driver's
			// executor so background tasks run with the caller's
			// SecurityContext — presumably so authenticated MockMvc sessions
			// carry over to HtmlUnit's async work; confirm against
			// DelegatingSecurityContextExecutor docs.
			driver.setExecutor(new DelegatingSecurityContextExecutor(
					Executors.newSingleThreadExecutor()));
		}
		return driver;
	}

}
|
hello2009chen/spring-boot
|
spring-boot-project/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/web/servlet/MockMvcWebDriverAutoConfiguration.java
|
Java
|
apache-2.0
| 3,164
|
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import functools
import os.path
import mock
import netaddr
from oslo_config import cfg
from oslo_log import log as logging
import testtools
import webob
import webob.dec
import webob.exc
from neutron.agent.common import config as agent_config
from neutron.agent.common import ovs_lib
from neutron.agent.l3 import agent as neutron_l3_agent
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import namespace_manager
from neutron.agent.l3 import namespaces
from neutron.agent import l3_agent as l3_agent_main
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.callbacks import events
from neutron.callbacks import manager
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import config as common_config
from neutron.common import constants as l3_constants
from neutron.common import utils as common_utils
from neutron.openstack.common import uuidutils
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent.linux import base
from neutron.tests.functional.agent.linux import helpers
from neutron.tests.unit.agent.l3 import test_agent as test_l3_agent
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
METADATA_REQUEST_TIMEOUT = 60
def get_ovs_bridge(br_name):
return ovs_lib.OVSBridge(br_name)
class L3AgentTestFramework(base.BaseLinuxTestCase):
def setUp(self):
super(L3AgentTestFramework, self).setUp()
mock.patch('neutron.agent.l3.agent.L3PluginApi').start()
# TODO(pcm): Move this to BaseTestCase, if we find that more tests
# use this mechanism.
self._callback_manager = manager.CallbacksManager()
mock.patch.object(registry, '_get_callback_manager',
return_value=self._callback_manager).start()
self.agent = self._configure_agent('agent1')
def _get_config_opts(self):
    """Build a fresh ConfigOpts registered with the option groups the agent needs."""
    conf = cfg.ConfigOpts()
    for opts in (common_config.core_opts, common_config.core_cli_opts):
        conf.register_opts(opts)
    logging.register_options(conf)
    agent_config.register_process_monitor_opts(conf)
    return conf
def _configure_agent(self, host):
conf = self._get_config_opts()
l3_agent_main.register_opts(conf)
cfg.CONF.set_override('debug', False)
agent_config.setup_logging()
conf.set_override(
'interface_driver',
'neutron.agent.linux.interface.OVSInterfaceDriver')
conf.set_override('router_delete_namespaces', True)
br_int = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
br_ex = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
conf.set_override('ovs_integration_bridge', br_int.br_name)
conf.set_override('external_network_bridge', br_ex.br_name)
temp_dir = self.get_new_temp_dir()
get_temp_file_path = functools.partial(self.get_temp_file_path,
root=temp_dir)
conf.set_override('state_path', temp_dir.path)
conf.set_override('metadata_proxy_socket',
get_temp_file_path('metadata_proxy'))
conf.set_override('ha_confs_path',
get_temp_file_path('ha_confs'))
conf.set_override('external_pids',
get_temp_file_path('external/pids'))
conf.set_override('host', host)
agent = neutron_l3_agent.L3NATAgentWithStateReport(host, conf)
mock.patch.object(ip_lib, '_arping').start()
return agent
def generate_router_info(self, enable_ha, ip_version=4, extra_routes=True,
enable_fip=True, enable_snat=True,
dual_stack=False, v6_ext_gw_with_sub=True):
if ip_version == 6 and not dual_stack:
enable_snat = False
enable_fip = False
extra_routes = False
if not v6_ext_gw_with_sub:
self.agent.conf.set_override('ipv6_gateway',
'fe80::f816:3eff:fe2e:1')
return test_l3_agent.prepare_router_data(ip_version=ip_version,
enable_snat=enable_snat,
enable_floating_ip=enable_fip,
enable_ha=enable_ha,
extra_routes=extra_routes,
dual_stack=dual_stack,
v6_ext_gw_with_sub=(
v6_ext_gw_with_sub))
def manage_router(self, agent, router):
self.addCleanup(self._delete_router, agent, router['id'])
ri = self._create_router(agent, router)
return ri
def _create_router(self, agent, router):
agent._process_added_router(router)
return agent.router_info[router['id']]
def _delete_router(self, agent, router_id):
agent._router_removed(router_id)
def _add_fip(self, router, fip_address, fixed_address='10.0.0.2',
host=None):
fip = {'id': _uuid(),
'port_id': _uuid(),
'floating_ip_address': fip_address,
'fixed_ip_address': fixed_address,
'host': host}
router.router[l3_constants.FLOATINGIP_KEY].append(fip)
def _add_internal_interface_by_subnet(self, router, count=1,
ip_version=4,
ipv6_subnet_modes=None,
interface_id=None):
return test_l3_agent.router_append_subnet(router, count,
ip_version, ipv6_subnet_modes, interface_id)
def _namespace_exists(self, namespace):
    # True iff the named network namespace currently exists on this host.
    ip = ip_lib.IPWrapper(namespace=namespace)
    return ip.netns.exists(namespace)
def _metadata_proxy_exists(self, conf, router):
pm = external_process.ProcessManager(
conf,
router.router_id,
router.ns_name)
return pm.active
def device_exists_with_ips_and_mac(self, expected_device, name_getter,
namespace):
ip_cidrs = common_utils.fixed_ip_cidrs(expected_device['fixed_ips'])
return ip_lib.device_exists_with_ips_and_mac(
name_getter(expected_device['id']), ip_cidrs,
expected_device['mac_address'], namespace)
@staticmethod
def _port_first_ip_cidr(port):
    # CIDR string ("address/prefixlen") built from the port's first fixed IP.
    # Assumes port['fixed_ips'] is non-empty — IndexError otherwise.
    fixed_ip = port['fixed_ips'][0]
    return common_utils.ip_to_cidr(fixed_ip['ip_address'],
                                   fixed_ip['prefixlen'])
def get_device_mtu(self, target_device, name_getter, namespace):
device = ip_lib.IPDevice(name_getter(target_device), namespace)
return device.link.mtu
def get_expected_keepalive_configuration(self, router):
router_id = router.router_id
ha_device_name = router.get_ha_device_name()
ha_device_cidr = self._port_first_ip_cidr(router.ha_port)
external_port = router.get_ex_gw_port()
ex_port_ipv6 = ip_lib.get_ipv6_lladdr(external_port['mac_address'])
external_device_name = router.get_external_device_name(
external_port['id'])
external_device_cidr = self._port_first_ip_cidr(external_port)
internal_port = router.router[l3_constants.INTERFACE_KEY][0]
int_port_ipv6 = ip_lib.get_ipv6_lladdr(internal_port['mac_address'])
internal_device_name = router.get_internal_device_name(
internal_port['id'])
internal_device_cidr = self._port_first_ip_cidr(internal_port)
floating_ip_cidr = common_utils.ip_to_cidr(
router.get_floating_ips()[0]['floating_ip_address'])
default_gateway_ip = external_port['subnets'][0].get('gateway_ip')
return """vrrp_instance VR_1 {
state BACKUP
interface %(ha_device_name)s
virtual_router_id 1
priority 50
nopreempt
advert_int 2
track_interface {
%(ha_device_name)s
}
virtual_ipaddress {
169.254.0.1/24 dev %(ha_device_name)s
}
virtual_ipaddress_excluded {
%(floating_ip_cidr)s dev %(external_device_name)s
%(external_device_cidr)s dev %(external_device_name)s
%(internal_device_cidr)s dev %(internal_device_name)s
%(ex_port_ipv6)s dev %(external_device_name)s scope link
%(int_port_ipv6)s dev %(internal_device_name)s scope link
}
virtual_routes {
0.0.0.0/0 via %(default_gateway_ip)s dev %(external_device_name)s
8.8.8.0/24 via 19.4.4.4
}
}""" % {
'router_id': router_id,
'ha_device_name': ha_device_name,
'ha_device_cidr': ha_device_cidr,
'external_device_name': external_device_name,
'external_device_cidr': external_device_cidr,
'internal_device_name': internal_device_name,
'internal_device_cidr': internal_device_cidr,
'floating_ip_cidr': floating_ip_cidr,
'default_gateway_ip': default_gateway_ip,
'int_port_ipv6': int_port_ipv6,
'ex_port_ipv6': ex_port_ipv6
}
def _get_rule(self, iptables_manager, table, chain, predicate):
    # First rule in table/chain satisfying predicate. Note: next() with no
    # default raises StopIteration when nothing matches, which surfaces as a
    # test error rather than a clean assertion failure.
    rules = iptables_manager.get_chain(table, chain)
    result = next(rule for rule in rules if predicate(rule))
    return result
def _assert_router_does_not_exist(self, router):
# If the namespace assertion succeeds
# then the devices and iptable rules have also been deleted,
# so there's no need to check that explicitly.
self.assertFalse(self._namespace_exists(router.ns_name))
utils.wait_until_true(
lambda: not self._metadata_proxy_exists(self.agent.conf, router))
def _assert_snat_chains(self, router):
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'snat'))
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'POSTROUTING'))
def _assert_floating_ip_chains(self, router):
self.assertFalse(router.iptables_manager.is_chain_empty(
'nat', 'float-snat'))
def _assert_metadata_chains(self, router):
metadata_port_filter = lambda rule: (
str(self.agent.conf.metadata_port) in rule.rule)
self.assertTrue(self._get_rule(router.iptables_manager,
'nat',
'PREROUTING',
metadata_port_filter))
self.assertTrue(self._get_rule(router.iptables_manager,
'filter',
'INPUT',
metadata_port_filter))
def _assert_internal_devices(self, router):
internal_devices = router.router[l3_constants.INTERFACE_KEY]
self.assertTrue(len(internal_devices))
for device in internal_devices:
self.assertTrue(self.device_exists_with_ips_and_mac(
device, router.get_internal_device_name, router.ns_name))
def _assert_extra_routes(self, router):
routes = ip_lib.get_routing_table(namespace=router.ns_name)
routes = [{'nexthop': route['nexthop'],
'destination': route['destination']} for route in routes]
for extra_route in router.router['routes']:
self.assertIn(extra_route, routes)
def _assert_interfaces_deleted_from_ovs(self):
def assert_ovs_bridge_empty(bridge_name):
bridge = ovs_lib.OVSBridge(bridge_name)
self.assertFalse(bridge.get_port_name_list())
assert_ovs_bridge_empty(self.agent.conf.ovs_integration_bridge)
assert_ovs_bridge_empty(self.agent.conf.external_network_bridge)
def floating_ips_configured(self, router):
floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
external_port = router.get_ex_gw_port()
return len(floating_ips) and all(
ip_lib.device_exists_with_ips_and_mac(
router.get_external_device_name(external_port['id']),
['%s/32' % fip['floating_ip_address']],
external_port['mac_address'],
namespace=router.ns_name) for fip in floating_ips)
def fail_ha_router(self, router):
    # Simulate an HA failure by downing the router's HA interface; tests
    # then wait for the resulting ha_state transition (e.g. to 'backup').
    device_name = router.get_ha_device_name()
    ha_device = ip_lib.IPDevice(device_name, router.ns_name)
    ha_device.link.set_down()
class L3AgentTestCase(L3AgentTestFramework):
def test_keepalived_state_change_notification(self):
enqueue_mock = mock.patch.object(
self.agent, 'enqueue_state_change').start()
router_info = self.generate_router_info(enable_ha=True)
router = self.manage_router(self.agent, router_info)
utils.wait_until_true(lambda: router.ha_state == 'master')
self.fail_ha_router(router)
utils.wait_until_true(lambda: router.ha_state == 'backup')
utils.wait_until_true(lambda: enqueue_mock.call_count == 3)
calls = [args[0] for args in enqueue_mock.call_args_list]
self.assertEqual((router.router_id, 'backup'), calls[0])
self.assertEqual((router.router_id, 'master'), calls[1])
self.assertEqual((router.router_id, 'backup'), calls[2])
def _expected_rpc_report(self, expected):
    """Return True when the last HA state reported per router equals `expected`.

    Scans every update_ha_routers_states RPC call recorded on the mocked
    plugin API; for routers reported more than once, the most recent call
    wins.
    """
    calls = (args[0][1] for args in
             self.agent.plugin_rpc.update_ha_routers_states.call_args_list)
    actual_router_states = {}
    for call in calls:
        # dict.items() instead of the Python 2-only iteritems(), so this
        # helper works unchanged on both Python 2 and Python 3.
        for router_id, state in call.items():
            actual_router_states[router_id] = state
    return actual_router_states == expected
def test_keepalived_state_change_bulk_rpc(self):
    """Per-router HA states are reported in bulk over a single RPC."""
    router_info = self.generate_router_info(enable_ha=True)
    router1 = self.manage_router(self.agent, router_info)
    self.fail_ha_router(router1)
    router_info = self.generate_router_info(enable_ha=True)
    router2 = self.manage_router(self.agent, router_info)

    utils.wait_until_true(lambda: router1.ha_state == 'backup')
    utils.wait_until_true(lambda: router2.ha_state == 'master')
    # The RPC report translates keepalived states:
    # 'master' -> 'active', 'backup' -> 'standby'.
    utils.wait_until_true(
        lambda: self._expected_rpc_report(
            {router1.router_id: 'standby', router2.router_id: 'active'}))
def test_agent_notifications_for_router_events(self):
    """Test notifications for router create, update, and delete.

    Make sure that when the agent sends notifications of router events
    for router create, update, and delete, the correct handler is
    called with the right resource, event, and router information.
    """
    event_handler = mock.Mock()
    registry.subscribe(event_handler,
                       resources.ROUTER, events.BEFORE_CREATE)
    registry.subscribe(event_handler,
                       resources.ROUTER, events.AFTER_CREATE)
    registry.subscribe(event_handler,
                       resources.ROUTER, events.BEFORE_UPDATE)
    registry.subscribe(event_handler,
                       resources.ROUTER, events.AFTER_UPDATE)
    registry.subscribe(event_handler,
                       resources.ROUTER, events.BEFORE_DELETE)
    registry.subscribe(event_handler,
                       resources.ROUTER, events.AFTER_DELETE)

    # Create, update, then delete a router; each step should fire its
    # BEFORE_* and AFTER_* callbacks in order.
    router_info = self.generate_router_info(enable_ha=False)
    router = self.manage_router(self.agent, router_info)
    self.agent._process_updated_router(router.router)
    self._delete_router(self.agent, router.router_id)

    expected_calls = [
        mock.call('router', 'before_create', self.agent, router=router),
        mock.call('router', 'after_create', self.agent, router=router),
        mock.call('router', 'before_update', self.agent, router=router),
        mock.call('router', 'after_update', self.agent, router=router),
        mock.call('router', 'before_delete', self.agent, router=router),
        mock.call('router', 'after_delete', self.agent, router=router)]
    event_handler.assert_has_calls(expected_calls)
def test_legacy_router_lifecycle(self):
    # Full lifecycle of a legacy (non-HA) dual-stack router.
    self._router_lifecycle(enable_ha=False, dual_stack=True)

def test_legacy_router_lifecycle_with_no_gateway_subnet(self):
    # Same, but the IPv6 external gateway has no subnet attached.
    self._router_lifecycle(enable_ha=False, dual_stack=True,
                           v6_ext_gw_with_sub=False)

def test_ha_router_lifecycle(self):
    # Full lifecycle of an HA router.
    self._router_lifecycle(enable_ha=True)
def test_conntrack_disassociate_fip(self):
    '''Test that conntrack immediately drops stateful connection
    that uses floating IP once it's disassociated.
    '''
    router_info = self.generate_router_info(enable_ha=False)
    router = self.manage_router(self.agent, router_info)

    port = helpers.get_free_namespace_port(router.ns_name)
    client_address = '19.4.4.3'
    server_address = '35.4.0.4'

    def clean_fips(router):
        # Drop all floating IPs from the router dict; the next
        # process() call removes their plumbing.
        router.router[l3_constants.FLOATINGIP_KEY] = []

    clean_fips(router)
    self._add_fip(router, client_address, fixed_address=server_address)
    router.process(self.agent)

    # Client and server both live in the router namespace; the netcat
    # client targets the floating IP (client_address).
    router_ns = ip_lib.IPWrapper(namespace=router.ns_name)
    netcat = helpers.NetcatTester(router_ns, router_ns,
                                  server_address, port,
                                  client_address=client_address,
                                  run_as_root=True,
                                  udp=False)
    self.addCleanup(netcat.stop_processes)

    def assert_num_of_conntrack_rules(n):
        # Count non-empty lines of `conntrack -L` filtered by source.
        out = router_ns.netns.execute(["conntrack", "-L",
                                       "--orig-src", client_address])
        self.assertEqual(
            n, len([line for line in out.strip().split('\n') if line]))

    with self.assert_max_execution_time(100):
        assert_num_of_conntrack_rules(0)

        self.assertTrue(netcat.test_connectivity())
        assert_num_of_conntrack_rules(1)

        # Disassociating the FIP must flush the conntrack entry...
        clean_fips(router)
        router.process(self.agent)
        assert_num_of_conntrack_rules(0)

        # ...so the previously established connection now fails.
        with testtools.ExpectedException(RuntimeError):
            netcat.test_connectivity()
def test_ipv6_ha_router_lifecycle(self):
    # Full lifecycle of an IPv6 HA router.
    self._router_lifecycle(enable_ha=True, ip_version=6)
def test_keepalived_configuration(self):
    """keepalived config is rewritten when FIPs and the gateway change."""
    router_info = self.generate_router_info(enable_ha=True)
    router = self.manage_router(self.agent, router_info)
    expected = self.get_expected_keepalive_configuration(router)

    self.assertEqual(expected,
                     router.keepalived_manager.get_conf_on_disk())

    # Add a new FIP and change the GW IP address
    router.router = copy.deepcopy(router.router)
    existing_fip = '19.4.4.2'
    new_fip = '19.4.4.3'
    self._add_fip(router, new_fip)
    subnet_id = _uuid()
    fixed_ips = [{'ip_address': '19.4.4.10',
                  'prefixlen': 24,
                  'subnet_id': subnet_id}]
    subnets = [{'id': subnet_id,
                'cidr': '19.4.4.0/24',
                'gateway_ip': '19.4.4.5'}]
    router.router['gw_port']['subnets'] = subnets
    router.router['gw_port']['fixed_ips'] = fixed_ips

    router.process(self.agent)

    # Get the updated configuration and assert that both FIPs are in,
    # and that the GW IP address was updated.
    new_config = router.keepalived_manager.config.get_config_str()
    old_gw = '0.0.0.0/0 via 19.4.4.1'
    new_gw = '0.0.0.0/0 via 19.4.4.5'
    old_external_device_ip = '19.4.4.4'
    new_external_device_ip = '19.4.4.10'
    self.assertIn(existing_fip, new_config)
    self.assertIn(new_fip, new_config)
    self.assertNotIn(old_gw, new_config)
    self.assertIn(new_gw, new_config)
    external_port = router.get_ex_gw_port()
    external_device_name = router.get_external_device_name(
        external_port['id'])
    self.assertNotIn('%s/24 dev %s' %
                     (old_external_device_ip, external_device_name),
                     new_config)
    self.assertIn('%s/24 dev %s' %
                  (new_external_device_ip, external_device_name),
                  new_config)
def test_periodic_sync_routers_task(self):
    """Routers deleted while the agent was down are cleaned up on sync."""
    routers_to_keep = []
    routers_to_delete = []
    ns_names_to_retrieve = set()
    for i in range(2):
        routers_to_keep.append(self.generate_router_info(False))
        self.manage_router(self.agent, routers_to_keep[i])
        ns_names_to_retrieve.add(namespaces.NS_PREFIX +
                                 routers_to_keep[i]['id'])
    for i in range(2):
        routers_to_delete.append(self.generate_router_info(False))
        self.manage_router(self.agent, routers_to_delete[i])
        ns_names_to_retrieve.add(namespaces.NS_PREFIX +
                                 routers_to_delete[i]['id'])

    # Mock the plugin RPC API to simulate a situation where the agent
    # was handling the 4 routers created above, it went down and after
    # starting up again, two of the routers were deleted via the API
    mocked_get_routers = (
        neutron_l3_agent.L3PluginApi.return_value.get_routers)
    mocked_get_routers.return_value = routers_to_keep

    # Synchronize the agent with the plug-in
    with mock.patch.object(namespace_manager.NamespaceManager, 'list_all',
                           return_value=ns_names_to_retrieve):
        self.agent.periodic_sync_routers_task(self.agent.context)

    # Mock the plugin RPC API so a known external network id is returned
    # when the router updates are processed by the agent
    external_network_id = _uuid()
    mocked_get_external_network_id = (
        neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
    mocked_get_external_network_id.return_value = external_network_id

    # Plug external_gateway_info in the routers that are not going to be
    # deleted by the agent when it processes the updates. Otherwise,
    # _process_router_if_compatible in the agent fails
    for i in range(2):
        routers_to_keep[i]['external_gateway_info'] = {'network_id':
                                                      external_network_id}

    # Have the agent process the update from the plug-in and verify
    # expected behavior
    for _ in routers_to_keep + routers_to_delete:
        self.agent._process_router_update()

    # Kept routers still have state and a namespace...
    for i in range(2):
        self.assertIn(routers_to_keep[i]['id'], self.agent.router_info)
        self.assertTrue(self._namespace_exists(namespaces.NS_PREFIX +
                                               routers_to_keep[i]['id']))
    # ...deleted routers have neither.
    for i in range(2):
        self.assertNotIn(routers_to_delete[i]['id'],
                         self.agent.router_info)
        self.assertFalse(self._namespace_exists(
            namespaces.NS_PREFIX + routers_to_delete[i]['id']))
def _router_lifecycle(self, enable_ha, ip_version=4,
                      dual_stack=False, v6_ext_gw_with_sub=True):
    """Create a router, verify its full plumbing, then delete it.

    :param enable_ha: create an HA (keepalived/VRRP) router.
    :param ip_version: 4 or 6 for the router's addressing.
    :param dual_stack: add both v4 and v6 where applicable.
    :param v6_ext_gw_with_sub: whether the v6 external gateway port
        carries a subnet.
    """
    router_info = self.generate_router_info(enable_ha, ip_version,
                                            dual_stack=dual_stack,
                                            v6_ext_gw_with_sub=(
                                                v6_ext_gw_with_sub))
    router = self.manage_router(self.agent, router_info)

    # Add multiple-IPv6-prefix internal router port
    slaac = l3_constants.IPV6_SLAAC
    slaac_mode = {'ra_mode': slaac, 'address_mode': slaac}
    subnet_modes = [slaac_mode] * 2
    self._add_internal_interface_by_subnet(router.router, count=2,
                                           ip_version=6,
                                           ipv6_subnet_modes=subnet_modes)
    router.process(self.agent)

    if enable_ha:
        # Before the VRRP election completes the external device
        # must carry no addresses at all.
        port = router.get_ex_gw_port()
        interface_name = router.get_external_device_name(port['id'])
        self._assert_no_ip_addresses_on_interface(router.ns_name,
                                                  interface_name)
        utils.wait_until_true(lambda: router.ha_state == 'master')

        # Keepalived notifies of a state transition when it starts,
        # not when it ends. Thus, we have to wait until keepalived
        # finishes configuring everything. We verify this by waiting
        # until the last device has an IP address.
        device = router.router[l3_constants.INTERFACE_KEY][-1]
        device_exists = functools.partial(
            self.device_exists_with_ips_and_mac,
            device,
            router.get_internal_device_name,
            router.ns_name)
        utils.wait_until_true(device_exists)

    self.assertTrue(self._namespace_exists(router.ns_name))
    utils.wait_until_true(
        lambda: self._metadata_proxy_exists(self.agent.conf, router))
    self._assert_internal_devices(router)
    self._assert_external_device(router)
    if not (enable_ha and (ip_version == 6 or dual_stack)):
        # Note(SridharG): enable the assert_gateway for IPv6 once
        # keepalived on Ubuntu14.04 (i.e., check-neutron-dsvm-functional
        # platform) is updated to 1.2.10 (or above).
        # For more details: https://review.openstack.org/#/c/151284/
        self._assert_gateway(router, v6_ext_gw_with_sub)
        self.assertTrue(self.floating_ips_configured(router))
        self._assert_snat_chains(router)
        self._assert_floating_ip_chains(router)
        self._assert_extra_routes(router)
    self._assert_metadata_chains(router)

    if enable_ha:
        self._assert_ha_device(router)
        self.assertTrue(router.keepalived_manager.get_process().active)

    self._delete_router(self.agent, router.router_id)

    self._assert_interfaces_deleted_from_ovs()
    self._assert_router_does_not_exist(router)
    if enable_ha:
        self.assertFalse(router.keepalived_manager.get_process().active)
def _assert_external_device(self, router):
    # The external gateway device must exist with the expected IPs/MAC.
    external_port = router.get_ex_gw_port()
    self.assertTrue(self.device_exists_with_ips_and_mac(
        external_port, router.get_external_device_name,
        router.ns_name))
def _assert_gateway(self, router, v6_ext_gw_with_sub=True):
    """Assert a default route exists for every gateway subnet.

    When the v6 external gateway carries no subnet, the agent's
    configured ipv6_gateway is checked instead.
    """
    external_port = router.get_ex_gw_port()
    external_device_name = router.get_external_device_name(
        external_port['id'])
    external_device = ip_lib.IPDevice(external_device_name,
                                      namespace=router.ns_name)
    for subnet in external_port['subnets']:
        self._gateway_check(subnet['gateway_ip'], external_device)
    if not v6_ext_gw_with_sub:
        self._gateway_check(self.agent.conf.ipv6_gateway,
                            external_device)
def _gateway_check(self, gateway_ip, external_device):
    """Assert the device's default route points at ``gateway_ip``."""
    version = netaddr.IPAddress(gateway_ip).version
    route = external_device.route.get_gateway(ip_version=version)
    self.assertEqual(gateway_ip, route.get('gateway'))
def _assert_ha_device(self, router):
    # The HA (VRRP) device must exist with the HA port's IPs and MAC.
    def ha_router_dev_name_getter(not_used):
        return router.get_ha_device_name()
    self.assertTrue(self.device_exists_with_ips_and_mac(
        router.router[l3_constants.HA_INTERFACE_KEY],
        ha_router_dev_name_getter, router.ns_name))
@classmethod
def _get_addresses_on_device(cls, namespace, interface):
    """Return the CIDRs configured on ``interface`` in ``namespace``."""
    device = ip_lib.IPDevice(interface, namespace=namespace)
    return [address['cidr'] for address in device.addr.list()]
def _assert_no_ip_addresses_on_interface(self, namespace, interface):
    """Assert the interface carries no IP addresses at all."""
    addresses = self._get_addresses_on_device(namespace, interface)
    self.assertEqual([], addresses)
def test_ha_router_conf_on_restarted_agent(self):
    """A restarted agent rebuilds HA router state (FIPs and VIPs)."""
    router_info = self.generate_router_info(enable_ha=True)
    router1 = self.manage_router(self.agent, router_info)
    self._add_fip(router1, '192.168.111.12')
    restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
        self.agent.host, self.agent.conf)
    self._create_router(restarted_agent, router1.router)
    utils.wait_until_true(lambda: self.floating_ips_configured(router1))
    # The primary VIP must be back on the HA device after the restart.
    self.assertIn(
        router1._get_primary_vip(),
        self._get_addresses_on_device(
            router1.ns_name,
            router1.get_ha_device_name()))
def test_fip_connection_from_same_subnet(self):
    '''Test connection to floatingip which is associated with
    fixed_ip on the same subnet of the source fixed_ip.
    In other words it confirms that return packets surely
    go through the router.
    '''
    router_info = self.generate_router_info(enable_ha=False)
    router = self.manage_router(self.agent, router_info)
    router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
    router_ip = router_ip_cidr.partition('/')[0]

    # Pick two consecutive free addresses on the router's subnet.
    src_ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
    dst_ip_cidr = net_helpers.increment_ip_cidr(src_ip_cidr)
    dst_ip = dst_ip_cidr.partition('/')[0]
    dst_fip = '19.4.4.10'
    router.router[l3_constants.FLOATINGIP_KEY] = []
    self._add_fip(router, dst_fip, fixed_address=dst_ip)
    router.process(self.agent)

    br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)

    # FIXME(cbrandily): temporary, will be replaced by fake machines
    src_ns = self._create_namespace(prefix='test-src-')
    src_port = self.useFixture(
        net_helpers.OVSPortFixture(br_int, src_ns.namespace)).port
    src_port.addr.add(src_ip_cidr)
    net_helpers.set_namespace_gateway(src_port, router_ip)
    dst_ns = self._create_namespace(prefix='test-dst-')
    dst_port = self.useFixture(
        net_helpers.OVSPortFixture(br_int, dst_ns.namespace)).port
    dst_port.addr.add(dst_ip_cidr)
    net_helpers.set_namespace_gateway(dst_port, router_ip)

    protocol_port = helpers.get_free_namespace_port(dst_ns)
    # client sends to fip
    netcat = helpers.NetcatTester(src_ns, dst_ns, dst_ip,
                                  protocol_port,
                                  client_address=dst_fip,
                                  run_as_root=True,
                                  udp=False)
    self.addCleanup(netcat.stop_processes)
    self.assertTrue(netcat.test_connectivity())
class L3HATestFramework(L3AgentTestFramework):
    # Separator used to build per-agent namespace names so two agents
    # can host the same router id on a single test machine.
    NESTED_NAMESPACE_SEPARATOR = '@'

    def setUp(self):
        # A second ("failover") agent is wired to the first by a veth
        # pair bridging both agents' integration bridges.
        super(L3HATestFramework, self).setUp()
        self.failover_agent = self._configure_agent('agent2')

        br_int_1 = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
        br_int_2 = get_ovs_bridge(
            self.failover_agent.conf.ovs_integration_bridge)

        veth1, veth2 = self.create_veth()
        br_int_1.add_port(veth1.name)
        br_int_2.add_port(veth2.name)

    def test_ha_router_failover(self):
        """Downing the master's HA device promotes the backup agent."""
        router_info = self.generate_router_info(enable_ha=True)
        # Give each agent a distinct namespace name for the same router.
        ns_name = "%s%s%s" % (
            namespaces.RouterNamespace._get_ns_name(router_info['id']),
            self.NESTED_NAMESPACE_SEPARATOR, self.agent.host)
        mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
                          return_value=ns_name).start()
        router1 = self.manage_router(self.agent, router_info)

        router_info_2 = copy.deepcopy(router_info)
        router_info_2[l3_constants.HA_INTERFACE_KEY] = (
            test_l3_agent.get_ha_interface(ip='169.254.192.2',
                                           mac='22:22:22:22:22:22'))

        ns_name = "%s%s%s" % (
            namespaces.RouterNamespace._get_ns_name(router_info_2['id']),
            self.NESTED_NAMESPACE_SEPARATOR, self.failover_agent.host)
        mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
                          return_value=ns_name).start()
        router2 = self.manage_router(self.failover_agent, router_info_2)

        utils.wait_until_true(lambda: router1.ha_state == 'master')
        utils.wait_until_true(lambda: router2.ha_state == 'backup')

        device_name = router1.get_ha_device_name()
        ha_device = ip_lib.IPDevice(device_name, namespace=router1.ns_name)
        ha_device.link.set_down()

        utils.wait_until_true(lambda: router2.ha_state == 'master')
        utils.wait_until_true(lambda: router1.ha_state == 'backup')
class MetadataFakeProxyHandler(object):
    """Minimal WSGI app answering every request with a fixed status."""

    def __init__(self, status):
        self.status = status

    @webob.dec.wsgify()
    def __call__(self, req):
        return webob.Response(status=self.status)
class MetadataL3AgentTestCase(L3AgentTestFramework):
    # Mode of the metadata proxy UNIX socket; overridden by subclasses
    # that run the proxy as an unprivileged user/group.
    SOCKET_MODE = 0o644

    def _create_metadata_fake_server(self, status):
        server = utils.UnixDomainWSGIServer('metadata-fake-server')
        self.addCleanup(server.stop)

        # NOTE(cbrandily): TempDir fixture creates a folder with 0o700
        # permissions but metadata_proxy_socket folder must be readable
        # by all users
        self.useFixture(
            helpers.RecursivePermDirFixture(
                os.path.dirname(self.agent.conf.metadata_proxy_socket),
                0o555))
        server.start(MetadataFakeProxyHandler(status),
                     self.agent.conf.metadata_proxy_socket,
                     workers=0, backlog=4096, mode=self.SOCKET_MODE)

    def test_access_to_metadata_proxy(self):
        """Test access to the l3-agent metadata proxy.

        The test creates:
         * A l3-agent metadata service:
           * A router (which creates a metadata proxy in the router
             namespace),
           * A fake metadata server
         * A "client" namespace (simulating a vm) with a port on router
           internal subnet.

        The test queries from the "client" namespace the metadata proxy on
        http://169.254.169.254 and asserts that the metadata proxy added
        the X-Forwarded-For and X-Neutron-Router-Id headers to the request
        and forwarded the http request to the fake metadata server and the
        response to the "client" namespace.
        """
        router_info = self.generate_router_info(enable_ha=False)
        router = self.manage_router(self.agent, router_info)
        self._create_metadata_fake_server(webob.exc.HTTPOk.code)

        # Create and configure client namespace
        client_ns = self._create_namespace()
        router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
        ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
        br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)

        # FIXME(cbrandily): temporary, will be replaced by a fake machine
        port = self.useFixture(
            net_helpers.OVSPortFixture(br_int, client_ns.namespace)).port
        port.addr.add(ip_cidr)
        net_helpers.set_namespace_gateway(port,
                                          router_ip_cidr.partition('/')[0])

        # Query metadata proxy
        url = 'http://%(host)s:%(port)s' % {'host': dhcp.METADATA_DEFAULT_IP,
                                            'port': dhcp.METADATA_PORT}
        cmd = 'curl', '--max-time', METADATA_REQUEST_TIMEOUT, '-D-', url
        try:
            raw_headers = client_ns.netns.execute(cmd)
        except RuntimeError:
            self.fail('metadata proxy unreachable on %s before timeout' % url)

        # Check status code
        firstline = raw_headers.splitlines()[0]
        self.assertIn(str(webob.exc.HTTPOk.code), firstline.split())
class UnprivilegedUserMetadataL3AgentTestCase(MetadataL3AgentTestCase):
    """Test metadata proxy with least privileged user.

    The least privileged user has uid=65534 and is commonly named 'nobody'
    but not always, that's why we use its uid.
    """

    # Group-writable so the unprivileged proxy user can use the socket.
    SOCKET_MODE = 0o664

    def setUp(self):
        super(UnprivilegedUserMetadataL3AgentTestCase, self).setUp()
        self.agent.conf.set_override('metadata_proxy_user', '65534')
        self.agent.conf.set_override('metadata_proxy_watch_log', False)
class UnprivilegedUserGroupMetadataL3AgentTestCase(MetadataL3AgentTestCase):
    """Test metadata proxy with least privileged user/group.

    The least privileged user has uid=65534 and is commonly named 'nobody'
    but not always, that's why we use its uid.
    Its group has gid=65534 and is commonly named 'nobody' or 'nogroup',
    that's why we use its gid.
    """

    # World-writable so the unprivileged user/group can use the socket.
    SOCKET_MODE = 0o666

    def setUp(self):
        super(UnprivilegedUserGroupMetadataL3AgentTestCase, self).setUp()
        self.agent.conf.set_override('metadata_proxy_user', '65534')
        self.agent.conf.set_override('metadata_proxy_group', '65534')
        self.agent.conf.set_override('metadata_proxy_watch_log', False)
class TestDvrRouter(L3AgentTestFramework):

    def test_dvr_router_lifecycle_without_ha_without_snat_with_fips(self):
        # DVR lifecycle with the agent in plain 'dvr' mode.
        self._dvr_router_lifecycle(enable_ha=False, enable_snat=False)

    def test_dvr_router_lifecycle_without_ha_with_snat_with_fips(self):
        # DVR lifecycle with the agent in 'dvr_snat' mode.
        self._dvr_router_lifecycle(enable_ha=False, enable_snat=True)
def _helper_create_dvr_router_fips_for_ext_network(
        self, agent_mode, **dvr_router_kwargs):
    """Create a DVR router with a FIP; return (router, fip namespace name)."""
    self.agent.conf.agent_mode = agent_mode
    router_info = self.generate_dvr_router_info(**dvr_router_kwargs)
    # The whole L3PluginApi is mocked; wire up the external network id
    # the agent resolves for the FIP over RPC.
    mocked_ext_net_id = (
        neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
    mocked_ext_net_id.return_value = (
        router_info['_floatingips'][0]['floating_network_id'])
    router = self.manage_router(self.agent, router_info)
    fip_ns = router.fip_ns.get_name()
    return router, fip_ns
def _validate_fips_for_external_network(self, router, fip_ns):
    # Router and FIP namespaces must exist, FIP plumbing must be in
    # place, and (dvr mode) no snat namespace may exist.
    self.assertTrue(self._namespace_exists(router.ns_name))
    self.assertTrue(self._namespace_exists(fip_ns))
    self._assert_dvr_floating_ips(router)
    self._assert_snat_namespace_does_not_exist(router)
def test_dvr_router_fips_for_multiple_ext_networks(self):
    """A dvr agent can host FIPs on two different external networks."""
    agent_mode = 'dvr'
    # Create the first router fip with external net1
    dvr_router1_kwargs = {'ip_address': '19.4.4.3',
                          'subnet_cidr': '19.4.4.0/24',
                          'gateway_ip': '19.4.4.1',
                          'gateway_mac': 'ca:fe:de:ab:cd:ef'}
    router1, fip1_ns = (
        self._helper_create_dvr_router_fips_for_ext_network(
            agent_mode, **dvr_router1_kwargs))
    # Validate the fip with external net1
    self._validate_fips_for_external_network(router1, fip1_ns)

    # Create the second router fip with external net2
    dvr_router2_kwargs = {'ip_address': '19.4.5.3',
                          'subnet_cidr': '19.4.5.0/24',
                          'gateway_ip': '19.4.5.1',
                          'gateway_mac': 'ca:fe:de:ab:cd:fe'}
    router2, fip2_ns = (
        self._helper_create_dvr_router_fips_for_ext_network(
            agent_mode, **dvr_router2_kwargs))
    # Validate the fip with external net2
    self._validate_fips_for_external_network(router2, fip2_ns)
def _dvr_router_lifecycle(self, enable_ha=False, enable_snat=False,
                          custom_mtu=2000):
    '''Test dvr router lifecycle

    :param enable_ha: sets the ha value for the router.
    :param enable_snat: the value of enable_snat is used
        to set the agent_mode.
    :param custom_mtu: MTU expected on the rfp/fpr veth pair.
    '''
    # The value of agent_mode can be dvr, dvr_snat, or legacy.
    # Since by definition this is a dvr (distributed = true)
    # only dvr and dvr_snat are applicable
    self.agent.conf.agent_mode = 'dvr_snat' if enable_snat else 'dvr'
    self.agent.conf.network_device_mtu = custom_mtu

    # We get the router info particular to a dvr router
    router_info = self.generate_dvr_router_info(
        enable_ha, enable_snat)

    # We need to mock the get_agent_gateway_port return value
    # because the whole L3PluginApi is mocked and we need the port
    # gateway_port information before the l3_agent will create it.
    # The port returned needs to have the same information as
    # router_info['gw_port']
    mocked_gw_port = (
        neutron_l3_agent.L3PluginApi.return_value.get_agent_gateway_port)
    mocked_gw_port.return_value = router_info['gw_port']

    # We also need to mock the get_external_network_id method to
    # get the correct fip namespace.
    mocked_ext_net_id = (
        neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
    mocked_ext_net_id.return_value = (
        router_info['_floatingips'][0]['floating_network_id'])

    # With all that set we can now ask the l3_agent to
    # manage the router (create it, create namespaces,
    # attach interfaces, etc...)
    router = self.manage_router(self.agent, router_info)

    self.assertTrue(self._namespace_exists(router.ns_name))
    self.assertTrue(self._metadata_proxy_exists(self.agent.conf, router))
    self._assert_internal_devices(router)
    self._assert_dvr_external_device(router)
    self._assert_dvr_gateway(router)
    self._assert_dvr_floating_ips(router)
    self._assert_snat_chains(router)
    self._assert_floating_ip_chains(router)
    self._assert_metadata_chains(router)
    self._assert_extra_routes(router)
    self._assert_rfp_fpr_mtu(router, custom_mtu)

    self._delete_router(self.agent, router.router_id)
    self._assert_interfaces_deleted_from_ovs()
    self._assert_router_does_not_exist(router)
def generate_dvr_router_info(
        self, enable_ha=False, enable_snat=False, **kwargs):
    """Build a router_info dict for a distributed (DVR) router.

    Extra kwargs are forwarded to test_l3_agent.prepare_router_data.
    """
    router = test_l3_agent.prepare_router_data(
        enable_snat=enable_snat,
        enable_floating_ip=True,
        enable_ha=enable_ha,
        **kwargs)
    internal_ports = router.get(l3_constants.INTERFACE_KEY, [])
    router['distributed'] = True
    router['gw_port_host'] = self.agent.conf.host
    router['gw_port']['binding:host_id'] = self.agent.conf.host
    # Bind the FIP to this host and the first internal port.
    floating_ip = router['_floatingips'][0]
    floating_ip['floating_network_id'] = router['gw_port']['network_id']
    floating_ip['host'] = self.agent.conf.host
    floating_ip['port_id'] = internal_ports[0]['id']
    floating_ip['status'] = 'ACTIVE'

    self._add_snat_port_info_to_router(router, internal_ports)
    # FIP has a dependency on external gateway. So we need to create
    # the snat_port info and fip_agent_gw_port_info irrespective of
    # the agent type the dvr supports. The namespace creation is
    # dependent on the agent_type.
    external_gw_port = router['gw_port']
    self._add_fip_agent_gw_port_info_to_router(router, external_gw_port)
    return router
def _add_fip_agent_gw_port_info_to_router(self, router, external_gw_port):
    # Add fip agent gateway port information to the router_info
    fip_gw_port_list = router.get(
        l3_constants.FLOATINGIP_AGENT_INTF_KEY, [])
    if not fip_gw_port_list and external_gw_port:
        # Get values from external gateway port
        fixed_ip = external_gw_port['fixed_ips'][0]
        float_subnet = external_gw_port['subnets'][0]
        port_ip = fixed_ip['ip_address']
        # Pick an ip address which is not the same as port_ip
        fip_gw_port_ip = str(netaddr.IPAddress(port_ip) + 5)
        # Add floatingip agent gateway port info to router
        prefixlen = netaddr.IPNetwork(float_subnet['cidr']).prefixlen
        router[l3_constants.FLOATINGIP_AGENT_INTF_KEY] = [
            {'subnets': [
                {'cidr': float_subnet['cidr'],
                 'gateway_ip': float_subnet['gateway_ip'],
                 'id': fixed_ip['subnet_id']}],
             'network_id': external_gw_port['network_id'],
             'device_owner': 'network:floatingip_agent_gateway',
             'mac_address': 'fa:16:3e:80:8d:89',
             'binding:host_id': self.agent.conf.host,
             'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
                            'ip_address': fip_gw_port_ip,
                            'prefixlen': prefixlen}],
             'id': _uuid(),
             'device_id': _uuid()}
        ]
def _add_snat_port_info_to_router(self, router, internal_ports):
    # Add snat port information to the router
    snat_port_list = router.get(l3_constants.SNAT_ROUTER_INTF_KEY, [])
    if not snat_port_list and internal_ports:
        # Get values from internal port
        port = internal_ports[0]
        fixed_ip = port['fixed_ips'][0]
        snat_subnet = port['subnets'][0]
        port_ip = fixed_ip['ip_address']
        # Pick an ip address which is not the same as port_ip
        snat_ip = str(netaddr.IPAddress(port_ip) + 5)
        # Add the info to router as the first snat port
        # in the list of snat ports
        prefixlen = netaddr.IPNetwork(snat_subnet['cidr']).prefixlen
        router[l3_constants.SNAT_ROUTER_INTF_KEY] = [
            {'subnets': [
                {'cidr': snat_subnet['cidr'],
                 'gateway_ip': snat_subnet['gateway_ip'],
                 'id': fixed_ip['subnet_id']}],
             'network_id': port['network_id'],
             'device_owner': 'network:router_centralized_snat',
             'mac_address': 'fa:16:3e:80:8d:89',
             'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
                            'ip_address': snat_ip,
                            'prefixlen': prefixlen}],
             'id': _uuid(),
             'device_id': _uuid()}
        ]
def _assert_dvr_external_device(self, router):
    """Check the external device's location for the current agent mode."""
    external_port = router.get_ex_gw_port()
    snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
        router.router_id)

    # if the agent is in dvr_snat mode, then we have to check
    # that the correct ports and ip addresses exist in the
    # snat_ns_name namespace
    if self.agent.conf.agent_mode == 'dvr_snat':
        self.assertTrue(self.device_exists_with_ips_and_mac(
            external_port, router.get_external_device_name,
            snat_ns_name))
    # if the agent is in dvr mode then the snat_ns_name namespace
    # should not be present at all:
    elif self.agent.conf.agent_mode == 'dvr':
        self.assertFalse(
            self._namespace_exists(snat_ns_name),
            "namespace %s was found but agent is in dvr mode not dvr_snat"
            % (str(snat_ns_name))
        )
    # if the agent is anything else the test is misconfigured
    # we force a test failure with message
    else:
        self.assertTrue(False, " agent not configured for dvr or dvr_snat")
def _assert_dvr_gateway(self, router):
    """Check gateway/namespace expectations for the current agent mode."""
    agent_mode = self.agent.conf.agent_mode
    # dvr_snat agents host the default gateway inside the snat namespace.
    if agent_mode == 'dvr_snat':
        self._assert_dvr_snat_gateway(router)
    # Plain dvr agents must not have a snat namespace at all.
    if agent_mode == 'dvr':
        self._assert_snat_namespace_does_not_exist(router)
def _assert_dvr_snat_gateway(self, router):
    # The default gateway inside the snat namespace must match the
    # external port's subnet gateway_ip.
    namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
        router.router_id)
    external_port = router.get_ex_gw_port()
    external_device_name = router.get_external_device_name(
        external_port['id'])
    external_device = ip_lib.IPDevice(external_device_name,
                                      namespace=namespace)
    existing_gateway = (
        external_device.route.get_gateway().get('gateway'))
    expected_gateway = external_port['subnets'][0]['gateway_ip']
    self.assertEqual(expected_gateway, existing_gateway)
def _assert_snat_namespace_does_not_exist(self, router):
    """Assert no snat namespace exists for this router."""
    ns = dvr_snat_ns.SnatNamespace.get_snat_ns_name(router.router_id)
    self.assertFalse(self._namespace_exists(ns))
def _assert_dvr_floating_ips(self, router):
    # in the fip namespace:
    # Check that the fg-<port-id> (floatingip_agent_gateway)
    # is created with the ip address of the external gateway port
    floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
    self.assertTrue(floating_ips)

    # We need to fetch the floatingip agent gateway port info
    # from the router_info
    floating_agent_gw_port = (
        router.router[l3_constants.FLOATINGIP_AGENT_INTF_KEY])
    self.assertTrue(floating_agent_gw_port)

    external_gw_port = floating_agent_gw_port[0]
    fip_ns = self.agent.get_fip_ns(floating_ips[0]['floating_network_id'])
    fip_ns_name = fip_ns.get_name()
    fg_port_created_successfully = ip_lib.device_exists_with_ips_and_mac(
        fip_ns.get_ext_device_name(external_gw_port['id']),
        [self._port_first_ip_cidr(external_gw_port)],
        external_gw_port['mac_address'],
        namespace=fip_ns_name)
    self.assertTrue(fg_port_created_successfully)

    # Check fpr-router device has been created
    device_name = fip_ns.get_int_device_name(router.router_id)
    fpr_router_device_created_successfully = ip_lib.device_exists(
        device_name, namespace=fip_ns_name)
    self.assertTrue(fpr_router_device_created_successfully)

    # In the router namespace
    # Check rfp-<router-id> is created correctly
    for fip in floating_ips:
        device_name = fip_ns.get_rtr_ext_device_name(router.router_id)
        self.assertTrue(ip_lib.device_exists(
            device_name, namespace=router.ns_name))
def test_dvr_router_rem_fips_on_restarted_agent(self):
    """The FIP namespace goes away when a restarted agent sees no FIPs."""
    self.agent.conf.agent_mode = 'dvr_snat'
    router_info = self.generate_dvr_router_info()
    router1 = self._create_router(self.agent, router_info)
    self._add_fip(router1, '192.168.111.12', self.agent.conf.host)
    fip_ns = router1.fip_ns.get_name()
    restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
        self.agent.host, self.agent.conf)
    # Strip the FIPs before the restarted agent re-creates the router.
    router1.router[l3_constants.FLOATINGIP_KEY] = []
    self._create_router(restarted_agent, router1.router)
    self._assert_dvr_snat_gateway(router1)
    self.assertFalse(self._namespace_exists(fip_ns))
def test_dvr_router_add_internal_network_set_arp_cache(self):
    # Check that, when the router is set up and there are
    # existing ports on the uplinked subnet, the ARP
    # cache is properly populated.
    self.agent.conf.agent_mode = 'dvr_snat'
    router_info = test_l3_agent.prepare_router_data()
    router_info['distributed'] = True
    expected_neighbor = '35.4.1.10'
    port_data = {
        'fixed_ips': [{'ip_address': expected_neighbor}],
        'mac_address': 'fa:3e:aa:bb:cc:dd',
        'device_owner': 'compute:None'
    }
    self.agent.plugin_rpc.get_ports_by_subnet.return_value = [port_data]
    router1 = self._create_router(self.agent, router_info)
    internal_device = router1.get_internal_device_name(
        router_info['_interfaces'][0]['id'])
    neighbors = ip_lib.IPDevice(internal_device, router1.ns_name).neigh
    self.assertEqual(expected_neighbor, neighbors.show().split()[0])
def _assert_rfp_fpr_mtu(self, router, expected_mtu=1500):
    # Both ends of the rfp/fpr veth pair (router namespace and fip
    # namespace) must carry the configured MTU.
    dev_mtu = self.get_device_mtu(
        router.router_id, router.fip_ns.get_rtr_ext_device_name,
        router.ns_name)
    self.assertEqual(expected_mtu, dev_mtu)
    dev_mtu = self.get_device_mtu(
        router.router_id, router.fip_ns.get_int_device_name,
        router.fip_ns.get_name())
    self.assertEqual(expected_mtu, dev_mtu)
|
pnavarro/neutron
|
neutron/tests/functional/agent/test_l3_agent.py
|
Python
|
apache-2.0
| 53,684
|
// Copyright 2006, 2008, 2009 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal.bindings;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import org.apache.tapestry5.beanmodel.PropertyConduit;
import org.apache.tapestry5.beanmodel.PropertyConduit2;
import org.apache.tapestry5.beanmodel.internal.services.Invariant;
import org.apache.tapestry5.commons.Location;
import org.apache.tapestry5.commons.internal.util.TapestryException;
import org.apache.tapestry5.internal.TapestryInternalUtils;
/**
 * Base class for bindings created by the {@link org.apache.tapestry5.internal.bindings.PropBindingFactory}. A subclass
 * of this is created at runtime.
 *
 * <p>Instances are immutable except for whatever state the underlying
 * {@link PropertyConduit} carries; all fields are assigned once in the constructor.
 */
public class PropBinding extends AbstractBinding implements InternalPropBinding
{
    /** Root object against which the property expression is evaluated. */
    private final Object root;

    /** Conduit used to read and update the property value. */
    private final PropertyConduit conduit;

    /** Pre-computed value returned by {@link #toString()}. */
    private final String toString;

    /**
     * True when the conduit carries the {@link Invariant} annotation.
     * Computed once in the constructor, so it can (and should) be final.
     */
    private final boolean invariant;

    /** The original binding expression (interned, allowing identity comparisons). */
    private final String expression;

    public PropBinding(final Location location, final Object root, final PropertyConduit conduit, final String expression, final String toString)
    {
        super(location);

        this.root = root;
        this.conduit = conduit;
        this.expression = expression.intern();
        this.toString = toString;

        // An @Invariant conduit yields the same value for the binding's lifetime.
        invariant = conduit.getAnnotation(Invariant.class) != null;
    }

    /**
     * The default implementation of get() will throw a TapestryException (binding is write only). The fabricated
     * subclass <em>may</em> override this method (as well as set()).
     */
    public Object get()
    {
        try
        {
            return conduit.get(root);
        }
        catch (Exception ex)
        {
            // Re-wrap with the binding's location for better diagnostics; the cause is preserved.
            throw new TapestryException(ex.getMessage(), getLocation(), ex);
        }
    }

    @Override
    public void set(Object value)
    {
        try
        {
            conduit.set(root, value);
        }
        catch (Exception ex)
        {
            throw new TapestryException(ex.getMessage(), getLocation(), ex);
        }
    }

    @Override
    public String toString()
    {
        return toString;
    }

    /**
     * Almost always returns false, unless the conduit provides the {@link org.apache.tapestry5.beanmodel.internal.services.Invariant}
     * annotation.
     */
    @Override
    public boolean isInvariant()
    {
        return invariant;
    }

    @Override
    public Class getBindingType()
    {
        // Raw Class kept to match the supertype's signature.
        return conduit.getPropertyType();
    }

    /**
     * Get the generic type from the underlying property
     *
     * @see PropertyConduit2#getPropertyGenericType()
     */
    @Override
    public Type getBindingGenericType()
    {
        if (conduit instanceof PropertyConduit2) {
            return ((PropertyConduit2) conduit).getPropertyGenericType();
        }
        // Older conduits only expose the raw property type.
        return conduit.getPropertyType();
    }

    @Override
    public <T extends Annotation> T getAnnotation(Class<T> annotationClass)
    {
        return conduit.getAnnotation(annotationClass);
    }

    /** Returns the name of the bound property, as reported by the conduit. */
    public String getPropertyName()
    {
        return TapestryInternalUtils.toInternalPropertyConduit(conduit).getPropertyName();
    }

    /** Returns the original (interned) binding expression. */
    public String getExpression()
    {
        return expression;
    }
}
|
apache/tapestry-5
|
tapestry-core/src/main/java/org/apache/tapestry5/internal/bindings/PropBinding.java
|
Java
|
apache-2.0
| 3,810
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! SQL parser and query planner
//!
//! This module groups the SQL front end: `parser` and `planner`
//! (see the respective submodule files for details).

/// SQL parsing (submodule defined in `parser.rs`).
pub mod parser;
/// Query planning (submodule defined in `planner.rs`).
pub mod planner;
|
itaiin/arrow
|
rust/datafusion/src/sql/mod.rs
|
Rust
|
apache-2.0
| 869
|
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.config.annotation.web.configurers;
import java.util.List;
import javax.servlet.Filter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.security.access.vote.AffirmativeBased;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.test.SpringTestContext;
import org.springframework.security.config.test.SpringTestContextExtension;
import org.springframework.security.test.context.annotation.SecurityTestExecutionListeners;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.security.web.FilterChainProxy;
import org.springframework.security.web.access.intercept.FilterSecurityInterceptor;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.web.servlet.MockMvc;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
 * Integration tests for URL-based authorization rules registered via
 * {@code authorizeRequests()} and {@link UrlAuthorizationConfigurer}.
 *
 * @author Rob Winch
 * @author Josh Cummings
 */
@ExtendWith({ SpringExtension.class, SpringTestContextExtension.class })
@SecurityTestExecutionListeners
public class UrlAuthorizationsTests {

    public final SpringTestContext spring = new SpringTestContext(this);

    @Autowired
    MockMvc mvc;

    @Test
    @WithMockUser(authorities = "ROLE_USER")
    public void hasAnyAuthorityWhenAuthoritySpecifiedThenMatchesAuthority() throws Exception {
        this.spring.register(RoleConfig.class).autowire();
        // 404 (not 403) means authorization passed but no handler exists for the URL.
        // @formatter:off
        this.mvc.perform(get("/role-user-authority"))
                .andExpect(status().isNotFound());
        this.mvc.perform(get("/role-user"))
                .andExpect(status().isNotFound());
        this.mvc.perform(get("/role-admin-authority"))
                .andExpect(status().isForbidden());
        // @formatter:on
    }

    @Test
    @WithMockUser(authorities = "ROLE_ADMIN")
    public void hasAnyAuthorityWhenAuthoritiesSpecifiedThenMatchesAuthority() throws Exception {
        this.spring.register(RoleConfig.class).autowire();
        this.mvc.perform(get("/role-user-admin-authority")).andExpect(status().isNotFound());
        this.mvc.perform(get("/role-user-admin")).andExpect(status().isNotFound());
        this.mvc.perform(get("/role-user-authority")).andExpect(status().isForbidden());
    }

    @Test
    @WithMockUser(roles = "USER")
    public void hasAnyRoleWhenRoleSpecifiedThenMatchesRole() throws Exception {
        this.spring.register(RoleConfig.class).autowire();
        // @formatter:off
        this.mvc.perform(get("/role-user"))
                .andExpect(status().isNotFound());
        this.mvc.perform(get("/role-admin"))
                .andExpect(status().isForbidden());
        // @formatter:on
    }

    @Test
    @WithMockUser(roles = "ADMIN")
    public void hasAnyRoleWhenRolesSpecifiedThenMatchesRole() throws Exception {
        this.spring.register(RoleConfig.class).autowire();
        this.mvc.perform(get("/role-admin-user")).andExpect(status().isNotFound());
        this.mvc.perform(get("/role-user")).andExpect(status().isForbidden());
    }

    @Test
    @WithMockUser(authorities = "USER")
    public void hasAnyRoleWhenRoleSpecifiedThenDoesNotMatchAuthority() throws Exception {
        this.spring.register(RoleConfig.class).autowire();
        // A bare "USER" authority must not satisfy hasAnyRole("USER") (which expects ROLE_USER).
        // @formatter:off
        this.mvc.perform(get("/role-user"))
                .andExpect(status().isForbidden());
        this.mvc.perform(get("/role-admin"))
                .andExpect(status().isForbidden());
        // @formatter:on
    }

    @Test
    public void configureWhenNoAccessDecisionManagerThenDefaultsToAffirmativeBased() {
        this.spring.register(NoSpecificAccessDecisionManagerConfig.class).autowire();
        FilterSecurityInterceptor interceptor = getFilter(FilterSecurityInterceptor.class);
        assertThat(interceptor).isNotNull();
        assertThat(interceptor).extracting("accessDecisionManager").isInstanceOf(AffirmativeBased.class);
    }

    /**
     * Returns the first filter of the given type in the "/" filter chain, or
     * {@code null} when no such filter is registered.
     */
    private <T extends Filter> T getFilter(Class<T> filterType) {
        FilterChainProxy proxy = this.spring.getContext().getBean(FilterChainProxy.class);
        List<Filter> filters = proxy.getFilters("/");
        for (Filter filter : filters) {
            if (filterType.isInstance(filter)) {
                // Class.cast is type-safe and avoids the unchecked (T) cast warning.
                return filterType.cast(filter);
            }
        }
        return null;
    }

    @EnableWebSecurity
    static class RoleConfig extends WebSecurityConfigurerAdapter {

        @Override
        protected void configure(HttpSecurity http) throws Exception {
            // @formatter:off
            http
                .authorizeRequests()
                    .antMatchers("/role-user-authority").hasAnyAuthority("ROLE_USER")
                    .antMatchers("/role-admin-authority").hasAnyAuthority("ROLE_ADMIN")
                    .antMatchers("/role-user-admin-authority").hasAnyAuthority("ROLE_USER", "ROLE_ADMIN")
                    .antMatchers("/role-user").hasAnyRole("USER")
                    .antMatchers("/role-admin").hasAnyRole("ADMIN")
                    .antMatchers("/role-user-admin").hasAnyRole("USER", "ADMIN");
            // @formatter:on
        }

    }

    @EnableWebSecurity
    static class NoSpecificAccessDecisionManagerConfig extends WebSecurityConfigurerAdapter {

        @Override
        protected void configure(HttpSecurity http) throws Exception {
            ApplicationContext context = getApplicationContext();
            // Parameterized constructor avoids the raw-type warning of the original.
            UrlAuthorizationConfigurer<HttpSecurity>.StandardInterceptUrlRegistry registry = http
                    .apply(new UrlAuthorizationConfigurer<HttpSecurity>(context)).getRegistry();
            // @formatter:off
            registry
                    .antMatchers("/a").hasRole("ADMIN")
                    .anyRequest().hasRole("USER");
            // @formatter:on
        }

    }

}
|
jgrandja/spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/UrlAuthorizationsTests.java
|
Java
|
apache-2.0
| 6,350
|
/**
*
* Process Editor
*
* (C) 2009, 2010 inubit AG
* (C) 2014 the authors
*
*/
package com.inubit.research.gui.plugins.validationPlugin;
import com.inubit.research.gui.Workbench;
import com.inubit.research.gui.WorkbenchEditorListener;
import com.inubit.research.validation.ValidationMessage;
import com.inubit.research.validation.Validator;
import java.awt.Color;
import java.awt.Insets;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.event.KeyEvent;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import javax.swing.table.DefaultTableModel;
import net.frapu.code.visualization.ProcessEdge;
import net.frapu.code.visualization.ProcessEditor;
import net.frapu.code.visualization.ProcessModel;
import net.frapu.code.visualization.ProcessModelListener;
import net.frapu.code.visualization.ProcessNode;
import net.frapu.code.visualization.ProcessObject;
/**
 * Modal-less dialog that displays the validation messages produced by a
 * {@link Validator} for the currently selected process model, and highlights
 * the process objects related to the selected message.
 *
 * @author tmi
 */
public class CheckingResultDialog extends javax.swing.JDialog {

    /** Validator whose messages are shown; replaced whenever the selected model changes. */
    private Validator validator;

    /** Objects currently highlighted in the editor for the selected message. */
    private Collection<ProcessObject> highlightedObjects =
            new HashSet<ProcessObject>();

    /** The message's primary object, drawn in a distinct color (may be null). */
    private ProcessObject primaryHighlightedObject;

    /** Original color of the primary object, restored when highlighting is removed. */
    private Color originalColorOfPrimaryObject;

    /** Messages currently visible in the table, in table-row order. */
    private List<ValidationMessage> shownMessages;

    /** Model the dialog is currently attached to (may be null if none is selected). */
    private ProcessModel currentModel;

    /** Listener that triggers revalidation on structural model changes. */
    private ProcessModelListener listener;

    // Purely visual property changes that must not trigger a revalidation.
    private final String[] ignoredPropertyChanges = {ProcessNode.PROP_BACKGROUND,
        ProcessNode.PROP_HEIGHT, ProcessNode.PROP_SHADOW, ProcessNode.PROP_WIDTH,
        ProcessNode.PROP_XPOS, ProcessNode.PROP_YPOS,
        ProcessEdge.PROP_COLOR_ARC, ProcessEdge.PROP_LABELOFFSET,
        ProcessEdge.PROP_POINTS, ProcessEdge.PROP_SOURCE_DOCKPOINT,
        ProcessEdge.PROP_TARGET_DOCKPOINT};

    /** Creates new form CheckingResultDialog */
    public CheckingResultDialog(Workbench parent, Validator validator) {
        super(parent, false);
        initComponents();
        this.validator = validator;
        updateMessages();
        fitPositionToParent();
        selectedModelChanged();
        // Follow the workbench: whenever another editor/model is selected, revalidate.
        getWorkbench().addWorkbenchEditorListener(new WorkbenchEditorListener() {

            @Override
            public void newEditorCreated(ProcessEditor editor) {
                selectedModelChanged();
            }

            @Override
            public void selectedProcessEditorChanged(ProcessEditor editor) {
                selectedModelChanged();
            }
        });
    }

    /**
     * Reacts to a change of the selected model: detaches the listener from the
     * previous model, creates a fresh {@link Validator} for the new one, and
     * revalidates.
     */
    protected void selectedModelChanged() {
        if (listener != null && currentModel != null) {
            currentModel.removeListener(listener);
        }
        currentModel = getWorkbench().getSelectedModel();
        listener = new ProcessModelListener() {

            @Override
            public void processNodeAdded(ProcessNode newNode) {
                doRevalidate();
            }

            @Override
            public void processNodeRemoved(ProcessNode remNode) {
                doRevalidate();
            }

            @Override
            public void processEdgeAdded(ProcessEdge edge) {
                doRevalidate();
            }

            @Override
            public void processEdgeRemoved(ProcessEdge edge) {
                doRevalidate();
            }

            @Override
            public void processObjectPropertyChange(ProcessObject obj,
                    String name, String oldValue, String newValue) {
                // Skip revalidation for purely visual changes (position, color, ...).
                if (!Arrays.asList(ignoredPropertyChanges).contains(name)) {
                    doRevalidate();
                }
            }
        };
        // NOTE: automatic revalidation on model changes is deliberately disabled.
        //currentModel.addListener(listener);
        validator = new Validator(currentModel, ValidationPlugin.getInstance(getWorkbench()).getSupportedModels());
        doRevalidate();
    }

    /** The dialog's parent is always the {@link Workbench} it was created with. */
    private Workbench getWorkbench() {
        return (Workbench) getParent();
    }

    /** Refreshes both the message table and the summary line. */
    private void updateMessages() {
        fillTable();
        updateSummaryLabel();
    }

    /**
     * Rebuilds the message table from the validator, honoring the
     * error/warning/information view checkboxes.
     */
    private void fillTable() {
        String[] columns = {"Type", "Description"};
        shownMessages = new LinkedList<ValidationMessage>();
        DefaultTableModel model = new DefaultTableModel(columns, 0) {

            @Override
            public boolean isCellEditable(int row, int column) {
                return false;
            }
        };
        for (ValidationMessage message : validator.getMessages(
                viewErrorsCheckbox.isSelected(),
                viewWarningsCheckbox.isSelected(),
                viewInformationCheckbox.isSelected())) {
            String[] row = {messageTypeString(message), message.getShortDescription()};
            model.addRow(row);
            shownMessages.add(message);
        }
        messagesTable.setModel(model);
        messagesTable.getColumnModel().getColumn(0).setMinWidth(75);
        messagesTable.getColumnModel().getColumn(0).setMaxWidth(90);
        messagesTable.getColumnModel().getColumn(0).setPreferredWidth(80);
    }

    /** Updates the bold summary line with the current message counts. */
    private void updateSummaryLabel() {
        summaryLabel.setText(validator.getAllMessages().size() + " messages: "
                + validator.getErrorMessages().size() + " errors, "
                + validator.getWarningMessages().size() + " warnings, "
                + validator.getInformationMessages().size() + " information-messages");
    }

    /** Maps a message's type constant to the label shown in the table. */
    private String messageTypeString(ValidationMessage message) {
        if (message.getType() == ValidationMessage.TYPE_INFO) {
            return "Information";
        } else if (message.getType() == ValidationMessage.TYPE_WARNING) {
            return "Warning";
        } else {
            return "Error";
        }
    }

    /**
     * Places the dialog next to its parent window: preferably to the right,
     * then left, below, above, and finally in the bottom-right screen corner.
     * Screen insets (task bars etc.) are taken into account.
     */
    private void fitPositionToParent() {
        Rectangle screenBounds = getParent().getGraphicsConfiguration().getBounds();
        Insets insets = Toolkit.getDefaultToolkit().getScreenInsets(
                getParent().getGraphicsConfiguration());
        screenBounds = new Rectangle((int) screenBounds.getX() - insets.left,
                (int) screenBounds.getY() - insets.top,
                (int) screenBounds.getWidth() - insets.left - insets.right,
                (int) screenBounds.getHeight() - insets.top - insets.bottom);
        Rectangle parentBounds = getParent().getBounds();
        if (parentBounds.getMaxX() + getWidth() <= screenBounds.getMaxX()) {
            setLocation((int) parentBounds.getMaxX(), (int) parentBounds.getY());
        } else if (parentBounds.getX() - getWidth() >= screenBounds.getX()) {
            setLocation((int) parentBounds.getX() - getWidth(),
                    (int) parentBounds.getY());
        } else if (parentBounds.getMaxY() + getHeight() <= screenBounds.getMaxY()) {
            setLocation((int) parentBounds.getX(), (int) parentBounds.getMaxY());
        } else if (parentBounds.getY() - getHeight() >= screenBounds.getY()) {
            setLocation((int) parentBounds.getX(),
                    (int) parentBounds.getY() - getHeight());
        } else {
            setLocation((int) screenBounds.getMaxX() - getWidth(),
                    (int) screenBounds.getMaxY() - getHeight());
        }
    }

    /**
     * Updates description pane and editor highlighting to reflect the table
     * selection; clears both when nothing is selected.
     */
    private void selectionChanged() {
        if (messagesTable.getSelectedRow() == -1) {
            removeHighlighting();
            descriptionPane.setText("Select an element to view its full "
                    + "description and highlight all related process objects");
        } else {
            ValidationMessage selectedMessage = shownMessages.get(
                    messagesTable.getSelectedRow());
            descriptionPane.setText(selectedMessage.getDescription());
            removeHighlighting();
            highlightedObjects = new HashSet<ProcessObject>(
                    selectedMessage.getInvolvedObjects());
            if (selectedMessage.hasPrimaryObject()) {
                highlightPrimaryObject(selectedMessage.getPrimaryObject());
            }
            for (ProcessObject object : highlightedObjects) {
                object.setHighlighted(true);
            }
            getWorkbench().repaint();
        }
    }

    /** Removes all highlighting and restores the primary object's original color. */
    private void removeHighlighting() {
        for (ProcessObject object : highlightedObjects) {
            object.setHighlighted(false);
        }
        if (primaryHighlightedObject != null) {
            if (primaryHighlightedObject instanceof ProcessNode) {
                ((ProcessNode) primaryHighlightedObject).
                        setBackground(originalColorOfPrimaryObject);
                primaryHighlightedObject = null;
            } else if (primaryHighlightedObject instanceof ProcessEdge) {
                ((ProcessEdge) primaryHighlightedObject).setColor(
                        originalColorOfPrimaryObject);
                primaryHighlightedObject = null;
            }
        }
        getWorkbench().repaint();
    }

    /**
     * Colors the given node or edge in a highlight color, remembering its
     * original color so {@link #removeHighlighting()} can restore it.
     */
    private void highlightPrimaryObject(ProcessObject primaryObject) {
        if (primaryObject instanceof ProcessNode) {
            primaryHighlightedObject = primaryObject;
            originalColorOfPrimaryObject =
                    ((ProcessNode) primaryObject).getBackground();
            ((ProcessNode) primaryObject).setBackground(new Color(255, 208, 0));
        } else if (primaryObject instanceof ProcessEdge) {
            primaryHighlightedObject = primaryObject;
            originalColorOfPrimaryObject =
                    ((ProcessEdge) primaryObject).getColor();
            ((ProcessEdge) primaryObject).setColor(new Color(255, 208, 0));
        }
    }

    /** Re-runs the validation and refreshes table, summary and highlighting. */
    private void doRevalidate() {
        validator.performCheck();
        updateMessages();
        selectionChanged();
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        jScrollPane1 = new javax.swing.JScrollPane();
        messagesTable = new javax.swing.JTable();
        closeButton = new javax.swing.JButton();
        revalidateButton = new javax.swing.JButton();
        summaryLabel = new javax.swing.JLabel();
        jScrollPane2 = new javax.swing.JScrollPane();
        descriptionPane = new javax.swing.JTextPane();
        jLabel1 = new javax.swing.JLabel();
        viewErrorsCheckbox = new javax.swing.JCheckBox();
        viewWarningsCheckbox = new javax.swing.JCheckBox();
        viewInformationCheckbox = new javax.swing.JCheckBox();

        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("Validation Result");
        addWindowListener(new java.awt.event.WindowAdapter() {
            public void windowClosed(java.awt.event.WindowEvent evt) {
                formWindowClosed(evt);
            }
        });

        messagesTable.setModel(new javax.swing.table.DefaultTableModel(
            new Object [][] {

            },
            new String [] {
                "Type", "Short description"
            }
        ) {
            Class[] types = new Class [] {
                java.lang.String.class, java.lang.String.class
            };
            boolean[] canEdit = new boolean [] {
                false, false
            };

            public Class getColumnClass(int columnIndex) {
                return types [columnIndex];
            }

            public boolean isCellEditable(int rowIndex, int columnIndex) {
                return canEdit [columnIndex];
            }
        });
        messagesTable.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
        messagesTable.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mousePressed(java.awt.event.MouseEvent evt) {
                messagesTableMousePressed(evt);
            }
        });
        messagesTable.addKeyListener(new java.awt.event.KeyAdapter() {
            public void keyReleased(java.awt.event.KeyEvent evt) {
                messagesTableKeyReleased(evt);
            }
        });
        jScrollPane1.setViewportView(messagesTable);

        closeButton.setText("Close");
        closeButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                closeButtonActionPerformed(evt);
            }
        });

        revalidateButton.setText("Revalidate");
        revalidateButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                revalidateButtonActionPerformed(evt);
            }
        });

        summaryLabel.setFont(new java.awt.Font("Tahoma", 1, 11));
        summaryLabel.setText("0 messages: 0 errors, 0 warnings, 0 information-messages");

        jScrollPane2.setBorder(null);
        jScrollPane2.setOpaque(false);

        descriptionPane.setBorder(null);
        descriptionPane.setEditable(false);
        descriptionPane.setText("Select an element to view its full description and highlight all related process objects");
        descriptionPane.setOpaque(false);
        jScrollPane2.setViewportView(descriptionPane);

        jLabel1.setText("View:");

        viewErrorsCheckbox.setSelected(true);
        viewErrorsCheckbox.setText("Errors");
        viewErrorsCheckbox.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                viewErrorsCheckboxActionPerformed(evt);
            }
        });

        viewWarningsCheckbox.setSelected(true);
        viewWarningsCheckbox.setText("Warnings");
        viewWarningsCheckbox.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                viewWarningsCheckboxActionPerformed(evt);
            }
        });

        viewInformationCheckbox.setSelected(true);
        viewInformationCheckbox.setText("Information messages");
        viewInformationCheckbox.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                viewInformationCheckboxActionPerformed(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jScrollPane2, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, 380, Short.MAX_VALUE)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(jLabel1)
                        .addGap(18, 18, 18)
                        .addComponent(viewErrorsCheckbox)
                        .addGap(18, 18, 18)
                        .addComponent(viewWarningsCheckbox)
                        .addGap(18, 18, 18)
                        .addComponent(viewInformationCheckbox))
                    .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 380, Short.MAX_VALUE)
                    .addComponent(summaryLabel)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(revalidateButton)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 213, Short.MAX_VALUE)
                        .addComponent(closeButton, javax.swing.GroupLayout.PREFERRED_SIZE, 82, javax.swing.GroupLayout.PREFERRED_SIZE)))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(summaryLabel)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 141, Short.MAX_VALUE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel1)
                    .addComponent(viewErrorsCheckbox)
                    .addComponent(viewWarningsCheckbox)
                    .addComponent(viewInformationCheckbox))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 91, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(closeButton)
                    .addComponent(revalidateButton))
                .addContainerGap())
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void closeButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_closeButtonActionPerformed
        dispose();
    }//GEN-LAST:event_closeButtonActionPerformed

    private void messagesTableMousePressed(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_messagesTableMousePressed
        selectionChanged();
    }//GEN-LAST:event_messagesTableMousePressed

    private void formWindowClosed(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowClosed
        removeHighlighting();
        // Bug fix: currentModel may be null (no model ever selected while the
        // dialog was open) — guard before detaching the listener, mirroring the
        // null check in selectedModelChanged().
        if (currentModel != null) {
            currentModel.removeListener(listener);
        }
    }//GEN-LAST:event_formWindowClosed

    private void messagesTableKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_messagesTableKeyReleased
        // Arrow-key navigation also changes the selected message.
        if (evt.getKeyCode() == KeyEvent.VK_UP ||
                evt.getKeyCode() == KeyEvent.VK_DOWN) {
            selectionChanged();
        }
    }//GEN-LAST:event_messagesTableKeyReleased

    private void revalidateButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_revalidateButtonActionPerformed
        doRevalidate();
    }//GEN-LAST:event_revalidateButtonActionPerformed

    private void viewErrorsCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewErrorsCheckboxActionPerformed
        fillTable();
        selectionChanged();
    }//GEN-LAST:event_viewErrorsCheckboxActionPerformed

    private void viewWarningsCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewWarningsCheckboxActionPerformed
        fillTable();
        selectionChanged();
    }//GEN-LAST:event_viewWarningsCheckboxActionPerformed

    private void viewInformationCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewInformationCheckboxActionPerformed
        fillTable();
        selectionChanged();
    }//GEN-LAST:event_viewInformationCheckboxActionPerformed

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton closeButton;
    private javax.swing.JTextPane descriptionPane;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JTable messagesTable;
    private javax.swing.JButton revalidateButton;
    private javax.swing.JLabel summaryLabel;
    private javax.swing.JCheckBox viewErrorsCheckbox;
    private javax.swing.JCheckBox viewInformationCheckbox;
    private javax.swing.JCheckBox viewWarningsCheckbox;
    // End of variables declaration//GEN-END:variables
}
|
bptlab/processeditor
|
src/com/inubit/research/gui/plugins/validationPlugin/CheckingResultDialog.java
|
Java
|
apache-2.0
| 20,064
|
# ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =============enthought library imports=======================
import os
from datetime import datetime, timedelta
from threading import Lock
import six
from sqlalchemy import create_engine, distinct, MetaData
from sqlalchemy.exc import (
SQLAlchemyError,
InvalidRequestError,
StatementError,
DBAPIError,
OperationalError,
)
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
from traits.api import (
Password,
Bool,
Str,
on_trait_change,
Any,
Property,
cached_property,
Int,
)
from pychron.database.core.base_orm import AlembicVersionTable
from pychron.database.core.query import compile_query
from pychron.loggable import Loggable
from pychron.regex import IPREGEX
def obscure_host(h):
    """Mask an IP address for display, keeping only its last octet.

    Hostnames that do not match ``IPREGEX`` are returned unchanged.
    """
    if IPREGEX.match(h):
        last_octet = h.rsplit(".", 1)[-1]
        h = "x.x.x.{}".format(last_octet)
    return h
def binfunc(ds, hours):
    """Yield padded ``(start, end)`` time bins for a run of timestamped records.

    Records whose timestamps lie within ``hours`` of their predecessor are
    grouped into one bin; a gap larger than ``hours`` starts a new bin.  Each
    bin is widened on both sides by 25% of the bin window.

    ``ds`` must be a non-empty sequence of objects with a ``timestamp``
    attribute (assumed chronological — TODO confirm with callers).
    """
    stamps = [record.timestamp for record in ds]
    window_seconds = hours * 3600
    pad = timedelta(seconds=window_seconds * 0.25)

    bin_start = stamps[0]
    for idx, current in enumerate(stamps):
        prev = stamps[max(0, idx - 1)]
        if (current - prev).total_seconds() > window_seconds:
            # Gap detected: close the previous bin and open a new one.
            yield bin_start - pad, prev + pad
            bin_start = current
    # Close the final (possibly only) bin at the last timestamp.
    yield bin_start - pad, current + pad
class SessionCTX(object):
    """Context manager handing out either the parent's shared session or a
    private one created from ``parent.session_factory``.

    With ``use_parent_session=True`` the parent's session is (re)created and
    closed through the parent's own bookkeeping.  Otherwise a fresh session
    temporarily replaces ``parent.session`` and the original is restored on
    exit.
    """

    def __init__(self, parent, use_parent_session=True):
        self._use_parent_session = use_parent_session
        self._parent = parent
        self._session = None    # private session, if one was created
        self._psession = None   # parent's original session, saved for restore

    def __enter__(self):
        if self._use_parent_session:
            self._parent.create_session()
            return self._parent.session
        # Swap in a private session, remembering the parent's current one.
        self._psession = self._parent.session
        self._session = self._parent.session_factory()
        self._parent.session = self._session
        return self._session

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._session:
            # Private session: close it ourselves.
            self._session.close()
        else:
            # Shared session: let the parent do its reference counting.
            self._parent.close_session()
        if self._psession:
            self._parent.session = self._psession
            self._psession = None
class MockQuery:
    """Inert stand-in for a SQLAlchemy ``Query``.

    Chainable methods (``join``/``filter``/``order_by``) return ``self`` so
    fluent call chains still work; ``all`` always yields no rows.
    """

    def _chain(self, *args, **kw):
        # Chainable no-op: ignore all arguments, keep the fluent interface.
        return self

    join = _chain
    filter = _chain
    order_by = _chain

    def all(self, *args, **kw):
        # Terminal call: a mock query never matches anything.
        return []
class MockSession:
    """Stand-in session used when no real database connection exists.

    Any ``query`` call produces a fresh :class:`MockQuery`, so calling code
    sees empty results instead of errors.
    """

    def query(self, *args, **kw):
        # Arguments are irrelevant: every mock query is empty.
        return MockQuery()
class DatabaseAdapter(Loggable):
"""
The DatabaseAdapter is a base class for interacting with a SQLAlchemy database.
Two main subclasses are used by pychron, IsotopeAdapter and MassSpecDatabaseAdapter.
This class provides attributes for describing the database url, i.e host, user, password etc,
and methods for connecting and opening database sessions.
It also provides some helper functions used extensively by the subclasses, e.g. ``_add_item``,
``_retrieve_items``
"""
session = None
sess_stack = 0
reraise = False
connected = Bool(False)
kind = Str
prev_kind = Str
username = Str
host = Str
password = Password
timeout = Int
session_factory = None
application = Any
test_func = "get_versions"
version_func = "get_versions"
autoflush = True
autocommit = False
commit_on_add = True
# name used when writing to database
# save_username = Str
connection_parameters_changed = Bool
url = Property(depends_on="connection_parameters_changed")
datasource_url = Property(depends_on="connection_parameters_changed")
path = Str
echo = False
verbose_retrieve_query = False
verbose = True
connection_error = Str
_session_lock = None
modified = False
_trying_to_add = False
_test_connection_enabled = True
def __init__(self, *args, **kw):
super(DatabaseAdapter, self).__init__(*args, **kw)
self._session_lock = Lock()
def create_all(self, metadata):
"""
Build a database schema with the current connection
:param metadata: SQLAchemy MetaData object
"""
# if self.kind == 'sqlite':
metadata.create_all(self.session.bind)
# def session_ctx(self, sess=None, commit=True, rollback=True):
# """
# Make a new session context.
#
# :return: ``SessionCTX``
# """
# with self._session_lock:
# if sess is None:
# sess = self.sess
# return SessionCTX(sess, parent=self, commit=commit, rollback=rollback)
_session_cnt = 0
def session_ctx(self, use_parent_session=True):
with self._session_lock:
return SessionCTX(self, use_parent_session)
def create_session(self, force=False):
if self.connect(test=False):
if self.session_factory:
if force:
self.debug("force create new session {}".format(id(self)))
if self.session:
self.session.close()
self.session = self.session_factory()
self._session_cnt = 1
else:
if not self.session:
# self.debug('create new session {}'.format(id(self)))
self.session = self.session_factory()
self._session_cnt += 1
else:
self.warning("no session factory")
else:
self.session = MockSession()
def close_session(self):
if self.session and not isinstance(self.session, MockSession):
self.session.flush()
self._session_cnt -= 1
if not self._session_cnt:
self.debug("close session {}".format(id(self)))
self.session.close()
self.session = None
@property
def enabled(self):
return self.kind in ["mysql", "sqlite", "postgresql", "mssql"]
@property
def save_username(self):
from pychron.globals import globalv
return globalv.username
@on_trait_change("username,host,password,name,kind,path")
def reset_connection(self):
"""
Trip the ``connection_parameters_changed`` flag. Next ``connect`` call with use the new values
"""
self.connection_parameters_changed = True
self.session_factory = None
self.session = None
# @caller
def connect(
    self, test=True, force=False, warn=True, version_warn=True, attribute_warn=False
):
    """
    Connect to the database

    :param test: Test the connection by running ``test_func``
    :param force: Test connection even if connection parameters haven't changed
    :param warn: Warn if the connection test fails
    :param version_warn: Warn if database/pychron versions don't match
    :return: True if connected else False
    :rtype: bool
    """
    self.connection_error = ""
    if force:
        self.debug("forcing database connection")
    if self.connection_parameters_changed:
        # New parameters: re-enable testing and force a reconnect.
        self._test_connection_enabled = True
        force = True
    if not self.connected or force:
        # self.connected = True if self.kind == 'sqlite' else False
        self.connected = False
        pool_recycle = 600
        if self.kind == "sqlite":
            # sqlite is file-backed; assume reachable and skip the test.
            self.connected = True
            test = False
            pool_recycle = -1
        # Default error text; only surfaced when `enabled` is False below.
        self.connection_error = (
            'Database "{}" kind not set. '
            'Set in Preferences. current kind="{}"'.format(self.name, self.kind)
        )
        if not self.enabled:
            from pychron.core.ui.gui import invoke_in_main_thread

            invoke_in_main_thread(self.warning_dialog, self.connection_error)
        else:
            url = self.url
            if url is not None:
                self.info(
                    "{} connecting to database {}".format(id(self), self.public_url)
                )
                engine = create_engine(
                    url, echo=self.echo, pool_recycle=pool_recycle
                )
                self.session_factory = sessionmaker(
                    bind=engine,
                    autoflush=self.autoflush,
                    expire_on_commit=False,
                    autocommit=self.autocommit,
                )
                if test:
                    if not self._test_connection_enabled:
                        # A previous test failed; don't warn again.
                        warn = False
                    else:
                        if self.test_func:
                            self.connected = self._test_db_connection(version_warn)
                        else:
                            self.connected = True
                else:
                    self.connected = True
                if self.connected:
                    self.info("connected to db {}".format(self.public_url))
                    # self.initialize_database()
                else:
                    self.connection_error = 'Not Connected to Database "{}".\nAccess Denied for user= {} \
host= {}\nurl= {}'.format(
                        self.name, self.username, self.host, self.public_url
                    )
                    if warn:
                        from pychron.core.ui.gui import invoke_in_main_thread

                        invoke_in_main_thread(
                            self.warning_dialog, self.connection_error
                        )
        self.connection_parameters_changed = False
    return self.connected
# def initialize_database(self):
# pass
def rollback(self):
    """Roll back the active session, if any."""
    sess = self.session
    if sess:
        sess.rollback()
def flush(self):
    """
    Flush the session, rolling back on failure.

    The original bare ``except:`` also swallowed ``KeyboardInterrupt`` and
    ``SystemExit``; catch ``Exception`` instead so control-flow exceptions
    propagate while database errors still trigger a rollback.
    """
    if self.session:
        try:
            self.session.flush()
        except Exception:
            self.session.rollback()
def expire(self, i):
    """Expire ORM instance ``i`` so its attributes reload on next access."""
    sess = self.session
    if sess:
        sess.expire(i)
def expire_all(self):
    """Expire every instance held by the active session."""
    sess = self.session
    if sess:
        sess.expire_all()
def commit(self):
    """
    Commit the session; on any failure log a warning and roll back.
    """
    sess = self.session
    if not sess:
        return
    try:
        sess.commit()
    except BaseException as e:
        self.warning("Commit exception: {}".format(e))
        sess.rollback()
def delete(self, obj):
    """Mark ``obj`` for deletion in the active session."""
    sess = self.session
    if sess:
        sess.delete(obj)
def post_commit(self):
    """After a commit, flag the adapter as modified when an add was pending."""
    if not self._trying_to_add:
        return
    self.modified = True
def add_item(self, *args, **kw):
    """Public wrapper around ``_add_item``: add a record to the session."""
    return self._add_item(*args, **kw)
# def get_session(self):
# """
# return the current session or make a new one
#
# :return: Session
# """
# sess = self.sess
# if sess is None:
# self.debug('$$$$$$$$$$$$$$$$ session is None')
# sess = self.session_factory()
#
# return sess
def get_migrate_version(self, **kw):
    """
    Query the AlembicVersionTable

    :return: the single ``AlembicVersionTable`` row (``Query.one`` raises
        if there is not exactly one row)
    """
    q = self.session.query(AlembicVersionTable)
    mv = q.one()
    return mv
def get_versions(self, **kw):
    # Subclass hook: return version information for this database. Default: no-op.
    pass
@property
def public_datasource_url(self):
    """Datasource URL safe for display; the host is obscured for server DBs."""
    if self.kind == "sqlite":
        parent = os.path.basename(os.path.dirname(self.path))
        return "{}:{}".format(parent, os.path.basename(self.path))
    return "{}:{}".format(obscure_host(self.host), self.name)
@cached_property
def _get_datasource_url(self):
    """Backing getter for the datasource URL: dir:file for sqlite, host:name otherwise."""
    if self.kind == "sqlite":
        parent = os.path.basename(os.path.dirname(self.path))
        return "{}:{}".format(parent, os.path.basename(self.path))
    return "{}:{}".format(self.host, self.name)
@property
def public_url(self):
    """Connection URL without the password, suitable for logging and UI."""
    if self.kind == "sqlite":
        return "{}:{}".format(
            os.path.basename(os.path.dirname(self.path)),
            os.path.basename(self.path),
        )
    return "{}://{}@{}/{}".format(self.kind, self.username, self.host, self.name)
@cached_property
def _get_url(self):
    """Assemble the SQLAlchemy connection URL for the configured backend.

    Returns None when a required driver cannot be imported.
    """
    kind = self.kind
    password = self.password
    user = self.username
    host = self.host
    name = self.name
    timeout = self.timeout
    if kind in ("mysql", "postgresql", "mssql"):
        if kind == "mysql":
            # add support for different mysql drivers
            driver = self._import_mysql_driver()
            if driver is None:
                return
        elif kind == "mssql":
            driver = self._import_mssql_driver()
            if driver is None:
                return
        else:
            driver = "pg8000"
        if password:
            user = "{}:{}".format(user, password)
        prefix = "{}+{}://{}@".format(kind, driver, user)
        if driver == "pyodbc":
            # pyodbc URLs use a DSN name rather than host/database.
            url = "{}{}".format(prefix, name)
        else:
            url = "{}{}/{}".format(prefix, host, name)
        if kind == "mysql" and self.timeout:
            url = "{}?connect_timeout={}".format(url, timeout)
    else:
        url = "sqlite:///{}".format(self.path)
    return url
def _import_mssql_driver(self):
    """Return the first importable MSSQL driver name ('pyodbc' or 'pymssql'), else None."""
    driver = None
    try:
        import pyodbc

        driver = "pyodbc"
    except ImportError:
        try:
            import pymssql

            driver = "pymssql"
        except ImportError:
            pass
    self.info('using mssql driver="{}"'.format(driver))
    return driver
def _import_mysql_driver(self):
    """Return an importable MySQL driver name for the SQLAlchemy URL.

    Prefers PyMySQL; falls back to MySQL-python ('mysqldb'). Pops a warning
    dialog and returns None when neither is installed.
    """
    try:
        """
        pymysql
        https://github.com/petehunt/PyMySQL/
        """
        import pymysql

        driver = "pymysql"
    except ImportError:
        try:
            # _mysql is the C extension underlying MySQL-python.
            import _mysql

            driver = "mysqldb"
        except ImportError:
            self.warning_dialog(
                "A mysql driver was not found. Install PyMySQL or MySQL-python"
            )
            return
    self.info('using mysql driver="{}"'.format(driver))
    return driver
def _test_db_connection(self, version_warn):
    """Run ``test_func`` against a temporary session; return True on success.

    An ``OperationalError`` disables further connection tests until the
    connection parameters change (see ``connect``).
    """
    self.connected = True
    self.create_session()
    try:
        self.info("testing database connection {}".format(self.test_func))
        # NOTE(review): `vers` is unused here; the call is made only for its
        # side effect of exercising the connection.
        vers = getattr(self, self.test_func)(reraise=True)
        if version_warn:
            self._version_warn_hook()
        connected = True
    except OperationalError:
        self.warning("Operational connection failed to {}".format(self.public_url))
        connected = False
        self._test_connection_enabled = False
    except Exception as e:
        self.debug_exception()
        self.warning(
            "connection failed to {} exception={}".format(self.public_url, e)
        )
        connected = False
    finally:
        self.info("closing test session")
        self.close_session()
    return connected
def _version_warn_hook(self):
    # Subclass hook: called after a successful connection test so subclasses
    # can compare database/schema versions and warn. Default: no-op.
    pass
# def test_version(self):
# ver = getattr(self, self.version_func)()
# ver = ver.version_num
# aver = version.__alembic__
# if ver != aver:
# return 'Database is out of data. Pychron ver={}, Database ver={}'.format(aver, ver)
def _add_item(self, obj):
    """Add ``obj`` to the session, flushing/committing per adapter settings.

    :return: ``obj`` on success, None when there is no session or the add
        failed (and ``reraise`` is False).
    """
    sess = self.session
    if sess:
        sess.add(obj)
        try:
            if self.autoflush:
                sess.flush()
            self.modified = True
            self._trying_to_add = True
            if not self.autocommit and self.commit_on_add:
                sess.commit()
            return obj
        except SQLAlchemyError as e:
            import traceback

            self.debug(
                "add_item exception {} {}".format(obj, traceback.format_exc())
            )
            sess.rollback()
            if self.reraise:
                raise
    else:
        self.critical("No session")
def _add_unique(self, item, attr, name):
    """Add ``item`` only if no ``attr`` record named ``name`` exists.

    :return: the pre-existing record, or ``item`` if it was added.
    """
    nitem = getattr(self, "get_{}".format(attr))(name)
    if nitem is None:
        self.info("adding {}= {}".format(attr, name))
        self._add_item(item)
        nitem = item
    return nitem
def _get_date_range(self, q, asc, desc, hours=0):
lan = q.order_by(asc).first()
han = q.order_by(desc).first()
lan = datetime.now() if not lan else lan.timestamp
han = datetime.now() if not han else han.timestamp
td = timedelta(hours=hours)
return lan - td, han + td
def _delete_item(self, value, name=None):
if name is not None:
func = getattr(self, "get_{}".format(name))
item = func(value)
else:
item = value
if item:
self.debug("deleting value={},name={},item={}".format(value, name, item))
self.session.delete(item)
def _retrieve_items(
    self,
    table,
    joins=None,
    filters=None,
    limit=None,
    order=None,
    distinct_=False,
    query_hook=None,
    reraise=False,
    func="all",
    group_by=None,
    verbose_query=False,
):
    """Build and execute a query against ``table`` and return its rows.

    :param table: mapped class, tuple of columns, or column for ``distinct_``
    :param joins: tables to join (invalid joins ignored unless ``reraise``)
    :param filters: iterable of filter criteria
    :param limit: maximum number of rows
    :param order: order-by criterion or tuple of criteria
    :param distinct_: True for ``distinct(table)``, or a column expression
    :param query_hook: callable given the query, returning a modified query
    :param func: terminal query method name ('all', 'first', ...)
    :param group_by: group-by criterion or tuple of criteria
    :return: list of results ([] when the session is missing/mocked or the
        query fails)
    """
    sess = self.session
    if sess is None or isinstance(sess, MockSession):
        self.debug("USING MOCKSESSION************** {}".format(sess))
        return []
    if distinct_:
        if isinstance(distinct_, bool):
            q = sess.query(distinct(table))
        else:
            q = sess.query(distinct(distinct_))
    elif isinstance(table, tuple):
        q = sess.query(*table)
    else:
        q = sess.query(table)
    if joins:
        try:
            for ji in joins:
                if ji != table:
                    q = q.join(ji)
        except InvalidRequestError:
            if reraise:
                raise
    if filters is not None:
        for fi in filters:
            q = q.filter(fi)
    if order is not None:
        if not isinstance(order, tuple):
            order = (order,)
        q = q.order_by(*order)
    if group_by is not None:
        # BUG FIX: this previously tested ``order`` instead of ``group_by``,
        # so a non-tuple group_by was never wrapped and ``*group_by`` below
        # iterated a bare column expression.
        if not isinstance(group_by, tuple):
            group_by = (group_by,)
        q = q.group_by(*group_by)
    if limit is not None:
        q = q.limit(limit)
    if query_hook:
        q = query_hook(q)
    if verbose_query or self.verbose_retrieve_query:
        # print compile_query(q)
        self.debug(compile_query(q))
    items = self._query(q, func, reraise)
    if items is None:
        items = []
    return items
def _retrieve_first(self, table, value=None, key="name", order_by=None):
    """Return the first ``table`` row whose ``key`` column equals ``value``.

    If ``value`` is not a plain lookup type it is assumed to already be a
    record and is returned unchanged. Returns None on query failure.
    """
    if value is not None:
        # Fixed: the tuple previously listed ``int`` twice.
        if not isinstance(value, (str, int, six.text_type, float)):
            return value
    q = self.session.query(table)
    if value is not None:
        q = q.filter(getattr(table, key) == value)
    try:
        if order_by is not None:
            q = q.order_by(order_by)
        return q.first()
    except SQLAlchemyError as e:
        # Fixed typo in the diagnostic message ("execption").
        print("exception first", e)
        return
def _query_all(self, q, **kw):
ret = self._query(q, "all", **kw)
return ret or []
def _query_first(self, q, **kw):
return self._query(q, "first", **kw)
def _query_one(self, q, **kw):
q = q.limit(1)
return self._query(q, "one", **kw)
def _query(self, q, func, reraise=False, verbose_query=False):
    """Execute query ``q`` via ``getattr(q, func)()`` with defensive handling.

    :param q: SQLAlchemy Query
    :param func: terminal method name ('all', 'first', 'one', ...)
    :param reraise: re-raise ``SQLAlchemyError`` after recovery attempt
    :param verbose_query: log the compiled SQL before executing
    :return: the query result, or None on failure / no rows
    """
    if verbose_query:
        try:
            cq = compile_query(q)
            self.debug(cq)
        except BaseException:
            cq = "Query failed to compile"
            self.debug_exception()
        # print compile_query(q)
    f = getattr(q, func)
    try:
        return f()
    except NoResultFound:
        if verbose_query:
            self.info("no results found for query -- {}".format(cq))
    except OperationalError as e:
        self.debug("_query operation exception")
        self.debug_exception()
    except SQLAlchemyError as e:
        if self.verbose:
            self.debug("_query exception {}".format(e))
        try:
            # Best-effort recovery so subsequent queries can succeed.
            self.rollback()
            self.reset_connection()
            self.connect()
        except BaseException:
            pass
        if reraise:
            raise e
def _append_filters(self, f, kw):
filters = kw.get("filters", [])
if isinstance(f, (tuple, list)):
filters.extend(f)
else:
filters.append(f)
kw["filters"] = filters
return kw
def _append_joins(self, f, kw):
joins = kw.get("joins", [])
if isinstance(f, (tuple, list)):
joins.extend(f)
else:
joins.append(f)
kw["joins"] = joins
return kw
def _retrieve_item(
    self,
    table,
    value,
    key="name",
    last=None,
    joins=None,
    filters=None,
    options=None,
    verbose=True,
    verbose_query=False,
):
    """Fetch a single ``table`` row matching ``key == value`` pairs.

    ``value``/``key`` may be scalars or parallel sequences. Non-lookup
    values are assumed to already be records and returned unchanged.
    Transient DB errors are retried up to 3 times; on multiple matches
    the last row (by ``table.id`` when available) is returned.

    :param last: optional order-by criterion applied before fetching
    :return: the matching record, or None
    """
    if not isinstance(value, (str, int, six.text_type, int, float, list, tuple)):
        return value
    if not isinstance(value, (list, tuple)):
        value = (value,)
    if not isinstance(key, (list, tuple)):
        key = (key,)

    def __retrieve(s):
        # Build and run the query against session ``s``.
        q = s.query(table)
        if joins:
            try:
                for ji in joins:
                    if ji != table:
                        q = q.join(ji)
            except InvalidRequestError:
                pass
        if filters is not None:
            for fi in filters:
                q = q.filter(fi)
        for k, v in zip(key, value):
            q = q.filter(getattr(table, k) == v)
        if last:
            q = q.order_by(last)
        if verbose_query or self.verbose_retrieve_query:
            self.debug(compile_query(q))
        ntries = 3
        import traceback

        for i in range(ntries):
            try:
                return q.one()
            except (DBAPIError, OperationalError, StatementError):
                # Transient failure: roll back and retry.
                self.debug(traceback.format_exc())
                s.rollback()
                continue
            except MultipleResultsFound:
                if verbose:
                    self.debug(
                        "multiples row found for {} {} {}. Trying to get last row".format(
                            table.__tablename__, key, value
                        )
                    )
                try:
                    if hasattr(table, "id"):
                        q = q.order_by(table.id.desc())
                    return q.limit(1).all()[-1]
                except (SQLAlchemyError, IndexError, AttributeError) as e:
                    if verbose:
                        self.debug(
                            "no rows for {} {} {}".format(
                                table.__tablename__, key, value
                            )
                        )
                break
            except NoResultFound:
                if verbose and self.verbose:
                    self.debug(
                        "no row found for {} {} {}".format(
                            table.__tablename__, key, value
                        )
                    )
                break

    # Transparently open/close a session when none is active.
    close = False
    if self.session is None:
        self.create_session()
        close = True
    ret = __retrieve(self.session)
    if close:
        self.close_session()
    return ret
def _get_items(
    self,
    table,
    gtables,
    join_table=None,
    filter_str=None,
    limit=None,
    order=None,
    key=None,
):
    """Query ``table`` with optional join/filter/limit, reordered by id.

    :param gtables: mapping of table-name -> table class used to resolve a
        string ``join_table``
    :param key: when given, return that attribute of each row instead of
        the row itself
    """
    if isinstance(join_table, str):
        join_table = gtables[join_table]
    q = self._get_query(table, join_table=join_table, filter_str=filter_str)
    if order:
        for o in order if isinstance(order, list) else [order]:
            q = q.order_by(o)
    if limit:
        q = q.limit(limit)
    # reorder based on id
    if order:
        q = q.from_self()
        q = q.order_by(table.id)
    res = q.all()
    if key:
        return [getattr(ri, key) for ri in res]
    return res
class PathDatabaseAdapter(DatabaseAdapter):
    """Adapter with an associated path table used to store file locations."""

    # Subclasses set this to the mapped class representing a stored path.
    path_table = None

    def add_path(self, rec, path, **kw):
        """Create a path-table row for ``path`` and attach it to ``rec``.

        :raises NotImplementedError: when ``path_table`` is not configured
        :return: the new path record
        """
        if self.path_table is None:
            raise NotImplementedError
        kw = self._get_path_keywords(path, kw)
        p = self.path_table(**kw)
        rec.path = p
        return p

    def _get_path_keywords(self, path, args):
        # Split ``path`` into directory/filename keywords for the path table.
        args["root"] = os.path.dirname(path)
        args["filename"] = os.path.basename(path)
        return args
class SQLiteDatabaseAdapter(DatabaseAdapter):
    """Adapter base class for file-backed SQLite databases."""

    kind = "sqlite"

    def build_database(self):
        """Connect and, if the database file does not exist, build the schema."""
        self.connect(test=False)
        if not os.path.isfile(self.path):
            meta = MetaData()
            self._build_database(self.session, meta)

    def _build_database(self, sess, meta):
        # Subclasses create their schema here.
        raise NotImplementedError
# ============= EOF =============================================
|
USGSDenverPychron/pychron
|
pychron/database/core/database_adapter.py
|
Python
|
apache-2.0
| 27,114
|
// Copyright 2014 PDFium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Original code copyright 2014 Foxit Software Inc. http://www.foxitsoftware.com
// Original code is licensed as follows:
/*
* Copyright 2013 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "xfa/src/fxbarcode/barcode.h"
#include "xfa/src/fxbarcode/BC_DecoderResult.h"
#include "xfa/src/fxbarcode/BC_ResultPoint.h"
#include "xfa/src/fxbarcode/common/BC_CommonBitMatrix.h"
#include "BC_PDF417Codeword.h"
#include "BC_PDF417Common.h"
#include "BC_PDF417BarcodeValue.h"
#include "BC_PDF417BarcodeMetadata.h"
#include "BC_PDF417BoundingBox.h"
#include "BC_PDF417DetectionResultColumn.h"
#include "BC_PDF417DetectionResultRowIndicatorColumn.h"
#include "BC_PDF417DetectionResult.h"
#include "BC_PDF417DecodedBitStreamParser.h"
#include "BC_PDF417CodewordDecoder.h"
#include "BC_PDF417DecodedBitStreamParser.h"
#include "BC_PDF417ECModulusPoly.h"
#include "BC_PDF417ECModulusGF.h"
#include "BC_PDF417ECErrorCorrection.h"
#include "BC_PDF417DecodedBitStreamParser.h"
#include "BC_PDF417ScanningDecoder.h"
// Maximum tolerated skew (in pixels) between an expected codeword start
// column and the detected one.
int32_t CBC_PDF417ScanningDecoder::CODEWORD_SKEW_SIZE = 2;
// Error-correction retry limits used during codeword decoding.
int32_t CBC_PDF417ScanningDecoder::MAX_ERRORS = 3;
int32_t CBC_PDF417ScanningDecoder::MAX_EC_CODEWORDS = 512;
// Shared EC engine; created in Initialize() and destroyed in Finalize().
CBC_PDF417ECErrorCorrection* CBC_PDF417ScanningDecoder::errorCorrection = NULL;
CBC_PDF417ScanningDecoder::CBC_PDF417ScanningDecoder() {}
CBC_PDF417ScanningDecoder::~CBC_PDF417ScanningDecoder() {}
// Allocate the module-wide error-correction engine. Must be paired with
// Finalize(); not thread-safe (mutates a static).
void CBC_PDF417ScanningDecoder::Initialize() {
  errorCorrection = new CBC_PDF417ECErrorCorrection;
}
void CBC_PDF417ScanningDecoder::Finalize() {
  delete errorCorrection;
}
// Decode a PDF417 symbol delimited by the four corner result points.
// Up to two passes are made: the first may widen the bounding box using the
// detected row-indicator columns, the second scans every codeword column.
// On failure sets |e| and returns NULL.
CBC_CommonDecoderResult* CBC_PDF417ScanningDecoder::decode(
    CBC_CommonBitMatrix* image,
    CBC_ResultPoint* imageTopLeft,
    CBC_ResultPoint* imageBottomLeft,
    CBC_ResultPoint* imageTopRight,
    CBC_ResultPoint* imageBottomRight,
    int32_t minCodewordWidth,
    int32_t maxCodewordWidth,
    int32_t& e) {
  CBC_BoundingBox* boundingBox = new CBC_BoundingBox(
      image, imageTopLeft, imageBottomLeft, imageTopRight, imageBottomRight, e);
  BC_EXCEPTION_CHECK_ReturnValue(e, NULL);
  CBC_DetectionResultRowIndicatorColumn* leftRowIndicatorColumn = NULL;
  CBC_DetectionResultRowIndicatorColumn* rightRowIndicatorColumn = NULL;
  CBC_DetectionResult* detectionResult = NULL;
  for (int32_t i = 0; i < 2; i++) {
    // Build row-indicator columns from whichever corner points are known.
    if (imageTopLeft != NULL) {
      leftRowIndicatorColumn =
          getRowIndicatorColumn(image, boundingBox, *imageTopLeft, TRUE,
                                minCodewordWidth, maxCodewordWidth);
    }
    if (imageTopRight != NULL) {
      rightRowIndicatorColumn =
          getRowIndicatorColumn(image, boundingBox, *imageTopRight, FALSE,
                                minCodewordWidth, maxCodewordWidth);
    }
    detectionResult = merge(leftRowIndicatorColumn, rightRowIndicatorColumn, e);
    if (e != BCExceptionNO) {
      e = BCExceptiontNotFoundInstance;
      delete leftRowIndicatorColumn;
      delete rightRowIndicatorColumn;
      delete boundingBox;
      return NULL;
    }
    // First pass only: if the merged box extends beyond the initial one,
    // adopt it and detect again; otherwise keep the current box and stop.
    if (i == 0 && (detectionResult->getBoundingBox()->getMinY() <
                       boundingBox->getMinY() ||
                   detectionResult->getBoundingBox()->getMaxY() >
                       boundingBox->getMaxY())) {
      delete boundingBox;
      boundingBox = detectionResult->getBoundingBox();
    } else {
      detectionResult->setBoundingBox(boundingBox);
      break;
    }
  }
  int32_t maxBarcodeColumn = detectionResult->getBarcodeColumnCount() + 1;
  // Columns 0 and maxBarcodeColumn are the left/right row indicators.
  detectionResult->setDetectionResultColumn(0, leftRowIndicatorColumn);
  detectionResult->setDetectionResultColumn(maxBarcodeColumn,
                                            rightRowIndicatorColumn);
  FX_BOOL leftToRight = leftRowIndicatorColumn != NULL;
  for (int32_t barcodeColumnCount = 1; barcodeColumnCount <= maxBarcodeColumn;
       barcodeColumnCount++) {
    int32_t barcodeColumn = leftToRight ? barcodeColumnCount
                                        : maxBarcodeColumn - barcodeColumnCount;
    if (detectionResult->getDetectionResultColumn(barcodeColumn) != NULL) {
      continue;
    }
    CBC_DetectionResultColumn* detectionResultColumn = NULL;
    if (barcodeColumn == 0 || barcodeColumn == maxBarcodeColumn) {
      detectionResultColumn = new CBC_DetectionResultRowIndicatorColumn(
          boundingBox, barcodeColumn == 0);
    } else {
      detectionResultColumn = new CBC_DetectionResultColumn(boundingBox);
    }
    detectionResult->setDetectionResultColumn(barcodeColumn,
                                              detectionResultColumn);
    int32_t startColumn = -1;
    int32_t previousStartColumn = startColumn;
    for (int32_t imageRow = boundingBox->getMinY();
         imageRow <= boundingBox->getMaxY(); imageRow++) {
      startColumn =
          getStartColumn(detectionResult, barcodeColumn, imageRow, leftToRight);
      if (startColumn < 0 || startColumn > boundingBox->getMaxX()) {
        // Fall back to the last good start column for this barcode column.
        if (previousStartColumn == -1) {
          continue;
        }
        startColumn = previousStartColumn;
      }
      CBC_Codeword* codeword = detectCodeword(
          image, boundingBox->getMinX(), boundingBox->getMaxX(), leftToRight,
          startColumn, imageRow, minCodewordWidth, maxCodewordWidth);
      if (codeword != NULL) {
        detectionResultColumn->setCodeword(imageRow, codeword);
        previousStartColumn = startColumn;
        // Tighten the expected codeword width bounds to what was measured.
        minCodewordWidth = minCodewordWidth < codeword->getWidth()
                               ? minCodewordWidth
                               : codeword->getWidth();
        maxCodewordWidth = maxCodewordWidth > codeword->getWidth()
                               ? maxCodewordWidth
                               : codeword->getWidth();
      }
    }
  }
  CBC_CommonDecoderResult* decoderresult =
      createDecoderResult(detectionResult, e);
  if (e != BCExceptionNO) {
    delete detectionResult;
    return NULL;
  }
  return decoderresult;
}
// Debug helper: render the matrix of BarcodeValues as a byte string.
// NOTE(review): |result += row| appends the int32_t row index as a single
// character rather than its decimal representation — confirm this is the
// intended debug formatting.
CFX_ByteString CBC_PDF417ScanningDecoder::toString(
    CFX_PtrArray* barcodeMatrix) {
  CFX_ByteString result;
  for (int32_t row = 0; row < barcodeMatrix->GetSize(); row++) {
    result += row;
    int32_t l = 0;
    for (; l < ((CFX_PtrArray*)barcodeMatrix->GetAt(row))->GetSize(); l++) {
      CBC_BarcodeValue* barcodeValue =
          (CBC_BarcodeValue*)((CFX_PtrArray*)barcodeMatrix->GetAt(row))
              ->GetAt(l);
      if (barcodeValue->getValue()->GetSize() == 0) {
        result += "";
      } else {
        // Emit the most likely value and its confidence count.
        result += barcodeValue->getValue()->GetAt(0);
        result +=
            barcodeValue->getConfidence(barcodeValue->getValue()->GetAt(0));
      }
    }
  }
  return result;
}
// Combine the left/right row-indicator columns into a CBC_DetectionResult.
// At least one column must be non-NULL. Sets |e| and returns NULL on failure.
// NOTE(review): ownership of |leftboundingBox|/|rightboundingBox| after
// CBC_BoundingBox::merge(), and of |boundingBox|/|barcodeMetadata| given to
// CBC_DetectionResult, is not visible here — verify against those classes
// (possible leaks on the early-return paths).
CBC_DetectionResult* CBC_PDF417ScanningDecoder::merge(
    CBC_DetectionResultRowIndicatorColumn* leftRowIndicatorColumn,
    CBC_DetectionResultRowIndicatorColumn* rightRowIndicatorColumn,
    int32_t& e) {
  if (leftRowIndicatorColumn == NULL && rightRowIndicatorColumn == NULL) {
    e = BCExceptionIllegalArgument;
    return NULL;
  }
  CBC_BarcodeMetadata* barcodeMetadata =
      getBarcodeMetadata(leftRowIndicatorColumn, rightRowIndicatorColumn);
  if (barcodeMetadata == NULL) {
    e = BCExceptionCannotMetadata;
    return NULL;
  }
  CBC_BoundingBox* leftboundingBox =
      adjustBoundingBox(leftRowIndicatorColumn, e);
  if (e != BCExceptionNO) {
    delete barcodeMetadata;
    return NULL;
  }
  CBC_BoundingBox* rightboundingBox =
      adjustBoundingBox(rightRowIndicatorColumn, e);
  if (e != BCExceptionNO) {
    delete barcodeMetadata;
    return NULL;
  }
  CBC_BoundingBox* boundingBox =
      CBC_BoundingBox::merge(leftboundingBox, rightboundingBox, e);
  if (e != BCExceptionNO) {
    delete barcodeMetadata;
    return NULL;
  }
  CBC_DetectionResult* detectionresult =
      new CBC_DetectionResult(barcodeMetadata, boundingBox);
  return detectionresult;
}
// Grow the indicator column's bounding box to account for rows whose height
// fell short of the tallest detected row (missing start/end rows).
// Returns NULL when |rowIndicatorColumn| is NULL or on error (|e| set).
CBC_BoundingBox* CBC_PDF417ScanningDecoder::adjustBoundingBox(
    CBC_DetectionResultRowIndicatorColumn* rowIndicatorColumn,
    int32_t& e) {
  if (rowIndicatorColumn == NULL) {
    return NULL;
  }
  CFX_Int32Array* rowHeights = rowIndicatorColumn->getRowHeights(e);
  BC_EXCEPTION_CHECK_ReturnValue(e, NULL);
  int32_t maxRowHeight = getMax(*rowHeights);
  // Rows missing before the first fully-detected row.
  int32_t missingStartRows = 0;
  for (int32_t i = 0; i < rowHeights->GetSize(); i++) {
    int32_t rowHeight = rowHeights->GetAt(i);
    missingStartRows += maxRowHeight - rowHeight;
    if (rowHeight > 0) {
      break;
    }
  }
  CFX_PtrArray* codewords = rowIndicatorColumn->getCodewords();
  // Don't count leading rows that simply have no codeword at all.
  for (int32_t row = 0; missingStartRows > 0 && codewords->GetAt(row) == NULL;
       row++) {
    missingStartRows--;
  }
  // Same accounting for the trailing rows.
  int32_t missingEndRows = 0;
  for (int32_t row1 = rowHeights->GetSize() - 1; row1 >= 0; row1--) {
    missingEndRows += maxRowHeight - rowHeights->GetAt(row1);
    if (rowHeights->GetAt(row1) > 0) {
      break;
    }
  }
  for (int32_t row2 = codewords->GetSize() - 1;
       missingEndRows > 0 && codewords->GetAt(row2) == NULL; row2--) {
    missingEndRows--;
  }
  CBC_BoundingBox* boundingBox =
      rowIndicatorColumn->getBoundingBox()->addMissingRows(
          missingStartRows, missingEndRows, rowIndicatorColumn->isLeft(), e);
  BC_EXCEPTION_CHECK_ReturnValue(e, NULL);
  return boundingBox;
}
// Return the largest element of |values|, or -1 when the array is empty.
int32_t CBC_PDF417ScanningDecoder::getMax(CFX_Int32Array& values) {
  int32_t best = -1;
  const int32_t n = values.GetSize();
  for (int32_t i = 0; i < n; i++) {
    const int32_t v = values.GetAt(i);
    if (v > best) {
      best = v;
    }
  }
  return best;
}
// Reconcile the metadata decoded from the left and right indicator columns.
// Returns the surviving metadata object (caller takes ownership) or NULL
// when the two sides disagree.
// NOTE(review): the `&&` chain rejects only when ALL THREE fields differ;
// this matches the upstream ZXing PDF417ScanningDecoder source, odd as it
// looks — verify before "fixing" it to `||`.
CBC_BarcodeMetadata* CBC_PDF417ScanningDecoder::getBarcodeMetadata(
    CBC_DetectionResultRowIndicatorColumn* leftRowIndicatorColumn,
    CBC_DetectionResultRowIndicatorColumn* rightRowIndicatorColumn) {
  CBC_BarcodeMetadata* leftBarcodeMetadata = NULL;
  CBC_BarcodeMetadata* rightBarcodeMetadata = NULL;
  if (leftRowIndicatorColumn == NULL ||
      (leftBarcodeMetadata = leftRowIndicatorColumn->getBarcodeMetadata()) ==
          NULL) {
    return rightRowIndicatorColumn == NULL
               ? NULL
               : rightRowIndicatorColumn->getBarcodeMetadata();
  }
  if (rightRowIndicatorColumn == NULL ||
      (rightBarcodeMetadata = rightRowIndicatorColumn->getBarcodeMetadata()) ==
          NULL) {
    return leftRowIndicatorColumn == NULL
               ? NULL
               : leftRowIndicatorColumn->getBarcodeMetadata();
  }
  if (leftBarcodeMetadata->getColumnCount() !=
          rightBarcodeMetadata->getColumnCount() &&
      leftBarcodeMetadata->getErrorCorrectionLevel() !=
          rightBarcodeMetadata->getErrorCorrectionLevel() &&
      leftBarcodeMetadata->getRowCount() !=
          rightBarcodeMetadata->getRowCount()) {
    delete leftBarcodeMetadata;
    delete rightBarcodeMetadata;
    return NULL;
  }
  delete rightBarcodeMetadata;
  return leftBarcodeMetadata;
}
// Scan a row-indicator column starting at |startPoint|, first downward then
// upward (two passes), tracking the codeword start column as it drifts.
// Caller owns the returned column.
CBC_DetectionResultRowIndicatorColumn*
CBC_PDF417ScanningDecoder::getRowIndicatorColumn(CBC_CommonBitMatrix* image,
                                                 CBC_BoundingBox* boundingBox,
                                                 CBC_ResultPoint startPoint,
                                                 FX_BOOL leftToRight,
                                                 int32_t minCodewordWidth,
                                                 int32_t maxCodewordWidth) {
  CBC_DetectionResultRowIndicatorColumn* rowIndicatorColumn =
      new CBC_DetectionResultRowIndicatorColumn(boundingBox, leftToRight);
  for (int32_t i = 0; i < 2; i++) {
    // Pass 0 walks down from the start row, pass 1 walks up.
    int32_t increment = i == 0 ? 1 : -1;
    int32_t startColumn = (int32_t)startPoint.GetX();
    for (int32_t imageRow = (int32_t)startPoint.GetY();
         imageRow <= boundingBox->getMaxY() &&
         imageRow >= boundingBox->getMinY();
         imageRow += increment) {
      CBC_Codeword* codeword =
          detectCodeword(image, 0, image->GetWidth(), leftToRight, startColumn,
                         imageRow, minCodewordWidth, maxCodewordWidth);
      if (codeword != NULL) {
        rowIndicatorColumn->setCodeword(imageRow, codeword);
        // Follow the detected edge so the next row starts near it.
        if (leftToRight) {
          startColumn = codeword->getStartX();
        } else {
          startColumn = codeword->getEndX();
        }
      }
    }
  }
  return rowIndicatorColumn;
}
// Ensure barcodeMatrix[0][1] (the symbol-length descriptor) holds the
// codeword count implied by the detected row/column geometry.
// Sets |e| when the calculated count is out of range.
// NOTE(review): |numberOfCodewords| (from getValue()) is deleted on every
// path here — confirm getValue() returns a caller-owned copy.
void CBC_PDF417ScanningDecoder::adjustCodewordCount(
    CBC_DetectionResult* detectionResult,
    CFX_PtrArray* barcodeMatrix,
    int32_t& e) {
  CFX_Int32Array* numberOfCodewords =
      ((CBC_BarcodeValue*)((CFX_PtrArray*)barcodeMatrix->GetAt(0))->GetAt(1))
          ->getValue();
  int32_t calculatedNumberOfCodewords =
      detectionResult->getBarcodeColumnCount() *
          detectionResult->getBarcodeRowCount() -
      getNumberOfECCodeWords(detectionResult->getBarcodeECLevel());
  if (numberOfCodewords->GetSize() == 0) {
    if (calculatedNumberOfCodewords < 1 ||
        calculatedNumberOfCodewords >
            CBC_PDF417Common::MAX_CODEWORDS_IN_BARCODE) {
      e = BCExceptiontNotFoundInstance;
      delete numberOfCodewords;
      BC_EXCEPTION_CHECK_ReturnVoid(e);
    }
    ((CBC_BarcodeValue*)((CFX_PtrArray*)barcodeMatrix->GetAt(0))->GetAt(1))
        ->setValue(calculatedNumberOfCodewords);
  } else if (numberOfCodewords->GetAt(0) != calculatedNumberOfCodewords) {
    // Detected count disagrees with geometry: trust the geometry.
    ((CBC_BarcodeValue*)((CFX_PtrArray*)barcodeMatrix->GetAt(0))->GetAt(1))
        ->setValue(calculatedNumberOfCodewords);
  }
  delete numberOfCodewords;
}
// Flatten the detection result into a codeword stream, classifying each
// position as decoded, erased, or ambiguous, then decode with EC.
// Frees |barcodeMatrix| on all paths; sets |e| and returns NULL on failure.
CBC_CommonDecoderResult* CBC_PDF417ScanningDecoder::createDecoderResult(
    CBC_DetectionResult* detectionResult,
    int32_t& e) {
  CFX_PtrArray* barcodeMatrix = createBarcodeMatrix(detectionResult);
  adjustCodewordCount(detectionResult, barcodeMatrix, e);
  if (e != BCExceptionNO) {
    // Deep-free the matrix of BarcodeValues before bailing out.
    for (int32_t i = 0; i < barcodeMatrix->GetSize(); i++) {
      CFX_PtrArray* temp = (CFX_PtrArray*)barcodeMatrix->GetAt(i);
      for (int32_t j = 0; j < temp->GetSize(); j++) {
        delete (CBC_BarcodeValue*)temp->GetAt(j);
      }
      temp->RemoveAll();
      delete temp;
    }
    barcodeMatrix->RemoveAll();
    delete barcodeMatrix;
    return NULL;
  }
  CFX_Int32Array erasures;
  CFX_Int32Array codewords;
  codewords.SetSize(detectionResult->getBarcodeRowCount() *
                    detectionResult->getBarcodeColumnCount());
  CFX_PtrArray ambiguousIndexValuesList;
  CFX_Int32Array ambiguousIndexesList;
  for (int32_t row = 0; row < detectionResult->getBarcodeRowCount(); row++) {
    for (int32_t l = 0; l < detectionResult->getBarcodeColumnCount(); l++) {
      // Column l+1: column 0 of the matrix is the left row indicator.
      CFX_Int32Array* values =
          ((CBC_BarcodeValue*)((CFX_PtrArray*)barcodeMatrix->GetAt(row))
               ->GetAt(l + 1))
              ->getValue();
      int32_t codewordIndex =
          row * detectionResult->getBarcodeColumnCount() + l;
      if (values->GetSize() == 0) {
        erasures.Add(codewordIndex);
      } else if (values->GetSize() == 1) {
        codewords[codewordIndex] = values->GetAt(0);
      } else {
        // Multiple candidate values: resolve later by trial decoding.
        ambiguousIndexesList.Add(codewordIndex);
        ambiguousIndexValuesList.Add(values);
      }
    }
  }
  CFX_PtrArray ambiguousIndexValues;
  ambiguousIndexValues.SetSize(ambiguousIndexValuesList.GetSize());
  for (int32_t i = 0; i < ambiguousIndexValues.GetSize(); i++) {
    ambiguousIndexValues.SetAt(i, ambiguousIndexValuesList.GetAt(i));
  }
  for (int32_t l = 0; l < barcodeMatrix->GetSize(); l++) {
    CFX_PtrArray* temp = (CFX_PtrArray*)barcodeMatrix->GetAt(l);
    for (int32_t j = 0; j < temp->GetSize(); j++) {
      delete (CBC_BarcodeValue*)temp->GetAt(j);
    }
    temp->RemoveAll();
    delete temp;
  }
  barcodeMatrix->RemoveAll();
  delete barcodeMatrix;
  CBC_CommonDecoderResult* decoderResult =
      createDecoderResultFromAmbiguousValues(
          detectionResult->getBarcodeECLevel(), codewords, erasures,
          ambiguousIndexesList, ambiguousIndexValues, e);
  BC_EXCEPTION_CHECK_ReturnValue(e, NULL);
  return decoderResult;
}
// Try to decode the codeword stream, iterating over the possible values of
// the ambiguous codewords until decoding succeeds or the combinations are
// exhausted (capped at 100 attempts).
//
// BUG FIX: the original loop executed `continue` on a decode failure, so the
// index-increment code below was unreachable (the if/else either continued
// or returned). Ambiguous candidates were never advanced and every retry
// tested the same combination. Per the upstream ZXing PDF417ScanningDecoder
// logic, a failure must fall through to the odometer-style increment.
CBC_CommonDecoderResult*
CBC_PDF417ScanningDecoder::createDecoderResultFromAmbiguousValues(
    int32_t ecLevel,
    CFX_Int32Array& codewords,
    CFX_Int32Array& erasureArray,
    CFX_Int32Array& ambiguousIndexes,
    CFX_PtrArray& ambiguousIndexValues,
    int32_t& e) {
  CFX_Int32Array ambiguousIndexCount;
  ambiguousIndexCount.SetSize(ambiguousIndexes.GetSize());
  int32_t tries = 100;
  while (tries-- > 0) {
    // Write the currently-selected candidate for each ambiguous index.
    for (int32_t l = 0; l < ambiguousIndexCount.GetSize(); l++) {
      codewords[ambiguousIndexes[l]] =
          ((CFX_Int32Array*)ambiguousIndexValues.GetAt(l))
              ->GetAt(ambiguousIndexCount[l]);
    }
    CBC_CommonDecoderResult* decoderResult =
        decodeCodewords(codewords, ecLevel, erasureArray, e);
    if (e == BCExceptionNO) {
      return decoderResult;
    }
    e = BCExceptionNO;
    // No ambiguous positions to vary: nothing else to try.
    if (ambiguousIndexCount.GetSize() == 0) {
      e = BCExceptionChecksumInstance;
      return NULL;
    }
    // Advance to the next combination (odometer-style increment).
    for (int32_t i = 0; i < ambiguousIndexCount.GetSize(); i++) {
      if (ambiguousIndexCount[i] <
          ((CFX_Int32Array*)(ambiguousIndexValues.GetAt(i)))->GetSize() - 1) {
        ambiguousIndexCount[i]++;
        break;
      } else {
        ambiguousIndexCount[i] = 0;
        if (i == ambiguousIndexCount.GetSize() - 1) {
          // All combinations exhausted.
          e = BCExceptionChecksumInstance;
          return NULL;
        }
      }
    }
  }
  e = BCExceptionChecksumInstance;
  return NULL;
}
// Build a rowCount x (columnCount + 2) matrix of CBC_BarcodeValue cells
// (indicator columns included) and accumulate every detected codeword value
// into its cell. Caller owns the returned matrix and its contents.
CFX_PtrArray* CBC_PDF417ScanningDecoder::createBarcodeMatrix(
    CBC_DetectionResult* detectionResult) {
  CFX_PtrArray* barcodeMatrix = new CFX_PtrArray;
  barcodeMatrix->SetSize(detectionResult->getBarcodeRowCount());
  CFX_PtrArray* temp = NULL;
  int32_t colume = 0;
  for (int32_t row = 0; row < barcodeMatrix->GetSize(); row++) {
    temp = new CFX_PtrArray;
    temp->SetSize(detectionResult->getBarcodeColumnCount() + 2);
    for (colume = 0; colume < detectionResult->getBarcodeColumnCount() + 2;
         colume++) {
      temp->SetAt(colume, new CBC_BarcodeValue());
    }
    barcodeMatrix->SetAt(row, temp);
  }
  colume = -1;
  for (int32_t i = 0;
       i < detectionResult->getDetectionResultColumns().GetSize(); i++) {
    CBC_DetectionResultColumn* detectionResultColumn =
        (CBC_DetectionResultColumn*)detectionResult->getDetectionResultColumns()
            .GetAt(i);
    // Keep the matrix column index in step even for NULL columns.
    colume++;
    if (detectionResultColumn == NULL) {
      continue;
    }
    CFX_PtrArray* temp = detectionResultColumn->getCodewords();
    for (int32_t l = 0; l < temp->GetSize(); l++) {
      CBC_Codeword* codeword = (CBC_Codeword*)temp->GetAt(l);
      // Skip empty slots and codewords with no assigned row.
      if (codeword == NULL || codeword->getRowNumber() == -1) {
        continue;
      }
      ((CBC_BarcodeValue*)((CFX_PtrArray*)barcodeMatrix->GetAt(
                               codeword->getRowNumber()))
           ->GetAt(colume))
          ->setValue(codeword->getValue());
    }
  }
  return barcodeMatrix;
}
// A column index is valid in [0, barcodeColumnCount + 1]; indices 0 and
// barcodeColumnCount + 1 are the row-indicator columns.
FX_BOOL CBC_PDF417ScanningDecoder::isValidBarcodeColumn(
    CBC_DetectionResult* detectionResult,
    int32_t barcodeColumn) {
  if (barcodeColumn < 0) {
    return FALSE;
  }
  return barcodeColumn <= detectionResult->getBarcodeColumnCount() + 1;
}
// Estimate where the codeword at (barcodeColumn, imageRow) should start,
// preferring (1) the adjacent column's codeword in the same row, (2) a
// nearby codeword in this column, (3) a nearby codeword in the adjacent
// column, then (4) walking back through earlier columns and extrapolating
// by the skipped-column count; falls back to the bounding-box edge.
int32_t CBC_PDF417ScanningDecoder::getStartColumn(
    CBC_DetectionResult* detectionResult,
    int32_t barcodeColumn,
    int32_t imageRow,
    FX_BOOL leftToRight) {
  int32_t offset = leftToRight ? 1 : -1;
  CBC_Codeword* codeword = NULL;
  if (isValidBarcodeColumn(detectionResult, barcodeColumn - offset)) {
    codeword = detectionResult->getDetectionResultColumn(barcodeColumn - offset)
                   ->getCodeword(imageRow);
  }
  if (codeword != NULL) {
    return leftToRight ? codeword->getEndX() : codeword->getStartX();
  }
  codeword = detectionResult->getDetectionResultColumn(barcodeColumn)
                 ->getCodewordNearby(imageRow);
  if (codeword != NULL) {
    return leftToRight ? codeword->getStartX() : codeword->getEndX();
  }
  if (isValidBarcodeColumn(detectionResult, barcodeColumn - offset)) {
    codeword = detectionResult->getDetectionResultColumn(barcodeColumn - offset)
                   ->getCodewordNearby(imageRow);
  }
  if (codeword != NULL) {
    return leftToRight ? codeword->getEndX() : codeword->getStartX();
  }
  int32_t skippedColumns = 0;
  while (isValidBarcodeColumn(detectionResult, barcodeColumn - offset)) {
    barcodeColumn -= offset;
    for (int32_t i = 0;
         i < detectionResult->getDetectionResultColumn(barcodeColumn)
                 ->getCodewords()
                 ->GetSize();
         i++) {
      CBC_Codeword* previousRowCodeword =
          (CBC_Codeword*)detectionResult->getDetectionResultColumn(
                                            barcodeColumn)
              ->getCodewords()
              ->GetAt(i);
      if (previousRowCodeword != NULL) {
        // Extrapolate across the skipped columns using this codeword's width.
        return (leftToRight ? previousRowCodeword->getEndX()
                            : previousRowCodeword->getStartX()) +
               offset * skippedColumns * (previousRowCodeword->getEndX() -
                                          previousRowCodeword->getStartX());
      }
    }
    skippedColumns++;
  }
  return leftToRight ? detectionResult->getBoundingBox()->getMinX()
                     : detectionResult->getBoundingBox()->getMaxX();
}
// Detect a single codeword at (startColumn, imageRow): snap the start column
// to a module edge, measure the 8 module widths, and map them to a codeword
// value. Returns NULL (no codeword) when measurement or lookup fails.
CBC_Codeword* CBC_PDF417ScanningDecoder::detectCodeword(
    CBC_CommonBitMatrix* image,
    int32_t minColumn,
    int32_t maxColumn,
    FX_BOOL leftToRight,
    int32_t startColumn,
    int32_t imageRow,
    int32_t minCodewordWidth,
    int32_t maxCodewordWidth) {
  startColumn = adjustCodewordStartColumn(image, minColumn, maxColumn,
                                          leftToRight, startColumn, imageRow);
  CFX_Int32Array* moduleBitCount = getModuleBitCount(
      image, minColumn, maxColumn, leftToRight, startColumn, imageRow);
  if (moduleBitCount == NULL) {
    return NULL;
  }
  int32_t endColumn;
  int32_t codewordBitCount = CBC_PDF417Common::getBitCountSum(*moduleBitCount);
  if (leftToRight) {
    endColumn = startColumn + codewordBitCount;
  } else {
    // Right-to-left scan: reverse the module counts in place.
    // NOTE(review): `i<moduleBitCount->GetSize()>> 1` is mangled formatting
    // for `i < (moduleBitCount->GetSize() >> 1)` — shift binds tighter than
    // `<`, so this loops over the first half of the array as intended.
    for (int32_t i = 0; i<moduleBitCount->GetSize()>> 1; i++) {
      int32_t tmpCount = moduleBitCount->GetAt(i);
      moduleBitCount->SetAt(
          i, moduleBitCount->GetAt(moduleBitCount->GetSize() - 1 - i));
      moduleBitCount->SetAt(moduleBitCount->GetSize() - 1 - i, tmpCount);
    }
    endColumn = startColumn;
    startColumn = endColumn - codewordBitCount;
  }
  int32_t decodedValue =
      CBC_PDF417CodewordDecoder::getDecodedValue(*moduleBitCount);
  int32_t codeword = CBC_PDF417Common::getCodeword(decodedValue);
  delete moduleBitCount;
  if (codeword == -1) {
    return NULL;
  }
  return new CBC_Codeword(startColumn, endColumn,
                          getCodewordBucketNumber(decodedValue), codeword);
}
// Measures the widths of up to eight alternating runs of pixels starting at
// |startColumn| on |imageRow|, walking in the direction selected by
// |leftToRight|.  Returns a caller-owned array of the eight run lengths, or
// NULL when a full codeword could not be measured inside the column window.
CFX_Int32Array* CBC_PDF417ScanningDecoder::getModuleBitCount(
    CBC_CommonBitMatrix* image,
    int32_t minColumn,
    int32_t maxColumn,
    FX_BOOL leftToRight,
    int32_t startColumn,
    int32_t imageRow) {
  CFX_Int32Array* bitCounts = new CFX_Int32Array;
  bitCounts->SetSize(8);
  int32_t column = startColumn;
  int32_t step = leftToRight ? 1 : -1;
  int32_t module = 0;
  // The first run is expected to match the scan polarity.
  FX_BOOL expectedPixel = leftToRight;
  while ((leftToRight ? column < maxColumn : column >= minColumn) &&
         module < bitCounts->GetSize()) {
    if (image->Get(column, imageRow) == expectedPixel) {
      // Still inside the current run; extend it and advance.
      bitCounts->SetAt(module, bitCounts->GetAt(module) + 1);
      column += step;
    } else {
      // Pixel value flipped: start counting the next run.
      module++;
      expectedPixel = !expectedPixel;
    }
  }
  // Success iff all eight runs were measured, or seven were and the scan
  // ran exactly to the edge of the search window.
  FX_BOOL hitWindowEdge = leftToRight ? column == maxColumn
                                      : column == minColumn;
  if (module == bitCounts->GetSize() ||
      (hitWindowEdge && module == bitCounts->GetSize() - 1)) {
    return bitCounts;
  }
  delete bitCounts;
  return NULL;
}
int32_t CBC_PDF417ScanningDecoder::getNumberOfECCodeWords(
int32_t barcodeECLevel) {
return 2 << barcodeECLevel;
}
// Nudges |codewordStartColumn| onto the nearest pixel transition.  Runs two
// phases: first against the scan direction while the pixel matches the scan
// polarity, then with the scan direction for the opposite polarity.  If
// either phase drifts more than CODEWORD_SKEW_SIZE columns from the
// estimate, the original estimate is returned unchanged.
int32_t CBC_PDF417ScanningDecoder::adjustCodewordStartColumn(
    CBC_CommonBitMatrix* image,
    int32_t minColumn,
    int32_t maxColumn,
    FX_BOOL leftToRight,
    int32_t codewordStartColumn,
    int32_t imageRow) {
  int32_t correctedStartColumn = codewordStartColumn;
  // Start by moving against the scan direction.
  int32_t increment = leftToRight ? -1 : 1;
  for (int32_t i = 0; i < 2; i++) {
    while (((leftToRight && correctedStartColumn >= minColumn) ||
            (!leftToRight && correctedStartColumn < maxColumn)) &&
           leftToRight == image->Get(correctedStartColumn, imageRow)) {
      if (abs(codewordStartColumn - correctedStartColumn) >
          CODEWORD_SKEW_SIZE) {
        // Drifted too far from the estimate; trust the original column.
        return codewordStartColumn;
      }
      correctedStartColumn += increment;
    }
    // Second phase: reverse direction and look for the opposite pixel value.
    increment = -increment;
    leftToRight = !leftToRight;
  }
  return correctedStartColumn;
}
// True when |codewordSize| lies within the tolerated band
// [minCodewordWidth - CODEWORD_SKEW_SIZE, maxCodewordWidth + CODEWORD_SKEW_SIZE].
FX_BOOL CBC_PDF417ScanningDecoder::checkCodewordSkew(int32_t codewordSize,
                                                     int32_t minCodewordWidth,
                                                     int32_t maxCodewordWidth) {
  FX_BOOL notTooNarrow = codewordSize >= minCodewordWidth - CODEWORD_SKEW_SIZE;
  FX_BOOL notTooWide = codewordSize <= maxCodewordWidth + CODEWORD_SKEW_SIZE;
  return notTooNarrow && notTooWide;
}
// Applies error correction to |codewords| and decodes them into the final
// result.  |erasures| lists indices of codewords known to be unreadable.
// On failure sets |e| and returns NULL; otherwise the caller owns the
// returned decoder result.
CBC_CommonDecoderResult* CBC_PDF417ScanningDecoder::decodeCodewords(
    CFX_Int32Array& codewords,
    int32_t ecLevel,
    CFX_Int32Array& erasures,
    int32_t& e) {
  if (codewords.GetSize() == 0) {
    e = BCExceptionFormatInstance;
    return NULL;
  }
  // Security level k reserves 2^(k+1) codewords for error correction.
  int32_t numECCodewords = 1 << (ecLevel + 1);
  correctErrors(codewords, erasures, numECCodewords, e);
  BC_EXCEPTION_CHECK_ReturnValue(e, NULL);
  verifyCodewordCount(codewords, numECCodewords, e);
  BC_EXCEPTION_CHECK_ReturnValue(e, NULL);
  CFX_ByteString bytestring;
  // The EC level is handed down as its decimal string representation.
  CBC_CommonDecoderResult* decoderResult = CBC_DecodedBitStreamPaser::decode(
      codewords, bytestring.FormatInteger(ecLevel), e);
  BC_EXCEPTION_CHECK_ReturnValue(e, NULL);
  return decoderResult;
}
int32_t CBC_PDF417ScanningDecoder::correctErrors(CFX_Int32Array& codewords,
CFX_Int32Array& erasures,
int32_t numECCodewords,
int32_t& e) {
if ((erasures.GetSize() != 0 &&
erasures.GetSize() > (numECCodewords / 2 + MAX_ERRORS)) ||
numECCodewords < 0 || numECCodewords > MAX_EC_CODEWORDS) {
e = BCExceptionChecksumInstance;
return -1;
}
int32_t result = CBC_PDF417ECErrorCorrection::decode(
codewords, numECCodewords, erasures, e);
BC_EXCEPTION_CHECK_ReturnValue(e, -1);
return result;
}
// Validates the codeword-count descriptor stored in codewords[0], repairing
// it when it reads zero.  Sets |e| to a format error when the symbol is too
// small, the declared count exceeds the actual count, or a zero count
// cannot be reconstructed.
void CBC_PDF417ScanningDecoder::verifyCodewordCount(CFX_Int32Array& codewords,
                                                    int32_t numECCodewords,
                                                    int32_t& e) {
  // The barcode must carry at least two data and two EC codewords.
  if (codewords.GetSize() < 4) {
    e = BCExceptionFormatInstance;
    return;
  }
  // Codeword 0 declares how many codewords the symbol contains.
  int32_t declaredCount = codewords.GetAt(0);
  if (declaredCount > codewords.GetSize()) {
    e = BCExceptionFormatInstance;
    return;
  }
  if (declaredCount != 0) {
    return;
  }
  // A zero descriptor was unreadable; reconstruct it when the EC block
  // leaves room for at least one data codeword.
  if (numECCodewords < codewords.GetSize()) {
    codewords[0] = codewords.GetSize() - numECCodewords;
  } else {
    e = BCExceptionFormatInstance;
  }
}
// Expands a codeword bit pattern into the widths of its 8 runs, scanning
// from the least-significant bit and filling the array from the back so the
// result reads in module order.  The returned array is owned by the caller.
CFX_Int32Array* CBC_PDF417ScanningDecoder::getBitCountForCodeword(
    int32_t codeword) {
  CFX_Int32Array* result = new CFX_Int32Array;
  result->SetSize(8);
  int32_t previousValue = 0;
  int32_t i = result->GetSize() - 1;
  while (TRUE) {
    // A change in the current bit starts a new run; move to the next slot.
    if ((codeword & 0x1) != previousValue) {
      previousValue = codeword & 0x1;
      i--;
      if (i < 0) {
        // All 8 runs have been measured.
        break;
      }
    }
    result->SetAt(i, result->GetAt(i) + 1);
    codeword >>= 1;
  }
  return result;
}
// Convenience overload: derives the bucket number of a raw codeword value
// by first expanding it into its 8 run widths.
int32_t CBC_PDF417ScanningDecoder::getCodewordBucketNumber(int32_t codeword) {
  CFX_Int32Array* moduleBitCount = getBitCountForCodeword(codeword);
  int32_t bucket = getCodewordBucketNumber(*moduleBitCount);
  delete moduleBitCount;
  return bucket;
}
// Bucket number of a codeword from its run widths:
// (w0 - w2 + w4 - w6) mod 9, shifted by +9 so the C++ '%' operator never
// sees a negative left operand.
int32_t CBC_PDF417ScanningDecoder::getCodewordBucketNumber(
    CFX_Int32Array& moduleBitCount) {
  int32_t alternatingSum = moduleBitCount.GetAt(0) - moduleBitCount.GetAt(2) +
                           moduleBitCount.GetAt(4) - moduleBitCount.GetAt(6);
  return (alternatingSum + 9) % 9;
}
|
loilo-inc/loilo-pdf
|
lib/src/main/cpp/external/pdfium/xfa/src/fxbarcode/pdf417/BC_PDF417ScanningDecoder.cpp
|
C++
|
apache-2.0
| 29,539
|
// Copyright 2006 The RE2 Authors. All Rights Reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
#ifndef RE2_REGEXP_H_
#define RE2_REGEXP_H_
// --- SPONSORED LINK --------------------------------------------------
// If you want to use this library for regular expression matching,
// you should use re2/re2.h, which provides a class RE2 that
// mimics the PCRE interface provided by PCRE's C++ wrappers.
// This header describes the low-level interface used to implement RE2
// and may change in backwards-incompatible ways from time to time.
// In contrast, RE2's interface will not.
// ---------------------------------------------------------------------
// Regular expression library: parsing, execution, and manipulation
// of regular expressions.
//
// Any operation that traverses the Regexp structures should be written
// using Regexp::Walker (see walker-inl.h), not recursively, because deeply nested
// regular expressions such as x++++++++++++++++++++... might cause recursive
// traversals to overflow the stack.
//
// It is the caller's responsibility to provide appropriate mutual exclusion
// around manipulation of the regexps. RE2 does this.
//
// PARSING
//
// Regexp::Parse parses regular expressions encoded in UTF-8.
// The default syntax is POSIX extended regular expressions,
// with the following changes:
//
// 1. Backreferences (optional in POSIX EREs) are not supported.
// (Supporting them precludes the use of DFA-based
// matching engines.)
//
// 2. Collating elements and collation classes are not supported.
// (No one has needed or wanted them.)
//
// The exact syntax accepted can be modified by passing flags to
// Regexp::Parse. In particular, many of the basic Perl additions
// are available. The flags are documented below (search for LikePerl).
//
// If parsed with the flag Regexp::Latin1, both the regular expression
// and the input to the matching routines are assumed to be encoded in
// Latin-1, not UTF-8.
//
// EXECUTION
//
// Once Regexp has parsed a regular expression, it provides methods
// to search text using that regular expression. These methods are
// implemented via calling out to other regular expression libraries.
// (Let's call them the sublibraries.)
//
// To call a sublibrary, Regexp does not simply prepare a
// string version of the regular expression and hand it to the
// sublibrary. Instead, Regexp prepares, from its own parsed form, the
// corresponding internal representation used by the sublibrary.
// This has the drawback of needing to know the internal representation
// used by the sublibrary, but it has two important benefits:
//
// 1. The syntax and meaning of regular expressions is guaranteed
// to be that used by Regexp's parser, not the syntax expected
// by the sublibrary. Regexp might accept a restricted or
// expanded syntax for regular expressions as compared with
// the sublibrary. As long as Regexp can translate from its
// internal form into the sublibrary's, clients need not know
// exactly which sublibrary they are using.
//
// 2. The sublibrary parsers are bypassed. For whatever reason,
// sublibrary regular expression parsers often have security
// problems. For example, plan9grep's regular expression parser
// has a buffer overflow in its handling of large character
// classes, and PCRE's parser has had buffer overflow problems
// in the past. Security-team requires sandboxing of sublibrary
// regular expression parsers. Avoiding the sublibrary parsers
// avoids the sandbox.
//
// The execution methods we use now are provided by the compiled form,
// Prog, described in prog.h
//
// MANIPULATION
//
// Unlike other regular expression libraries, Regexp makes its parsed
// form accessible to clients, so that client code can analyze the
// parsed regular expressions.
#include <stdint.h>
#include <map>
#include <set>
#include <string>
#include "util/util.h"
#include "util/logging.h"
#include "util/utf.h"
#include "re2/stringpiece.h"
namespace re2 {
// Keep in sync with string list kOpcodeNames[] in testing/dump.cc
enum RegexpOp {
// Matches no strings.
kRegexpNoMatch = 1,
// Matches empty string.
kRegexpEmptyMatch,
// Matches rune_.
kRegexpLiteral,
// Matches runes_.
kRegexpLiteralString,
// Matches concatenation of sub_[0..nsub-1].
kRegexpConcat,
// Matches union of sub_[0..nsub-1].
kRegexpAlternate,
// Matches sub_[0] zero or more times.
kRegexpStar,
// Matches sub_[0] one or more times.
kRegexpPlus,
// Matches sub_[0] zero or one times.
kRegexpQuest,
// Matches sub_[0] at least min_ times, at most max_ times.
// max_ == -1 means no upper limit.
kRegexpRepeat,
// Parenthesized (capturing) subexpression. Index is cap_.
// Optionally, capturing name is name_.
kRegexpCapture,
// Matches any character.
kRegexpAnyChar,
// Matches any byte [sic].
kRegexpAnyByte,
// Matches empty string at beginning of line.
kRegexpBeginLine,
// Matches empty string at end of line.
kRegexpEndLine,
// Matches word boundary "\b".
kRegexpWordBoundary,
// Matches not-a-word boundary "\B".
kRegexpNoWordBoundary,
// Matches empty string at beginning of text.
kRegexpBeginText,
// Matches empty string at end of text.
kRegexpEndText,
// Matches character class given by cc_.
kRegexpCharClass,
// Forces match of entire expression right now,
// with match ID match_id_ (used by RE2::Set).
kRegexpHaveMatch,
kMaxRegexpOp = kRegexpHaveMatch,
};
// Keep in sync with string list in regexp.cc
enum RegexpStatusCode {
// No error
kRegexpSuccess = 0,
// Unexpected error
kRegexpInternalError,
// Parse errors
kRegexpBadEscape, // bad escape sequence
kRegexpBadCharClass, // bad character class
kRegexpBadCharRange, // bad character class range
kRegexpMissingBracket, // missing closing ]
kRegexpMissingParen, // missing closing )
kRegexpUnexpectedParen, // unexpected closing )
kRegexpTrailingBackslash, // at end of regexp
kRegexpRepeatArgument, // repeat argument missing, e.g. "*"
kRegexpRepeatSize, // bad repetition argument
kRegexpRepeatOp, // bad repetition operator
kRegexpBadPerlOp, // bad perl operator
kRegexpBadUTF8, // invalid UTF-8 in regexp
kRegexpBadNamedCapture, // bad named capture
};
// Error status for certain operations.
class RegexpStatus {
 public:
  RegexpStatus() : code_(kRegexpSuccess), tmp_(NULL) {}
  ~RegexpStatus() { delete tmp_; }
  void set_code(RegexpStatusCode code) { code_ = code; }
  void set_error_arg(const StringPiece& error_arg) { error_arg_ = error_arg; }
  // Takes ownership of tmp, releasing any previously held buffer.
  void set_tmp(std::string* tmp) { delete tmp_; tmp_ = tmp; }
  RegexpStatusCode code() const { return code_; }
  const StringPiece& error_arg() const { return error_arg_; }
  bool ok() const { return code() == kRegexpSuccess; }
  // Copies state from status.
  void Copy(const RegexpStatus& status);
  // Returns text equivalent of code, e.g.:
  //   "Bad character class"
  static std::string CodeText(RegexpStatusCode code);
  // Returns text describing error, e.g.:
  //   "Bad character class: [z-a]"
  std::string Text() const;
 private:
  RegexpStatusCode code_;  // Kind of error
  StringPiece error_arg_;  // Piece of regexp containing syntax error.
  std::string* tmp_;       // Temporary storage, possibly where error_arg_ is.
  RegexpStatus(const RegexpStatus&) = delete;
  RegexpStatus& operator=(const RegexpStatus&) = delete;
};
// Compiled form; see prog.h
class Prog;
// Closed range of runes [lo, hi]; building block for character classes.
struct RuneRange {
  RuneRange() : lo(0), hi(0) { }
  RuneRange(int l, int h) : lo(l), hi(h) { }
  Rune lo;  // inclusive lower bound
  Rune hi;  // inclusive upper bound
};
// Less-than on RuneRanges treats a == b if they overlap at all.
// This lets us look in a set to find the range covering a particular Rune.
struct RuneRangeLess {
  // a sorts before b only when a ends strictly before b begins; any
  // overlapping ranges therefore compare equivalent.
  bool operator()(const RuneRange& a, const RuneRange& b) const {
    return a.hi < b.lo;
  }
};
class CharClassBuilder;
// Compiled, immutable character class: a sorted array of rune ranges.
// Instances are created only through CharClassBuilder; release with
// Delete(), never operator delete (constructor/destructor are not
// implemented).
class CharClass {
 public:
  void Delete();
  typedef RuneRange* iterator;
  iterator begin() { return ranges_; }
  iterator end() { return ranges_ + nranges_; }
  // Number of runes (not ranges) in the class.
  int size() { return nrunes_; }
  bool empty() { return nrunes_ == 0; }
  bool full() { return nrunes_ == Runemax+1; }
  bool FoldsASCII() { return folds_ascii_; }
  bool Contains(Rune r);
  CharClass* Negate();
 private:
  CharClass();   // not implemented
  ~CharClass();  // not implemented
  static CharClass* New(int maxranges);
  friend class CharClassBuilder;
  bool folds_ascii_;
  int nrunes_;
  RuneRange *ranges_;
  int nranges_;
  CharClass(const CharClass&) = delete;
  CharClass& operator=(const CharClass&) = delete;
};
// A parsed regular expression, represented as a tree of Regexp nodes.
// Logically immutable once created and reference-counted: release with
// Decref(), never delete.  See the file comment above for the parsing,
// execution, and manipulation overviews.
class Regexp {
 public:
  // Flags for parsing.  Can be ORed together.
  enum ParseFlags {
    NoParseFlags = 0,
    FoldCase = 1<<0,  // Fold case during matching (case-insensitive).
    Literal = 1<<1,  // Treat s as literal string instead of a regexp.
    ClassNL = 1<<2,  // Allow char classes like [^a-z] and \D and \s
                     // and [[:space:]] to match newline.
    DotNL = 1<<3,  // Allow . to match newline.
    MatchNL = ClassNL | DotNL,
    OneLine = 1<<4,  // Treat ^ and $ as only matching at beginning and
                     // end of text, not around embedded newlines.
                     // (Perl's default)
    Latin1 = 1<<5,  // Regexp and text are in Latin1, not UTF-8.
    NonGreedy = 1<<6,  // Repetition operators are non-greedy by default.
    PerlClasses = 1<<7,  // Allow Perl character classes like \d.
    PerlB = 1<<8,  // Allow Perl's \b and \B.
    PerlX = 1<<9,  // Perl extensions:
                   //   non-capturing parens - (?: )
                   //   non-greedy operators - *? +? ?? {}?
                   //   flag edits - (?i) (?-i) (?i: )
                   //     i - FoldCase
                   //     m - !OneLine
                   //     s - DotNL
                   //     U - NonGreedy
                   //   line ends: \A \z
                   //   \Q and \E to disable/enable metacharacters
                   //   (?P<name>expr) for named captures
                   //   \C to match any single byte
    UnicodeGroups = 1<<10,  // Allow \p{Han} for Unicode Han group
                            //   and \P{Han} for its negation.
    NeverNL = 1<<11,  // Never match NL, even if the regexp mentions
                      //   it explicitly.
    NeverCapture = 1<<12,  // Parse all parens as non-capturing.
    // As close to Perl as we can get.
    LikePerl = ClassNL | OneLine | PerlClasses | PerlB | PerlX |
               UnicodeGroups,
    // Internal use only.
    WasDollar = 1<<13,  // on kRegexpEndText: was $ in regexp text
    AllParseFlags = (1<<14)-1,
  };
  // Get.  No set, Regexps are logically immutable once created.
  RegexpOp op() { return static_cast<RegexpOp>(op_); }
  int nsub() { return nsub_; }
  bool simple() { return simple_ != 0; }
  ParseFlags parse_flags() { return static_cast<ParseFlags>(parse_flags_); }
  int Ref();  // For testing.
  // Sub-expressions.  With a single child the pointer is stored inline
  // (subone_); with more, an allocated array (submany_) is used.
  Regexp** sub() {
    if(nsub_ <= 1)
      return &subone_;
    else
      return submany_;
  }
  int min() { DCHECK_EQ(op_, kRegexpRepeat); return min_; }
  int max() { DCHECK_EQ(op_, kRegexpRepeat); return max_; }
  Rune rune() { DCHECK_EQ(op_, kRegexpLiteral); return rune_; }
  CharClass* cc() { DCHECK_EQ(op_, kRegexpCharClass); return cc_; }
  int cap() { DCHECK_EQ(op_, kRegexpCapture); return cap_; }
  const std::string* name() { DCHECK_EQ(op_, kRegexpCapture); return name_; }
  Rune* runes() { DCHECK_EQ(op_, kRegexpLiteralString); return runes_; }
  int nrunes() { DCHECK_EQ(op_, kRegexpLiteralString); return nrunes_; }
  int match_id() { DCHECK_EQ(op_, kRegexpHaveMatch); return match_id_; }
  // Increments reference count, returns object as convenience.
  Regexp* Incref();
  // Decrements reference count and deletes this object if count reaches 0.
  void Decref();
  // Parses string s to produce regular expression, returned.
  // Caller must release return value with re->Decref().
  // On failure, sets *status (if status != NULL) and returns NULL.
  static Regexp* Parse(const StringPiece& s, ParseFlags flags,
                       RegexpStatus* status);
  // Returns a _new_ simplified version of the current regexp.
  // Does not edit the current regexp.
  // Caller must release return value with re->Decref().
  // Simplified means that counted repetition has been rewritten
  // into simpler terms and all Perl/POSIX features have been
  // removed.  The result will capture exactly the same
  // subexpressions the original did, unless formatted with ToString.
  Regexp* Simplify();
  friend class CoalesceWalker;
  friend class SimplifyWalker;
  // Parses the regexp src and then simplifies it and sets *dst to the
  // string representation of the simplified form.  Returns true on success.
  // Returns false and sets *status (if status != NULL) on parse error.
  static bool SimplifyRegexp(const StringPiece& src, ParseFlags flags,
                             std::string* dst, RegexpStatus* status);
  // Returns the number of capturing groups in the regexp.
  int NumCaptures();
  friend class NumCapturesWalker;
  // Returns a map from names to capturing group indices,
  // or NULL if the regexp contains no named capture groups.
  // The caller is responsible for deleting the map.
  std::map<std::string, int>* NamedCaptures();
  // Returns a map from capturing group indices to capturing group
  // names or NULL if the regexp contains no named capture groups. The
  // caller is responsible for deleting the map.
  std::map<int, std::string>* CaptureNames();
  // Returns a string representation of the current regexp,
  // using as few parentheses as possible.
  std::string ToString();
  // Convenience functions.  They consume the passed reference,
  // so in many cases you should use, e.g., Plus(re->Incref(), flags).
  // They do not consume allocated arrays like subs or runes.
  static Regexp* Plus(Regexp* sub, ParseFlags flags);
  static Regexp* Star(Regexp* sub, ParseFlags flags);
  static Regexp* Quest(Regexp* sub, ParseFlags flags);
  static Regexp* Concat(Regexp** subs, int nsubs, ParseFlags flags);
  static Regexp* Alternate(Regexp** subs, int nsubs, ParseFlags flags);
  static Regexp* Capture(Regexp* sub, ParseFlags flags, int cap);
  static Regexp* Repeat(Regexp* sub, ParseFlags flags, int min, int max);
  static Regexp* NewLiteral(Rune rune, ParseFlags flags);
  static Regexp* NewCharClass(CharClass* cc, ParseFlags flags);
  static Regexp* LiteralString(Rune* runes, int nrunes, ParseFlags flags);
  static Regexp* HaveMatch(int match_id, ParseFlags flags);
  // Like Alternate but does not factor out common prefixes.
  static Regexp* AlternateNoFactor(Regexp** subs, int nsubs, ParseFlags flags);
  // Debugging function.  Returns string format for regexp
  // that makes structure clear.  Does NOT use regexp syntax.
  std::string Dump();
  // Helper traversal class, defined fully in walker-inl.h.
  template<typename T> class Walker;
  // Compile to Prog.  See prog.h
  // Reverse prog expects to be run over text backward.
  // Construction and execution of prog will
  // stay within approximately max_mem bytes of memory.
  // If max_mem <= 0, a reasonable default is used.
  Prog* CompileToProg(int64_t max_mem);
  Prog* CompileToReverseProg(int64_t max_mem);
  // Whether to expect this library to find exactly the same answer as PCRE
  // when running this regexp.  Most regexps do mimic PCRE exactly, but a few
  // obscure cases behave differently.  Technically this is more a property
  // of the Prog than the Regexp, but the computation is much easier to do
  // on the Regexp.  See mimics_pcre.cc for the exact conditions.
  bool MimicsPCRE();
  // Benchmarking function.
  void NullWalk();
  // Whether every match of this regexp must be anchored and
  // begin with a non-empty fixed string (perhaps after ASCII
  // case-folding).  If so, returns the prefix and the sub-regexp that
  // follows it.
  // Callers should expect *prefix, *foldcase and *suffix to be "zeroed"
  // regardless of the return value.
  bool RequiredPrefix(std::string* prefix, bool* foldcase,
                      Regexp** suffix);
  // Whether every match of this regexp must be unanchored and
  // begin with a non-empty fixed string (perhaps after ASCII
  // case-folding).  If so, returns the prefix.
  // Callers should expect *prefix and *foldcase to be "zeroed"
  // regardless of the return value.
  bool RequiredPrefixForAccel(std::string* prefix, bool* foldcase);
 private:
  // Constructor allocates vectors as appropriate for operator.
  explicit Regexp(RegexpOp op, ParseFlags parse_flags);
  // Use Decref() instead of delete to release Regexps.
  // This is private to catch deletes at compile time.
  ~Regexp();
  void Destroy();
  bool QuickDestroy();
  // Helpers for Parse.  Listed here so they can edit Regexps.
  class ParseState;
  friend class ParseState;
  friend bool ParseCharClass(StringPiece* s, Regexp** out_re,
                             RegexpStatus* status);
  // Helper for testing [sic].
  friend bool RegexpEqualTestingOnly(Regexp*, Regexp*);
  // Computes whether Regexp is already simple.
  bool ComputeSimple();
  // Constructor that generates a Star, Plus or Quest,
  // squashing the pair if sub is also a Star, Plus or Quest.
  static Regexp* StarPlusOrQuest(RegexpOp op, Regexp* sub, ParseFlags flags);
  // Constructor that generates a concatenation or alternation,
  // enforcing the limit on the number of subexpressions for
  // a particular Regexp.
  static Regexp* ConcatOrAlternate(RegexpOp op, Regexp** subs, int nsubs,
                                   ParseFlags flags, bool can_factor);
  // Returns the leading string that re starts with.
  // The returned Rune* points into a piece of re,
  // so it must not be used after the caller calls re->Decref().
  static Rune* LeadingString(Regexp* re, int* nrune, ParseFlags* flags);
  // Removes the first n leading runes from the beginning of re.
  // Edits re in place.
  static void RemoveLeadingString(Regexp* re, int n);
  // Returns the leading regexp in re's top-level concatenation.
  // The returned Regexp* points at re or a sub-expression of re,
  // so it must not be used after the caller calls re->Decref().
  static Regexp* LeadingRegexp(Regexp* re);
  // Removes LeadingRegexp(re) from re and returns the remainder.
  // Might edit re in place.
  static Regexp* RemoveLeadingRegexp(Regexp* re);
  // Simplifies an alternation of literal strings by factoring out
  // common prefixes.
  static int FactorAlternation(Regexp** sub, int nsub, ParseFlags flags);
  friend class FactorAlternationImpl;
  // Is a == b?  Only efficient on regexps that have not been through
  // Simplify yet - the expansion of a kRegexpRepeat will make this
  // take a long time.  Do not call on such regexps, hence private.
  static bool Equal(Regexp* a, Regexp* b);
  // Allocate space for n sub-regexps.
  // n must fit in the uint16_t nsub_ field (checked by the DCHECK).
  void AllocSub(int n) {
    DCHECK(n >= 0 && static_cast<uint16_t>(n) == n);
    if (n > 1)
      submany_ = new Regexp*[n];
    nsub_ = static_cast<uint16_t>(n);
  }
  // Add Rune to LiteralString
  void AddRuneToString(Rune r);
  // Swaps this with that, in place.
  void Swap(Regexp *that);
  // Operator.  See description of operators above.
  // uint8_t instead of RegexpOp to control space usage.
  uint8_t op_;
  // Is this regexp structure already simple
  // (has it been returned by Simplify)?
  // uint8_t instead of bool to control space usage.
  uint8_t simple_;
  // Flags saved from parsing and used during execution.
  // (Only FoldCase is used.)
  // uint16_t instead of ParseFlags to control space usage.
  uint16_t parse_flags_;
  // Reference count.  Exists so that SimplifyRegexp can build
  // regexp structures that are dags rather than trees to avoid
  // exponential blowup in space requirements.
  // uint16_t to control space usage.
  // The standard regexp routines will never generate a
  // ref greater than the maximum repeat count (kMaxRepeat),
  // but even so, Incref and Decref consult an overflow map
  // when ref_ reaches kMaxRef.
  uint16_t ref_;
  static const uint16_t kMaxRef = 0xffff;
  // Subexpressions.
  // uint16_t to control space usage.
  // Concat and Alternate handle larger numbers of subexpressions
  // by building concatenation or alternation trees.
  // Other routines should call Concat or Alternate instead of
  // filling in sub() by hand.
  uint16_t nsub_;
  static const uint16_t kMaxNsub = 0xffff;
  union {
    Regexp** submany_;  // if nsub_ > 1
    Regexp* subone_;  // if nsub_ == 1
  };
  // Extra space for parse and teardown stacks.
  Regexp* down_;
  // Arguments to operator.  See description of operators above.
  union {
    struct {  // Repeat
      int max_;
      int min_;
    };
    struct {  // Capture
      int cap_;
      std::string* name_;
    };
    struct {  // LiteralString
      int nrunes_;
      Rune* runes_;
    };
    struct {  // CharClass
      // These two could be in separate union members,
      // but it wouldn't save any space (there are other two-word structs)
      // and keeping them separate avoids confusion during parsing.
      CharClass* cc_;
      CharClassBuilder* ccb_;
    };
    Rune rune_;  // Literal
    int match_id_;  // HaveMatch
    void *the_union_[2];  // as big as any other element, for memset
  };
  Regexp(const Regexp&) = delete;
  Regexp& operator=(const Regexp&) = delete;
};
// Character class set: contains non-overlapping, non-abutting RuneRanges.
typedef std::set<RuneRange, RuneRangeLess> RuneRangeSet;
// Mutable builder used while parsing to accumulate a character class;
// converted to the immutable CharClass via GetCharClass().
class CharClassBuilder {
 public:
  CharClassBuilder();
  typedef RuneRangeSet::iterator iterator;
  iterator begin() { return ranges_.begin(); }
  iterator end() { return ranges_.end(); }
  // Number of runes (not ranges) currently in the class.
  int size() { return nrunes_; }
  bool empty() { return nrunes_ == 0; }
  bool full() { return nrunes_ == Runemax+1; }
  bool Contains(Rune r);
  bool FoldsASCII();
  bool AddRange(Rune lo, Rune hi);  // returns whether class changed
  CharClassBuilder* Copy();
  void AddCharClass(CharClassBuilder* cc);
  void Negate();
  void RemoveAbove(Rune r);
  CharClass* GetCharClass();
  void AddRangeFlags(Rune lo, Rune hi, Regexp::ParseFlags parse_flags);
 private:
  static const uint32_t AlphaMask = (1<<26) - 1;
  uint32_t upper_;  // bitmap of A-Z
  uint32_t lower_;  // bitmap of a-z
  int nrunes_;
  RuneRangeSet ranges_;
  CharClassBuilder(const CharClassBuilder&) = delete;
  CharClassBuilder& operator=(const CharClassBuilder&) = delete;
};
// Bitwise ops on ParseFlags produce ParseFlags.
// OR-combining two parse-flag sets yields a parse-flag set.
inline Regexp::ParseFlags operator|(Regexp::ParseFlags a,
                                    Regexp::ParseFlags b) {
  int bits = static_cast<int>(a) | static_cast<int>(b);
  return static_cast<Regexp::ParseFlags>(bits);
}
// XOR-combining two parse-flag sets yields a parse-flag set.
inline Regexp::ParseFlags operator^(Regexp::ParseFlags a,
                                    Regexp::ParseFlags b) {
  int bits = static_cast<int>(a) ^ static_cast<int>(b);
  return static_cast<Regexp::ParseFlags>(bits);
}
// AND-combining two parse-flag sets yields a parse-flag set.
inline Regexp::ParseFlags operator&(Regexp::ParseFlags a,
                                    Regexp::ParseFlags b) {
  int bits = static_cast<int>(a) & static_cast<int>(b);
  return static_cast<Regexp::ParseFlags>(bits);
}
inline Regexp::ParseFlags operator~(Regexp::ParseFlags a) {
  // Mask with AllParseFlags: producing a value outside the enum's range
  // would have undefined behaviour.
  int bits = ~static_cast<int>(a) & static_cast<int>(Regexp::AllParseFlags);
  return static_cast<Regexp::ParseFlags>(bits);
}
} // namespace re2
#endif // RE2_REGEXP_H_
|
tsandall/opa
|
wasm/src/re2/re2/regexp.h
|
C
|
apache-2.0
| 23,984
|
require 'spec_helper'

type_class = Puppet::Type.type(:image)

# Unit tests for the :image type: verifies that the type declares the
# expected parameters and properties and enforces a name.
describe type_class do
  let(:params) { [:name] }

  let(:properties) do
    %i[
      description
      location
      id
      size
      cpu_hot_plug
      cpu_hot_unplug
      ram_hot_plug
      ram_hot_unplug
      nic_hot_plug
      nic_hot_unplug
      disc_virtio_hot_plug
      disc_virtio_hot_unplug
      disc_scsi_hot_plug
      disc_scsi_hot_unplug
      public
      image_type
      image_aliases
      licence_type
    ]
  end

  it 'should have expected properties' do
    declared = type_class.properties.map(&:name)
    properties.each do |property|
      expect(declared).to be_include(property)
    end
  end

  it 'should have expected parameters' do
    params.each do |param|
      expect(type_class.parameters).to be_include(param)
    end
  end

  it 'should require a name' do
    expect {
      type_class.new({})
    }.to raise_error(Puppet::Error, 'Title or name must be provided')
  end
end
|
profitbricks/profitbricks-puppet
|
spec/unit/type/image_spec.rb
|
Ruby
|
apache-2.0
| 1,013
|
// scalac: -Xfatal-warnings
sealed trait Fails // exhaustivity regression test: varargs vs Seq children
case class VarArgs1(a: String*) extends Fails // varargs case class
case class FailsChild2(a: Seq[String]) extends Fails // plain Seq case class
object FailsTest {
  def t1(f: Fails) = f match { // inexhaustive on both, was: no warning
    case VarArgs1(_) => ???
  }
  def t2(f: Fails) = f match { // inexhaustive on VarArgs1
    case FailsChild2(_) => ???
  }
  def t12(f: Fails) = f match { // inexhaustive on VarArgs1, was: no warning
    case VarArgs1(_) => ???
    case FailsChild2(_) => ???
  }
  def t21(f: Fails) = f match { // inexhaustive on VarArgs1, was: no warning
    case FailsChild2(_) => ???
    case VarArgs1(_) => ???
  }
}
sealed trait Works // control hierarchy: both children use explicit Seq
case class SeqArgs1(a: Seq[String]) extends Works
case class SeqArgs2(a: Seq[String]) extends Works
object WorksTest {
  def t12(f: Works) = f match {
    case SeqArgs1(_) => ???
    case SeqArgs2(_) => ???
  }
  def t1(f: Works) = f match { // inexhaustive on SeqArgs2
    case SeqArgs1(_) => ???
  }
}
|
scala/scala
|
test/files/neg/t8178.scala
|
Scala
|
apache-2.0
| 983
|
/*
* Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.print;
import java.awt.GraphicsEnvironment;
import java.awt.Toolkit;
import javax.print.attribute.*;
import javax.print.attribute.standard.*;
import javax.print.DocFlavor;
import javax.print.DocPrintJob;
import javax.print.PrintService;
import javax.print.ServiceUIFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Locale;
import java.util.Date;
import java.util.Arrays;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import javax.print.event.PrintServiceAttributeListener;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.net.HttpURLConnection;
import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.DataInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.Iterator;
import java.util.HashSet;
import java.util.Map;
public class IPPPrintService implements PrintService, SunPrinterJobService {
// Whether IPP debug tracing is enabled; set once in the static initializer
// from the "sun.print.ippdebug" system property.
public static final boolean debugPrint;
// Prefix prepended to every debug line emitted by this class.
private static final String debugPrefix = "IPPPrintService>> ";
// Prints str to stdout only when debug tracing is enabled.
protected static void debug_println(String str) {
if (debugPrint) {
System.out.println(str);
}
}
// System property that turns on IPP debug tracing when set to "true".
private static final String FORCE_PIPE_PROP = "sun.print.ippdebug";
static {
@SuppressWarnings("removal")
String debugStr = java.security.AccessController.doPrivileged(
new sun.security.action.GetPropertyAction(FORCE_PIPE_PROP));
debugPrint = "true".equalsIgnoreCase(debugStr);
}
// URL-decoded printer (queue) name.
private String printer;
// IPP URI of the printer (ipp://host/printers/name) when known.
private URI myURI;
// HTTP URL used for the IPP-over-HTTP connection.
private URL myURL;
// Delivers PrintServiceAttribute events to registered listeners.
private transient ServiceNotifier notifier = null;
// Fallback upper bound for CopiesSupported when the printer reports none.
private static int MAXCOPIES = 1000;
// Maximum length of an IPP attribute value we will send.
private static short MAX_ATTRIBUTE_LENGTH = 255;
// CUPS-specific helper (PPD access); null for non-CUPS printers.
private CUPSPrinter cps;
// Connection reused while initAttributes() is querying the printer.
private HttpURLConnection urlConnection = null;
// Lazily-built caches; null until first computed (see the getters).
private DocFlavor[] supportedDocFlavors;
private Class<?>[] supportedCats;
private MediaTray[] mediaTrays;
private MediaSizeName[] mediaSizeNames;
private CustomMediaSizeName[] customMediaSizeNames;
private int defaultMediaIndex;
private int[] rawResolutions = null;
private PrinterResolution[] printerResolutions = null;
// True when this printer is served by the local CUPS server.
private boolean isCupsPrinter;
// True once initAttributes() has populated the caches above.
private boolean init;
private Boolean isPS;
// Attribute-name -> AttributeClass map built from the IPP
// get-attributes response; null when the query failed.
private HashMap<String, AttributeClass> getAttMap;
// Set by getSupportedDocFlavors() when the printer accepts these
// image formats; read by isIPPSupportedImages().
private boolean pngImagesAdded = false;
private boolean gifImagesAdded = false;
private boolean jpgImagesAdded = false;
/**
* IPP Status Codes
*/
private static final byte STATUSCODE_SUCCESS = 0x00;
/**
* IPP Group Tags.  Each tag is used once before the first attribute
* of that group.
*/
// operation attributes group
private static final byte GRPTAG_OP_ATTRIBUTES = 0x01;
// job attributes group
private static final byte GRPTAG_JOB_ATTRIBUTES = 0x02;
// printer attributes group
private static final byte GRPTAG_PRINTER_ATTRIBUTES = 0x04;
// used as the last tag in an IPP message.
private static final byte GRPTAG_END_ATTRIBUTES = 0x03;
/**
* IPP Operation codes
*/
// gets the attributes for a printer
public static final String OP_GET_ATTRIBUTES = "000B";
// gets the default printer
public static final String OP_CUPS_GET_DEFAULT = "4001";
// gets the list of printers
public static final String OP_CUPS_GET_PRINTERS = "4002";
/**
* List of all PrintRequestAttributes.  This is used
* for looping through all the IPP attribute name.
*/
private static Object[] printReqAttribDefault = {
Chromaticity.COLOR,
new Copies(1),
Fidelity.FIDELITY_FALSE,
Finishings.NONE,
//new JobHoldUntil(new Date()),
//new JobImpressions(0),
//JobImpressions,
//JobKOctets,
//JobMediaSheets,
new JobName("", Locale.getDefault()),
//JobPriority,
JobSheets.NONE,
(Media)MediaSizeName.NA_LETTER,
//MediaPrintableArea.class, // not an IPP attribute
//MultipleDocumentHandling.SINGLE_DOCUMENT,
new NumberUp(1),
OrientationRequested.PORTRAIT,
new PageRanges(1),
//PresentationDirection,
// CUPS does not supply printer-resolution attribute
//new PrinterResolution(300, 300, PrinterResolution.DPI),
//PrintQuality.NORMAL,
new RequestingUserName("", Locale.getDefault()),
//SheetCollate.UNCOLLATED, //CUPS has no sheet collate?
Sides.ONE_SIDED,
};
/**
* List of all PrintServiceAttributes.  This is used
* for looping through all the IPP attribute name.
* Each entry pairs the attribute class with its IPP keyword.
*/
private static Object[][] serviceAttributes = {
{ColorSupported.class, "color-supported"},
{PagesPerMinute.class, "pages-per-minute"},
{PagesPerMinuteColor.class, "pages-per-minute-color"},
{PDLOverrideSupported.class, "pdl-override-supported"},
{PrinterInfo.class, "printer-info"},
{PrinterIsAcceptingJobs.class, "printer-is-accepting-jobs"},
{PrinterLocation.class, "printer-location"},
{PrinterMakeAndModel.class, "printer-make-and-model"},
{PrinterMessageFromOperator.class, "printer-message-from-operator"},
{PrinterMoreInfo.class, "printer-more-info"},
{PrinterMoreInfoManufacturer.class, "printer-more-info-manufacturer"},
{PrinterName.class, "printer-name"},
{PrinterState.class, "printer-state"},
{PrinterStateReasons.class, "printer-state-reasons"},
{PrinterURI.class, "printer-uri"},
{QueuedJobCount.class, "queued-job-count"}
};
/**
* List of DocFlavors, grouped based on matching mime-type.
* NOTE: For any change in the predefined DocFlavors, it must be reflected
* here also.
*/
// PDF DocFlavors
private static DocFlavor[] appPDF = {
DocFlavor.BYTE_ARRAY.PDF,
DocFlavor.INPUT_STREAM.PDF,
DocFlavor.URL.PDF
};
// Postscript DocFlavors
private static DocFlavor[] appPostScript = {
DocFlavor.BYTE_ARRAY.POSTSCRIPT,
DocFlavor.INPUT_STREAM.POSTSCRIPT,
DocFlavor.URL.POSTSCRIPT
};
// Autosense DocFlavors
private static DocFlavor[] appOctetStream = {
DocFlavor.BYTE_ARRAY.AUTOSENSE,
DocFlavor.INPUT_STREAM.AUTOSENSE,
DocFlavor.URL.AUTOSENSE
};
// Text DocFlavors
private static DocFlavor[] textPlain = {
DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_8,
DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16,
DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16BE,
DocFlavor.BYTE_ARRAY.TEXT_PLAIN_UTF_16LE,
DocFlavor.BYTE_ARRAY.TEXT_PLAIN_US_ASCII,
DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_8,
DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16,
DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16BE,
DocFlavor.INPUT_STREAM.TEXT_PLAIN_UTF_16LE,
DocFlavor.INPUT_STREAM.TEXT_PLAIN_US_ASCII,
DocFlavor.URL.TEXT_PLAIN_UTF_8,
DocFlavor.URL.TEXT_PLAIN_UTF_16,
DocFlavor.URL.TEXT_PLAIN_UTF_16BE,
DocFlavor.URL.TEXT_PLAIN_UTF_16LE,
DocFlavor.URL.TEXT_PLAIN_US_ASCII,
DocFlavor.CHAR_ARRAY.TEXT_PLAIN,
DocFlavor.STRING.TEXT_PLAIN,
DocFlavor.READER.TEXT_PLAIN
};
// Host-encoded text flavors; only offered when the host encoding is
// not one of the predefined charsets above.
private static DocFlavor[] textPlainHost = {
DocFlavor.BYTE_ARRAY.TEXT_PLAIN_HOST,
DocFlavor.INPUT_STREAM.TEXT_PLAIN_HOST,
DocFlavor.URL.TEXT_PLAIN_HOST
};
// JPG DocFlavors
private static DocFlavor[] imageJPG = {
DocFlavor.BYTE_ARRAY.JPEG,
DocFlavor.INPUT_STREAM.JPEG,
DocFlavor.URL.JPEG
};
// GIF DocFlavors
private static DocFlavor[] imageGIF = {
DocFlavor.BYTE_ARRAY.GIF,
DocFlavor.INPUT_STREAM.GIF,
DocFlavor.URL.GIF
};
// PNG DocFlavors
private static DocFlavor[] imagePNG = {
DocFlavor.BYTE_ARRAY.PNG,
DocFlavor.INPUT_STREAM.PNG,
DocFlavor.URL.PNG
};
// HTML DocFlavors
private static DocFlavor[] textHtml = {
DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_8,
DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_16,
DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_16BE,
DocFlavor.BYTE_ARRAY.TEXT_HTML_UTF_16LE,
DocFlavor.BYTE_ARRAY.TEXT_HTML_US_ASCII,
DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_8,
DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_16,
DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_16BE,
DocFlavor.INPUT_STREAM.TEXT_HTML_UTF_16LE,
DocFlavor.INPUT_STREAM.TEXT_HTML_US_ASCII,
DocFlavor.URL.TEXT_HTML_UTF_8,
DocFlavor.URL.TEXT_HTML_UTF_16,
DocFlavor.URL.TEXT_HTML_UTF_16BE,
DocFlavor.URL.TEXT_HTML_UTF_16LE,
DocFlavor.URL.TEXT_HTML_US_ASCII,
// These are not handled in UnixPrintJob so commenting these
// for now.
/*
DocFlavor.CHAR_ARRAY.TEXT_HTML,
DocFlavor.STRING.TEXT_HTML,
DocFlavor.READER.TEXT_HTML,
*/
};
private static DocFlavor[] textHtmlHost = {
DocFlavor.BYTE_ARRAY.TEXT_HTML_HOST,
DocFlavor.INPUT_STREAM.TEXT_HTML_HOST,
DocFlavor.URL.TEXT_HTML_HOST,
};
// PCL DocFlavors
private static DocFlavor[] appPCL = {
DocFlavor.BYTE_ARRAY.PCL,
DocFlavor.INPUT_STREAM.PCL,
DocFlavor.URL.PCL
};
// List of all DocFlavors, used in looping
// through all supported mime-types
private static Object[] allDocFlavors = {
appPDF, appPostScript, appOctetStream,
textPlain, imageJPG, imageGIF, imagePNG,
textHtml, appPCL,
};
/*
* Creates a service for the named printer reachable at the given HTTP
* URL.  The name is URL-decoded (kept verbatim if decoding fails) and,
* when the URL's host is the local CUPS server, an ipp:// URI for the
* queue is derived as well.  All caches start empty and are filled
* lazily by initAttributes().
*/
IPPPrintService(String name, URL url) {
if ((name == null) || (url == null)){
throw new IllegalArgumentException("null uri or printer name");
}
try {
printer = java.net.URLDecoder.decode(name, "UTF-8");
} catch (java.io.UnsupportedEncodingException e) {
// UTF-8 is guaranteed by the platform; fall back to the raw name.
printer = name;
}
supportedDocFlavors = null;
supportedCats = null;
mediaSizeNames = null;
customMediaSizeNames = null;
mediaTrays = null;
myURL = url;
cps = null;
isCupsPrinter = false;
init = false;
defaultMediaIndex = -1;
String host = myURL.getHost();
if (host!=null && host.equals(CUPSPrinter.getServer())) {
// Printer hosted on the local CUPS server: build its IPP URI.
isCupsPrinter = true;
try {
myURI = new URI("ipp://"+host+
"/printers/"+printer);
debug_println(debugPrefix+"IPPPrintService myURI : "+myURI);
} catch (java.net.URISyntaxException e) {
throw new IllegalArgumentException("invalid url");
}
}
}
/*
* Creates a service from a printer name and an ipp:// URI string.
* The HTTP URL used for the connection is derived by replacing the
* first "ipp" scheme prefix with "http"; isCups tells us whether the
* queue is served by CUPS (enables PPD-based media queries).
*/
IPPPrintService(String name, String uriStr, boolean isCups) {
if ((name == null) || (uriStr == null)){
throw new IllegalArgumentException("null uri or printer name");
}
try {
printer = java.net.URLDecoder.decode(name, "UTF-8");
} catch (java.io.UnsupportedEncodingException e) {
// UTF-8 is guaranteed by the platform; fall back to the raw name.
printer = name;
}
supportedDocFlavors = null;
supportedCats = null;
mediaSizeNames = null;
customMediaSizeNames = null;
mediaTrays = null;
cps = null;
init = false;
defaultMediaIndex = -1;
try {
// IPP runs over HTTP; rewrite the scheme for URLConnection use.
myURL =
new URL(uriStr.replaceFirst("ipp", "http"));
} catch (Exception e) {
IPPPrintService.debug_println(debugPrefix+
" IPPPrintService, myURL="+
myURL+" Exception= "+
e);
throw new IllegalArgumentException("invalid url");
}
isCupsPrinter = isCups;
try {
myURI = new URI(uriStr);
debug_println(debugPrefix+"IPPPrintService myURI : "+myURI);
} catch (java.net.URISyntaxException e) {
throw new IllegalArgumentException("invalid uri");
}
}
/*
 * Initialize mediaSizeNames, mediaTrays and other attributes.
 * Media size/trays are initialized to non-null values, may be 0-length
 * array.
 * NOTE: Must be called from a synchronized block only.
 *
 * Idempotent: does nothing once {@code init} is true.  On success the
 * attribute map is filled via an IPP get-attributes request; media is
 * taken from the CUPS PPD when available, otherwise from the IPP
 * "media-supported" attribute.
 */
private void initAttributes() {
    if (!init) {
        // init customMediaSizeNames; CUPS may replace this below.
        customMediaSizeNames = new CustomMediaSizeName[0];
        if ((urlConnection = getIPPConnection(myURL)) == null) {
            // No IPP connection: expose empty (non-null) media arrays.
            mediaSizeNames = new MediaSizeName[0];
            mediaTrays = new MediaTray[0];
            debug_println(debugPrefix+"initAttributes, NULL urlConnection ");
            init = true;
            return;
        }
        // get all supported attributes through IPP
        opGetAttributes();
        if (isCupsPrinter) {
            // note, it is possible to query media in CUPS using IPP
            // right now we always get it from PPD.
            // maybe use "&& (usePPD)" later?
            // Another reason why we use PPD is because
            // IPP currently does not support it but PPD does.
            try {
                cps = new CUPSPrinter(printer);
                mediaSizeNames = cps.getMediaSizeNames();
                mediaTrays = cps.getMediaTrays();
                customMediaSizeNames = cps.getCustomMediaSizeNames();
                defaultMediaIndex = cps.getDefaultMediaIndex();
                rawResolutions = cps.getRawResolutions();
                urlConnection.disconnect();
                init = true;
                return;
            } catch (Exception e) {
                // Fall through to the pure-IPP media path below.
                IPPPrintService.debug_println(debugPrefix+
                        "initAttributes, error creating CUPSPrinter e="+e);
            }
        }
        // use IPP to get all media, split into sizes and trays.
        Media[] allMedia = getSupportedMedia();
        ArrayList<Media> sizeList = new ArrayList<>();
        ArrayList<Media> trayList = new ArrayList<>();
        for (int i=0; i<allMedia.length; i++) {
            if (allMedia[i] instanceof MediaSizeName) {
                sizeList.add(allMedia[i]);
            } else if (allMedia[i] instanceof MediaTray) {
                trayList.add(allMedia[i]);
            }
        }
        // The lists are never null here, so the former null checks and
        // two-step toArray calls were redundant and have been removed.
        mediaSizeNames = sizeList.toArray(new MediaSizeName[sizeList.size()]);
        mediaTrays = trayList.toArray(new MediaTray[trayList.size()]);
        urlConnection.disconnect();
        init = true;
    }
}
/**
 * Creates a print job bound to this service, after the security
 * manager (if installed) has granted print-job access.
 */
public DocPrintJob createPrintJob() {
    @SuppressWarnings("removal")
    SecurityManager sm = System.getSecurityManager();
    if (sm != null) {
        sm.checkPrintJobAccess();
    }
    // REMIND: create IPPPrintJob
    return new UnixPrintJob(this);
}
/*
* Returns the supported values for the given attribute category,
* optionally restricted by flavor and a set of already-chosen
* attributes.  Returns null when the category (or the flavor/attribute
* combination) is unsupported.  The concrete return type depends on
* the category, per the PrintService contract.
*/
public synchronized Object
getSupportedAttributeValues(Class<? extends Attribute> category,
DocFlavor flavor,
AttributeSet attributes)
{
if (category == null) {
throw new NullPointerException("null category");
}
if (!Attribute.class.isAssignableFrom(category)) {
throw new IllegalArgumentException(category +
" does not implement Attribute");
}
if (flavor != null) {
if (!isDocFlavorSupported(flavor)) {
throw new IllegalArgumentException(flavor +
" is an unsupported flavor");
} else if (isAutoSense(flavor)) {
// Autosense data is passed through untouched; no attribute
// can be applied to it.
return null;
}
}
if (!isAttributeCategorySupported(category)) {
return null;
}
/* Test if the flavor is compatible with the attributes */
if (!isDestinationSupported(flavor, attributes)) {
return null;
}
initAttributes();
/* Test if the flavor is compatible with the category */
if ((category == Copies.class) ||
(category == CopiesSupported.class)) {
// Copies is not supported for pre-rendered PostScript data.
if (flavor == null ||
!(flavor.equals(DocFlavor.INPUT_STREAM.POSTSCRIPT) ||
flavor.equals(DocFlavor.URL.POSTSCRIPT) ||
flavor.equals(DocFlavor.BYTE_ARRAY.POSTSCRIPT))) {
// Default range, overridden by the printer-reported range
// when "copies-supported" is in the attribute map.
CopiesSupported cs = new CopiesSupported(1, MAXCOPIES);
AttributeClass attribClass = (getAttMap != null) ?
getAttMap.get(cs.getName()) : null;
if (attribClass != null) {
int[] range = attribClass.getIntRangeValue();
cs = new CopiesSupported(range[0], range[1]);
}
return cs;
} else {
return null;
}
} else if (category == Chromaticity.class) {
if (flavor == null ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE) ||
!isIPPSupportedImages(flavor.getMimeType())) {
// Only COLOR is ever offered here.
Chromaticity[]arr = new Chromaticity[1];
arr[0] = Chromaticity.COLOR;
return (arr);
} else {
return null;
}
} else if (category == Destination.class) {
if (flavor == null ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE)) {
try {
return new Destination((new File("out.ps")).toURI());
} catch (SecurityException se) {
// Sandboxed: fall back to a relative file URI.
try {
return new Destination(new URI("file:out.ps"));
} catch (URISyntaxException e) {
return null;
}
}
}
return null;
} else if (category == Fidelity.class) {
Fidelity []arr = new Fidelity[2];
arr[0] = Fidelity.FIDELITY_FALSE;
arr[1] = Fidelity.FIDELITY_TRUE;
return arr;
} else if (category == Finishings.class) {
AttributeClass attribClass = (getAttMap != null) ?
getAttMap.get("finishings-supported")
: null;
if (attribClass != null) {
int[] finArray = attribClass.getArrayOfIntValues();
if ((finArray != null) && (finArray.length > 0)) {
// Map each IPP finishing code to the matching Finishings
// enum; unknown codes degrade to NONE.
Finishings[] finSup = new Finishings[finArray.length];
for (int i=0; i<finArray.length; i++) {
finSup[i] = Finishings.NONE;
Finishings[] fAll = (Finishings[])
(new ExtFinishing(100)).getAll();
for (int j=0; j<fAll.length; j++) {
if (fAll[j] == null) {
continue;
}
if (finArray[i] == fAll[j].getValue()) {
finSup[i] = fAll[j];
break;
}
}
}
return finSup;
}
}
} else if (category == JobName.class) {
return new JobName("Java Printing", null);
} else if (category == JobSheets.class) {
JobSheets[] arr = new JobSheets[2];
arr[0] = JobSheets.NONE;
arr[1] = JobSheets.STANDARD;
return arr;
} else if (category == Media.class) {
// Concatenate sizes and trays (both filled by initAttributes).
Media[] allMedia = new Media[mediaSizeNames.length+
mediaTrays.length];
for (int i=0; i<mediaSizeNames.length; i++) {
allMedia[i] = mediaSizeNames[i];
}
for (int i=0; i<mediaTrays.length; i++) {
allMedia[i+mediaSizeNames.length] = mediaTrays[i];
}
if (allMedia.length == 0) {
// Nothing reported: offer at least the default media.
allMedia = new Media[1];
allMedia[0] = (Media)getDefaultAttributeValue(Media.class);
}
return allMedia;
} else if (category == MediaPrintableArea.class) {
MediaPrintableArea[] mpas = null;
if (cps != null) {
mpas = cps.getMediaPrintableArea();
}
if (mpas == null) {
// No PPD data: synthesize a single default printable area.
mpas = new MediaPrintableArea[1];
mpas[0] = (MediaPrintableArea)
getDefaultAttributeValue(MediaPrintableArea.class);
}
if ((attributes == null) || (attributes.size() == 0)) {
// No media constraint given: return all non-null areas.
ArrayList<MediaPrintableArea> printableList =
new ArrayList<MediaPrintableArea>();
for (int i=0; i<mpas.length; i++) {
if (mpas[i] != null) {
printableList.add(mpas[i]);
}
}
if (printableList.size() > 0) {
mpas  = new MediaPrintableArea[printableList.size()];
printableList.toArray(mpas);
}
return mpas;
}
// A media was specified: return only the matching area.
int match = -1;
Media media = (Media)attributes.get(Media.class);
if (media != null && media instanceof MediaSizeName) {
MediaSizeName msn = (MediaSizeName)media;
// case when no supported mediasizenames are reported
// check given media against the default
if (mediaSizeNames.length == 0 &&
msn.equals(getDefaultAttributeValue(Media.class))) {
//default printable area is that of default mediasize
return mpas;
}
for (int i=0; i<mediaSizeNames.length; i++) {
if (msn.equals(mediaSizeNames[i])) {
match = i;
}
}
}
if (match == -1) {
return null;
} else {
MediaPrintableArea []arr = new MediaPrintableArea[1];
arr[0] = mpas[match];
return arr;
}
} else if (category == NumberUp.class) {
AttributeClass attribClass = (getAttMap != null) ?
getAttMap.get("number-up-supported") : null;
if (attribClass != null) {
int[] values = attribClass.getArrayOfIntValues();
if (values != null) {
NumberUp[] nUp = new NumberUp[values.length];
for (int i=0; i<values.length; i++) {
nUp[i] = new NumberUp(values[i]);
}
return nUp;
} else {
return null;
}
}
} else if (category == OrientationRequested.class) {
// Orientation cannot be applied to pre-rendered PostScript.
if ((flavor != null) &&
(flavor.equals(DocFlavor.INPUT_STREAM.POSTSCRIPT) ||
flavor.equals(DocFlavor.URL.POSTSCRIPT) ||
flavor.equals(DocFlavor.BYTE_ARRAY.POSTSCRIPT))) {
return null;
}
// Whether the printer itself reports REVERSE_PORTRAIT support.
boolean revPort = false;
OrientationRequested[] orientSup = null;
AttributeClass attribClass = (getAttMap != null) ?
getAttMap.get("orientation-requested-supported")
: null;
if (attribClass != null) {
int[] orientArray = attribClass.getArrayOfIntValues();
if ((orientArray != null) && (orientArray.length > 0)) {
orientSup =
new OrientationRequested[orientArray.length];
for (int i=0; i<orientArray.length; i++) {
// IPP orientation-requested codes: 3=portrait,
// 4=landscape, 5=reverse-landscape, 6=reverse-portrait.
switch (orientArray[i]) {
default:
case 3 :
orientSup[i] = OrientationRequested.PORTRAIT;
break;
case 4:
orientSup[i] = OrientationRequested.LANDSCAPE;
break;
case 5:
orientSup[i] =
OrientationRequested.REVERSE_LANDSCAPE;
break;
case 6:
orientSup[i] =
OrientationRequested.REVERSE_PORTRAIT;
revPort = true;
break;
}
}
}
}
if (flavor == null ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE)) {
// For 2D-rendered jobs we can emulate any orientation;
// REVERSE_PORTRAIT is offered only when flavor is null and
// the printer reported it.
if (revPort && flavor == null) {
OrientationRequested []orSup = new OrientationRequested[4];
orSup[0] = OrientationRequested.PORTRAIT;
orSup[1] = OrientationRequested.LANDSCAPE;
orSup[2] = OrientationRequested.REVERSE_LANDSCAPE;
orSup[3] = OrientationRequested.REVERSE_PORTRAIT;
return orSup;
} else {
OrientationRequested []orSup = new OrientationRequested[3];
orSup[0] = OrientationRequested.PORTRAIT;
orSup[1] = OrientationRequested.LANDSCAPE;
orSup[2] = OrientationRequested.REVERSE_LANDSCAPE;
return orSup;
}
} else {
return orientSup;
}
} else if (category == PageRanges.class) {
if (flavor == null ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE)) {
PageRanges []arr = new PageRanges[1];
arr[0] = new PageRanges(1, Integer.MAX_VALUE);
return arr;
} else {
// Returning null as this is not yet supported in UnixPrintJob.
return null;
}
} else if (category == RequestingUserName.class) {
String userName = "";
try {
userName = System.getProperty("user.name", "");
} catch (SecurityException se) {
// No permission to read the property: report empty name.
}
return new RequestingUserName(userName, null);
} else if (category == Sides.class) {
// The printer takes care of Sides so if short-edge
// is chosen in a job, the rotation is done by the printer.
// Orientation is rotated by emulation if pageable
// or printable so if the document is in Landscape, this may
// result in double rotation.
AttributeClass attribClass = (getAttMap != null) ?
getAttMap.get("sides-supported")
: null;
if (attribClass != null) {
String[] sidesArray = attribClass.getArrayOfStringValues();
if ((sidesArray != null) && (sidesArray.length > 0)) {
Sides[] sidesSup = new Sides[sidesArray.length];
for (int i=0; i<sidesArray.length; i++) {
// IPP keywords: "two-sided-long-edge",
// "two-sided-short-edge", anything else = one-sided.
if (sidesArray[i].endsWith("long-edge")) {
sidesSup[i] = Sides.TWO_SIDED_LONG_EDGE;
} else if (sidesArray[i].endsWith("short-edge")) {
sidesSup[i] = Sides.TWO_SIDED_SHORT_EDGE;
} else {
sidesSup[i] = Sides.ONE_SIDED;
}
}
return sidesSup;
}
}
} else if (category == PrinterResolution.class) {
PrinterResolution[] supportedRes = getPrintResolutions();
if (supportedRes == null) {
return null;
}
// Return a defensive copy of the cached array.
PrinterResolution []arr =
new PrinterResolution[supportedRes.length];
System.arraycopy(supportedRes, 0, arr, 0, supportedRes.length);
return arr;
}
return null;
}
//This class is for getting all pre-defined Finishings
@SuppressWarnings("serial") // JDK implementation class
private class ExtFinishing extends Finishings {
// NOTE(review): the value argument is ignored; the enum value is
// hard-coded to 100 to avoid conflicts with predefined Finishings.
// Callers in this file always pass 100 anyway — confirm before
// relying on the parameter.
ExtFinishing(int value) {
super(100); // 100 to avoid any conflicts with predefined values.
}
// Exposes the protected enum table of all predefined Finishings.
EnumSyntax[] getAll() {
EnumSyntax[] es = super.getEnumValueTable();
return es;
}
}
/**
 * Returns the subset of {@code attributes} this service cannot honor
 * for the given flavor, or null when everything is supported (or when
 * {@code attributes} itself is null).
 *
 * @throws IllegalArgumentException if the flavor is non-null and
 *         unsupported by this service
 */
public AttributeSet getUnsupportedAttributes(DocFlavor flavor,
                                             AttributeSet attributes) {
    if (flavor != null && !isDocFlavorSupported(flavor)) {
        throw new IllegalArgumentException("flavor " + flavor +
                                           "is not supported");
    }
    if (attributes == null) {
        return null;
    }
    AttributeSet unsupported = new HashAttributeSet();
    for (Attribute attribute : attributes.toArray()) {
        try {
            boolean categoryOk =
                isAttributeCategorySupported(attribute.getCategory());
            if (!categoryOk ||
                !isAttributeValueSupported(attribute, flavor, attributes)) {
                unsupported.add(attribute);
            }
        } catch (ClassCastException e) {
            // Malformed attribute/category pairing: skip it.
        }
    }
    return unsupported.isEmpty() ? null : unsupported;
}
/**
 * Returns (and caches) the DocFlavors this printer supports, derived
 * from the IPP "document-format-supported" attribute.  Host-encoded
 * text flavors are added only when the host encoding is not already a
 * predefined charset; image and SERVICE_FORMATTED flavors are added
 * for PostScript-capable and CUPS printers.  Falls back to the two
 * SERVICE_FORMATTED flavors when no IPP data is available.
 */
public synchronized DocFlavor[] getSupportedDocFlavors() {
    if (supportedDocFlavors != null) {
        // Return a defensive copy of the cached array.
        int len = supportedDocFlavors.length;
        DocFlavor[] copyflavors = new DocFlavor[len];
        System.arraycopy(supportedDocFlavors, 0, copyflavors, 0, len);
        return copyflavors;
    }
    initAttributes();
    if ((getAttMap != null) &&
        getAttMap.containsKey("document-format-supported")) {
        AttributeClass attribClass =
            getAttMap.get("document-format-supported");
        if (attribClass != null) {
            String mimeType;
            boolean psSupported = false;
            String[] docFlavors = attribClass.getArrayOfStringValues();
            DocFlavor[] flavors;
            HashSet<Object> docList = new HashSet<>();
            int j;
            String hostEnc = DocFlavor.hostEncoding.
                toLowerCase(Locale.ENGLISH);
            // Host-encoded variants are only useful when the host
            // encoding is not one of the predefined charsets.
            boolean addHostEncoding = !hostEnc.equals("utf-8") &&
                !hostEnc.equals("utf-16") && !hostEnc.equals("utf-16be") &&
                !hostEnc.equals("utf-16le") && !hostEnc.equals("us-ascii");
            for (int i = 0; i < docFlavors.length; i++) {
                for (j=0; j<allDocFlavors.length; j++) {
                    flavors = (DocFlavor[])allDocFlavors[j];
                    mimeType = flavors[0].getMimeType();
                    if (mimeType.startsWith(docFlavors[i])) {
                        docList.addAll(Arrays.asList(flavors));
                        if (mimeType.equals("text/plain") &&
                            addHostEncoding) {
                            // FIX: was docList.add(Arrays.asList(...)),
                            // which inserted the List itself as a single
                            // element; toArray(DocFlavor[]) below would
                            // then throw ArrayStoreException and the
                            // host flavors were never registered.
                            docList.addAll(Arrays.asList(textPlainHost));
                        } else if (mimeType.equals("text/html") &&
                                   addHostEncoding) {
                            // FIX: same add -> addAll correction.
                            docList.addAll(Arrays.asList(textHtmlHost));
                        } else if (mimeType.equals("image/png")) {
                            pngImagesAdded = true;
                        } else if (mimeType.equals("image/gif")) {
                            gifImagesAdded = true;
                        } else if (mimeType.equals("image/jpeg")) {
                            jpgImagesAdded = true;
                        } else if (mimeType.indexOf("postscript") != -1) {
                            psSupported = true;
                        }
                        break;
                    }
                }
                // Not added? Create new DocFlavors
                if (j == allDocFlavors.length) {
                    // make new DocFlavors
                    docList.add(new DocFlavor.BYTE_ARRAY(docFlavors[i]));
                    docList.add(new DocFlavor.INPUT_STREAM(docFlavors[i]));
                    docList.add(new DocFlavor.URL(docFlavors[i]));
                }
            }
            // check if we need to add image DocFlavors
            // and Pageable/Printable flavors
            if (psSupported || isCupsPrinter) {
                /*
                 Always add Pageable and Printable for CUPS
                 since it uses Filters to convert from Postscript
                 to device printer language.
                */
                docList.add(DocFlavor.SERVICE_FORMATTED.PAGEABLE);
                docList.add(DocFlavor.SERVICE_FORMATTED.PRINTABLE);
                docList.addAll(Arrays.asList(imageJPG));
                docList.addAll(Arrays.asList(imagePNG));
                docList.addAll(Arrays.asList(imageGIF));
            }
            supportedDocFlavors = new DocFlavor[docList.size()];
            docList.toArray(supportedDocFlavors);
            int len = supportedDocFlavors.length;
            DocFlavor[] copyflavors = new DocFlavor[len];
            System.arraycopy(supportedDocFlavors, 0, copyflavors, 0, len);
            return copyflavors;
        }
    }
    // No usable IPP data: 2D-rendered jobs are always possible.
    DocFlavor[] flavor = new DocFlavor[2];
    flavor[0] = DocFlavor.SERVICE_FORMATTED.PAGEABLE;
    flavor[1] = DocFlavor.SERVICE_FORMATTED.PRINTABLE;
    supportedDocFlavors = flavor;
    return flavor;
}
/**
 * Returns true if the given flavor is among this printer's supported
 * DocFlavors (computing and caching them on first use).
 */
public boolean isDocFlavorSupported(DocFlavor flavor) {
    if (supportedDocFlavors == null) {
        getSupportedDocFlavors();   // lazily populate the cache
    }
    if (supportedDocFlavors != null) {
        for (DocFlavor supported : supportedDocFlavors) {
            if (flavor.equals(supported)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Finds matching CustomMediaSizeName of given media.
 * Returns null when no custom sizes are known or none maps to
 * the given standard media.
 */
public CustomMediaSizeName findCustomMedia(MediaSizeName media) {
    if (customMediaSizeNames == null) {
        return null;
    }
    for (CustomMediaSizeName custom : customMediaSizeNames) {
        if (media.equals(custom.getStandardMedia())) {
            return custom;
        }
    }
    return null;
}
/**
 * Returns the matching standard Media using string comparison of names.
 * Checks the MediaSizeName enum table first, then the MediaTray table;
 * null when the IPP keyword matches neither.
 */
private Media getIPPMedia(String mediaName) {
    // Throwaway instances exist only to reach the protected enum tables.
    CustomMediaSizeName sampleSize =
        new CustomMediaSizeName("sample", "", 0, 0);
    for (Media size : sampleSize.getSuperEnumTable()) {
        if (mediaName.equals("" + size)) {
            return size;
        }
    }
    CustomMediaTray sampleTray = new CustomMediaTray("sample", "");
    for (Media tray : sampleTray.getSuperEnumTable()) {
        if (mediaName.equals("" + tray)) {
            return tray;
        }
    }
    return null;
}
/**
 * Builds the Media list from the IPP "media-supported" attribute.
 * Entries whose keyword matches no standard Media stay null in the
 * returned array; an empty array is returned when the attribute is
 * absent.
 */
private Media[] getSupportedMedia() {
    if ((getAttMap != null) &&
        getAttMap.containsKey("media-supported")) {
        AttributeClass attribClass = getAttMap.get("media-supported");
        if (attribClass != null) {
            String[] mediaVals = attribClass.getArrayOfStringValues();
            Media[] mediaNames = new Media[mediaVals.length];
            for (int i = 0; i < mediaVals.length; i++) {
                //REMIND: if null, create custom?
                mediaNames[i] = getIPPMedia(mediaVals[i]);
            }
            return mediaNames;
        }
    }
    return new Media[0];
}
/*
* Returns (and caches) the attribute categories this service supports:
* every default request attribute whose "<name>-supported" keyword is
* present in the IPP attribute map, plus CUPS- and environment-specific
* additions.  A defensive copy of the cache is returned.
*/
public synchronized Class<?>[] getSupportedAttributeCategories() {
if (supportedCats != null) {
Class<?> [] copyCats = new Class<?>[supportedCats.length];
System.arraycopy(supportedCats, 0, copyCats, 0, copyCats.length);
return copyCats;
}
initAttributes();
ArrayList<Class<?>> catList = new ArrayList<>();
for (int i=0; i < printReqAttribDefault.length; i++) {
PrintRequestAttribute pra =
(PrintRequestAttribute)printReqAttribDefault[i];
// A category is supported when the printer advertises the
// corresponding "<ipp-name>-supported" keyword.
if (getAttMap != null &&
getAttMap.containsKey(pra.getName()+"-supported")) {
catList.add(pra.getCategory());
}
}
// Some IPP printers like lexc710 do not have list of supported media
// but CUPS can get the media from PPD, so we still report as
// supported category.
if (isCupsPrinter) {
if (!catList.contains(Media.class)) {
catList.add(Media.class);
}
// Always add MediaPrintable for cups,
// because we can get it from PPD.
catList.add(MediaPrintableArea.class);
// this is already supported in UnixPrintJob
catList.add(Destination.class);
// It is unfortunate that CUPS doesn't provide a way to query
// if printer supports collation but since most printers
// now supports collation and that most OS has a way
// of setting it, it is a safe assumption to just always
// include SheetCollate as supported attribute.
catList.add(SheetCollate.class);
}
// With the assumption that Chromaticity is equivalent to
// ColorSupported.
if (getAttMap != null && getAttMap.containsKey("color-supported")) {
catList.add(Chromaticity.class);
}
// CUPS does not report printer resolution via IPP but it
// may be gleaned from the PPD.
PrinterResolution[] supportedRes = getPrintResolutions();
if (supportedRes != null && (supportedRes.length > 0)) {
catList.add(PrinterResolution.class);
}
// Dialog-related categories only make sense with a display.
if (GraphicsEnvironment.isHeadless() == false) {
catList.add(DialogOwner.class);
catList.add(DialogTypeSelection.class);
}
supportedCats = new Class<?>[catList.size()];
catList.toArray(supportedCats);
Class<?>[] copyCats = new Class<?>[supportedCats.length];
System.arraycopy(supportedCats, 0, copyCats, 0, copyCats.length);
return copyCats;
}
/**
 * Returns true if the given category is supported by this service.
 *
 * @throws NullPointerException     if category is null
 * @throws IllegalArgumentException if category is not an Attribute type
 */
public boolean
    isAttributeCategorySupported(Class<? extends Attribute> category)
{
    if (category == null) {
        throw new NullPointerException("null category");
    }
    if (!(Attribute.class.isAssignableFrom(category))) {
        throw new IllegalArgumentException(category +
                                           " is not an Attribute");
    }
    if (supportedCats == null) {
        getSupportedAttributeCategories();   // populate the cache
    }
    // Orientation is always reported as supported: even if CUPS or the
    // IPP device says otherwise, our renderer can do portrait,
    // landscape and reverse landscape.
    if (category == OrientationRequested.class) {
        return true;
    }
    for (Class<?> supported : supportedCats) {
        if (category == supported) {
            return true;
        }
    }
    return false;
}
/*
* Returns the current value of a single PrintServiceAttribute, built
* from the IPP attribute map where possible and falling back to a
* reasonable default otherwise.  Returns null for categories this
* service does not report.
*/
@SuppressWarnings("unchecked")
public synchronized <T extends PrintServiceAttribute>
T getAttribute(Class<T> category)
{
if (category == null) {
throw new NullPointerException("category");
}
if (!(PrintServiceAttribute.class.isAssignableFrom(category))) {
throw new IllegalArgumentException("Not a PrintServiceAttribute");
}
initAttributes();
if (category == PrinterName.class) {
return (T)(new PrinterName(printer, null));
} else if (category == PrinterInfo.class) {
PrinterInfo pInfo = new PrinterInfo(printer, null);
AttributeClass ac = (getAttMap != null) ?
getAttMap.get(pInfo.getName())
: null;
if (ac != null) {
return (T)(new PrinterInfo(ac.getStringValue(), null));
}
// No "printer-info" reported: fall back to the printer name.
return (T)pInfo;
} else if (category == QueuedJobCount.class) {
QueuedJobCount qjc = new QueuedJobCount(0);
AttributeClass ac = (getAttMap != null) ?
getAttMap.get(qjc.getName())
: null;
if (ac != null) {
qjc = new QueuedJobCount(ac.getIntValue());
}
return (T)qjc;
} else if (category == PrinterIsAcceptingJobs.class) {
// Default to ACCEPTING; IPP byte value 0 means not accepting.
PrinterIsAcceptingJobs accJob =
PrinterIsAcceptingJobs.ACCEPTING_JOBS;
AttributeClass ac = (getAttMap != null) ?
getAttMap.get(accJob.getName())
: null;
if ((ac != null) && (ac.getByteValue() == 0)) {
accJob = PrinterIsAcceptingJobs.NOT_ACCEPTING_JOBS;
}
return (T)accJob;
} else if (category == ColorSupported.class) {
// Default to SUPPORTED; IPP byte value 0 means no color.
ColorSupported cs = ColorSupported.SUPPORTED;
AttributeClass ac = (getAttMap != null) ?
getAttMap.get(cs.getName())
: null;
if ((ac != null) && (ac.getByteValue() == 0)) {
cs = ColorSupported.NOT_SUPPORTED;
}
return (T)cs;
} else if (category == PDLOverrideSupported.class) {
if (isCupsPrinter) {
// Documented: For CUPS this will always be false
return (T)PDLOverrideSupported.NOT_ATTEMPTED;
} else {
// REMIND: check attribute values
return (T)PDLOverrideSupported.NOT_ATTEMPTED;
}
} else if (category == PrinterURI.class) {
return (T)(new PrinterURI(myURI));
} else {
return null;
}
}
/**
 * Returns an unmodifiable snapshot of this printer's current service
 * attributes, refreshing the IPP attribute map first.
 */
public synchronized PrintServiceAttributeSet getAttributes() {
    // Force a fresh get-attributes IPP request before reading.
    init = false;
    initAttributes();
    HashPrintServiceAttributeSet attrs =
        new HashPrintServiceAttributeSet();
    for (Object[] entry : serviceAttributes) {
        String name = (String) entry[1];
        if (getAttMap != null && getAttMap.containsKey(name)) {
            @SuppressWarnings("unchecked")
            Class<PrintServiceAttribute> c =
                (Class<PrintServiceAttribute>) entry[0];
            PrintServiceAttribute psa = getAttribute(c);
            if (psa != null) {
                attrs.add(psa);
            }
        }
    }
    return AttributeSetUtilities.unmodifiableView(attrs);
}
/**
 * Returns true if the printer natively accepts the given image MIME
 * type, per the flags recorded while building the DocFlavor list.
 */
public boolean isIPPSupportedImages(String mimeType) {
    if (supportedDocFlavors == null) {
        getSupportedDocFlavors();   // sets the *ImagesAdded flags
    }
    switch (mimeType) {
        case "image/png":
            return pngImagesAdded;
        case "image/gif":
            return gifImagesAdded;
        case "image/jpeg":
            return jpgImagesAdded;
        default:
            return false;
    }
}
/**
 * Returns true if the requested copy count falls within the range the
 * printer reports (or within 1..MAXCOPIES when no range is reported).
 */
private boolean isSupportedCopies(Copies copies) {
    CopiesSupported cs = (CopiesSupported)
        getSupportedAttributeValues(Copies.class, null, null);
    int[][] members = cs.getMembers();
    int min = 1;
    int max = MAXCOPIES;
    if ((members.length > 0) && (members[0].length > 0)) {
        min = members[0][0];
        max = members[0][1];
    }
    int value = copies.getValue();
    return (value >= min && value <= max);
}
/**
 * Returns true for the AUTOSENSE flavors, whose raw bytes are passed
 * straight to the printer for format detection.
 */
private boolean isAutoSense(DocFlavor flavor) {
    return flavor.equals(DocFlavor.BYTE_ARRAY.AUTOSENSE)
        || flavor.equals(DocFlavor.INPUT_STREAM.AUTOSENSE)
        || flavor.equals(DocFlavor.URL.AUTOSENSE);
}
/**
 * Returns true if the given tray is among the trays reported by the
 * printer (initializing the tray list on first use).
 */
private synchronized boolean isSupportedMediaTray(MediaTray msn) {
    initAttributes();
    if (mediaTrays != null) {
        for (MediaTray tray : mediaTrays) {
            if (msn.equals(tray)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Returns true if the given size name is the printer's default medium or
 * appears in its reported list of media sizes.
 */
private synchronized boolean isSupportedMedia(MediaSizeName msn) {
    initAttributes();
    // The default medium is always considered supported.
    if (msn.equals((Media)getDefaultAttributeValue(Media.class))) {
        return true;
    }
    for (MediaSizeName candidate : mediaSizeNames) {
        debug_println(debugPrefix+"isSupportedMedia, mediaSizeNames[i] "+candidate);
        if (msn.equals(candidate)) {
            return true;
        }
    }
    return false;
}
/*
 * Destination is only honored for null, pageable, or printable flavors.
 * Returns false exactly when a Destination attribute is present and the
 * flavor is some other (pre-formatted) type.
 */
private boolean
isDestinationSupported(DocFlavor flavor, AttributeSet attributes) {
    if (attributes == null || attributes.get(Destination.class) == null) {
        return true;   // no Destination requested, nothing to veto
    }
    return flavor == null
        || flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE)
        || flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE);
}
/**
 * Tells whether the specific attribute value is supported when printing
 * documents of the given flavor with the given attribute set, applying
 * per-category rules on top of the general category-support check.
 *
 * @param attr       the attribute value to test (non-null)
 * @param flavor     the doc flavor, or null for "any flavor"
 * @param attributes proposed job attributes, may be null
 * @return true if the value is supported in this context
 * @throws NullPointerException if attr is null
 * @throws IllegalArgumentException if flavor is non-null and unsupported
 */
public boolean isAttributeValueSupported(Attribute attr,
                                         DocFlavor flavor,
                                         AttributeSet attributes) {
    if (attr == null) {
        throw new NullPointerException("null attribute");
    }
    if (flavor != null) {
        if (!isDocFlavorSupported(flavor)) {
            throw new IllegalArgumentException(flavor +
                                               " is an unsupported flavor");
        } else if (isAutoSense(flavor)) {
            // Autosense flavors carry no attribute-support information.
            return false;
        }
    }
    Class<? extends Attribute> category = attr.getCategory();
    if (!isAttributeCategorySupported(category)) {
        return false;
    }

    /* Test if the flavor is compatible with the attributes */
    if (!isDestinationSupported(flavor, attributes)) {
        return false;
    }

    /* Test if the flavor is compatible with the category */
    if (attr.getCategory() == Chromaticity.class) {
        // With a null flavor the first operand short-circuits, so
        // getMimeType() is never invoked on null.
        if ((flavor == null) ||
            flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
            flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE) ||
            !isIPPSupportedImages(flavor.getMimeType())) {
            // Only COLOR is reported as supported.
            return attr == Chromaticity.COLOR;
        } else {
            return false;
        }
    } else if (attr.getCategory() == Copies.class) {
        // Multiple copies are not supported for raw PostScript data.
        return (flavor == null ||
               !(flavor.equals(DocFlavor.INPUT_STREAM.POSTSCRIPT) ||
               flavor.equals(DocFlavor.URL.POSTSCRIPT) ||
               flavor.equals(DocFlavor.BYTE_ARRAY.POSTSCRIPT))) &&
            isSupportedCopies((Copies)attr);
    } else if (attr.getCategory() == Destination.class) {
        if (flavor == null ||
            flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
            flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE)) {
            // Only non-empty file: URIs are accepted as destinations.
            URI uri = ((Destination)attr).getURI();
            if ("file".equals(uri.getScheme()) &&
                !uri.getSchemeSpecificPart().isEmpty()) {
                return true;
            }
        }
        return false;
    } else if (attr.getCategory() == Media.class) {
        if (attr instanceof MediaSizeName) {
            return isSupportedMedia((MediaSizeName)attr);
        }
        if (attr instanceof MediaTray) {
            return isSupportedMediaTray((MediaTray)attr);
        }
    } else if (attr.getCategory() == PageRanges.class) {
        // Page ranges only apply to service-formatted data.
        if (flavor != null &&
            !(flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
            flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE))) {
            return false;
        }
    } else if (attr.getCategory() == SheetCollate.class) {
        // Collation only applies to service-formatted data.
        if (flavor != null &&
            !(flavor.equals(DocFlavor.SERVICE_FORMATTED.PAGEABLE) ||
            flavor.equals(DocFlavor.SERVICE_FORMATTED.PRINTABLE))) {
            return false;
        }
    } else if (attr.getCategory() == Sides.class) {
        Sides[] sidesArray = (Sides[])getSupportedAttributeValues(
                                          Sides.class,
                                          flavor,
                                          attributes);
        if (sidesArray != null) {
            // Sides values are singletons, so identity comparison is safe.
            for (int i=0; i<sidesArray.length; i++) {
                if (sidesArray[i] == (Sides)attr) {
                    return true;
                }
            }
        }
        return false;
    } else if (attr.getCategory() == OrientationRequested.class) {
        OrientationRequested[] orientArray =
            (OrientationRequested[])getSupportedAttributeValues(
                                          OrientationRequested.class,
                                          flavor,
                                          attributes);
        if (orientArray != null) {
            for (int i=0; i<orientArray.length; i++) {
                if (orientArray[i] == (OrientationRequested)attr) {
                    return true;
                }
            }
        }
        return false;
    } else if (attr.getCategory() == PrinterResolution.class) {
        if (attr instanceof PrinterResolution) {
            return isSupportedResolution((PrinterResolution)attr);
        }
    } else if (attr.getCategory() == DialogOwner.class) {
        DialogOwner owner = (DialogOwner)attr;
        // ID not supported on any dialog type on Unix platforms.
        if (DialogOwnerAccessor.getID(owner) != 0) {
            return false;
        }
        // On Mac we have no control over the native dialog.
        DialogTypeSelection dst = (attributes == null) ? null :
            (DialogTypeSelection)attributes.get(DialogTypeSelection.class);
        if (PrintServiceLookupProvider.isMac() &&
            dst == DialogTypeSelection.NATIVE) {
            return false;
        }
        // The other case is always a Swing dialog on all Unix platforms.
        // So we only need to check that the toolkit supports
        // always on top.
        if (owner.getOwner() != null) {
            return true;
        } else {
            return Toolkit.getDefaultToolkit().isAlwaysOnTopSupported();
        }
    } else if (attr.getCategory() == DialogTypeSelection.class) {
        if (PrintServiceLookupProvider.isMac()) {
            return true;
        } else {
            // NOTE(review): `dst` is assigned but unused; only COMMON is
            // reported as supported on non-Mac platforms.
            DialogTypeSelection dst = (DialogTypeSelection)attr;
            return attr == DialogTypeSelection.COMMON;
        }
    }
    // Categories with no flavor-specific restriction fall through here.
    return true;
}
/**
 * Returns the printer's default value for the given attribute category,
 * or null when the category is unsupported or has no default.  Where
 * available, the IPP "&lt;name&gt;-default" attribute from the most recent
 * get-attributes response ({@code getAttMap}) is consulted; otherwise a
 * hard-coded fallback is used.
 *
 * @param category the attribute category; must be non-null and an Attribute
 * @return the default value for the category, or null
 * @throws NullPointerException if category is null
 * @throws IllegalArgumentException if category is not an Attribute class
 */
public synchronized Object
getDefaultAttributeValue(Class<? extends Attribute> category)
{
    if (category == null) {
        throw new NullPointerException("null category");
    }
    if (!Attribute.class.isAssignableFrom(category)) {
        throw new IllegalArgumentException(category +
                                           " is not an Attribute");
    }
    if (!isAttributeCategorySupported(category)) {
        return null;
    }
    initAttributes();

    // Map the category class to its IPP attribute name.
    String catName = null;
    for (int i=0; i < printReqAttribDefault.length; i++) {
        PrintRequestAttribute pra =
            (PrintRequestAttribute)printReqAttribDefault[i];
        if (pra.getCategory() == category) {
            catName = pra.getName();
            break;
        }
    }
    // NOTE(review): if the category is absent from printReqAttribDefault,
    // catName stays null and "null-default" is simply never found in the map.
    String attribName = catName+"-default";
    AttributeClass attribClass = (getAttMap != null) ?
        getAttMap.get(attribName) : null;

    if (category == Copies.class) {
        if (attribClass != null) {
            return new Copies(attribClass.getIntValue());
        } else {
            return new Copies(1);
        }
    } else if (category == Chromaticity.class) {
        return Chromaticity.COLOR;
    } else if (category == Destination.class) {
        try {
            return new Destination((new File("out.ps")).toURI());
        } catch (SecurityException se) {
            // Cannot touch the filesystem; fall back to a literal file URI.
            try {
                return new Destination(new URI("file:out.ps"));
            } catch (URISyntaxException e) {
                return null;
            }
        }
    } else if (category == Fidelity.class) {
        return Fidelity.FIDELITY_FALSE;
    } else if (category == Finishings.class) {
        return Finishings.NONE;
    } else if (category == JobName.class) {
        return new JobName("Java Printing", null);
    } else if (category == JobSheets.class) {
        if (attribClass != null &&
            attribClass.getStringValue().equals("none")) {
            return JobSheets.NONE;
        } else {
            return JobSheets.STANDARD;
        }
    } else if (category == Media.class) {
        if (defaultMediaIndex == -1) {
            defaultMediaIndex = 0;
        }
        if (mediaSizeNames.length == 0) {
            // No sizes reported: choose a locale-based paper default.
            String defaultCountry = Locale.getDefault().getCountry();
            if (defaultCountry != null &&
                (defaultCountry.isEmpty() ||
                 defaultCountry.equals(Locale.US.getCountry()) ||
                 defaultCountry.equals(Locale.CANADA.getCountry()))) {
                return MediaSizeName.NA_LETTER;
            } else {
                return MediaSizeName.ISO_A4;
            }
        }
        if (attribClass != null) {
            String name = attribClass.getStringValue();
            if (isCupsPrinter) {
                // For CUPS the default index was resolved during init.
                return mediaSizeNames[defaultMediaIndex];
            } else {
                // Otherwise match the reported default name by substring.
                for (int i=0; i< mediaSizeNames.length; i++) {
                    if (mediaSizeNames[i].toString().indexOf(name) != -1) {
                        defaultMediaIndex = i;
                        return mediaSizeNames[defaultMediaIndex];
                    }
                }
            }
        }
        return mediaSizeNames[defaultMediaIndex];
    } else if (category == MediaPrintableArea.class) {
        MediaPrintableArea[] mpas;
        if ((cps != null) &&
            ((mpas = cps.getMediaPrintableArea()) != null)) {
            if (defaultMediaIndex == -1) {
                // initializes value of defaultMediaIndex
                getDefaultAttributeValue(Media.class);
            }
            return mpas[defaultMediaIndex];
        } else {
            // No CUPS data: derive a printable area from the locale's
            // default paper with a 0.25" margin on each side.
            String defaultCountry = Locale.getDefault().getCountry();
            float iw, ih;
            if (defaultCountry != null &&
                (defaultCountry.isEmpty() ||
                 defaultCountry.equals(Locale.US.getCountry()) ||
                 defaultCountry.equals(Locale.CANADA.getCountry()))) {
                iw = MediaSize.NA.LETTER.getX(Size2DSyntax.INCH) - 0.5f;
                ih = MediaSize.NA.LETTER.getY(Size2DSyntax.INCH) - 0.5f;
            } else {
                iw = MediaSize.ISO.A4.getX(Size2DSyntax.INCH) - 0.5f;
                ih = MediaSize.ISO.A4.getY(Size2DSyntax.INCH) - 0.5f;
            }
            return new MediaPrintableArea(0.25f, 0.25f, iw, ih,
                                          MediaPrintableArea.INCH);
        }
    } else if (category == NumberUp.class) {
        return new NumberUp(1); // for CUPS this is always 1
    } else if (category == OrientationRequested.class) {
        if (attribClass != null) {
            // IPP orientation-requested enum values 3..6.
            switch (attribClass.getIntValue()) {
            default:
            case 3: return OrientationRequested.PORTRAIT;
            case 4: return OrientationRequested.LANDSCAPE;
            case 5: return OrientationRequested.REVERSE_LANDSCAPE;
            case 6: return OrientationRequested.REVERSE_PORTRAIT;
            }
        } else {
            return OrientationRequested.PORTRAIT;
        }
    } else if (category == PageRanges.class) {
        if (attribClass != null) {
            int[] range = attribClass.getIntRangeValue();
            return new PageRanges(range[0], range[1]);
        } else {
            return new PageRanges(1, Integer.MAX_VALUE);
        }
    } else if (category == RequestingUserName.class) {
        String userName = "";
        try {
            userName = System.getProperty("user.name", "");
        } catch (SecurityException se) {
            // leave userName empty if the property is inaccessible
        }
        return new RequestingUserName(userName, null);
    } else if (category == SheetCollate.class) {
        return SheetCollate.UNCOLLATED;
    } else if (category == Sides.class) {
        if (attribClass != null) {
            if (attribClass.getStringValue().endsWith("long-edge")) {
                return Sides.TWO_SIDED_LONG_EDGE;
            } else if (attribClass.getStringValue().endsWith(
                                                       "short-edge")) {
                return Sides.TWO_SIDED_SHORT_EDGE;
            }
        }
        return Sides.ONE_SIDED;
    } else if (category == PrinterResolution.class) {
        PrinterResolution[] supportedRes = getPrintResolutions();
        if ((supportedRes != null) && (supportedRes.length > 0)) {
            return supportedRes[0];
        } else {
            // 300dpi is a reasonable hard-coded fallback.
            return new PrinterResolution(300, 300, PrinterResolution.DPI);
        }
    }
    return null;
}
/**
 * Lazily converts the raw dpi pairs reported by the printer into
 * PrinterResolution objects, caching the result.  Returns an empty
 * array when no raw resolutions were reported.
 */
private PrinterResolution[] getPrintResolutions() {
    if (printerResolutions != null) {
        return printerResolutions;
    }
    if (rawResolutions == null) {
        printerResolutions = new PrinterResolution[0];
        return printerResolutions;
    }
    // rawResolutions holds interleaved (cross-feed, feed) dpi pairs.
    int count = rawResolutions.length / 2;
    PrinterResolution[] converted = new PrinterResolution[count];
    for (int i = 0; i < count; i++) {
        converted[i] = new PrinterResolution(rawResolutions[2*i],
                                             rawResolutions[2*i + 1],
                                             PrinterResolution.DPI);
    }
    printerResolutions = converted;
    return printerResolutions;
}
/**
 * Returns true if the given resolution matches one of the printer's
 * supported resolutions.
 */
private boolean isSupportedResolution(PrinterResolution res) {
    PrinterResolution[] supported = getPrintResolutions();
    if (supported == null) {
        return false;
    }
    for (PrinterResolution candidate : supported) {
        if (res.equals(candidate)) {
            return true;
        }
    }
    return false;
}
/**
 * This IPP service provides no custom UI components; always returns null.
 */
public ServiceUIFactory getServiceUIFactory() {
    return null;
}
/**
 * Wakes the attribute-change notifier thread, if one is running, so it
 * re-polls and delivers events to registered listeners.
 */
public void wakeNotifier() {
    synchronized (this) {
        if (notifier != null) {
            notifier.wake();
        }
    }
}
/**
 * Registers a listener for service attribute events, lazily creating the
 * notifier thread on first registration.  Null listeners are ignored.
 */
public void addPrintServiceAttributeListener(
                             PrintServiceAttributeListener listener) {
    if (listener == null) {
        return;   // silently ignore, per PrintService contract
    }
    synchronized (this) {
        if (notifier == null) {
            notifier = new ServiceNotifier(this);
        }
        notifier.addListener(listener);
    }
}
/**
 * Unregisters a listener; when the last listener is removed the notifier
 * thread is stopped and released.  Null listeners are ignored.
 */
public void removePrintServiceAttributeListener(
                             PrintServiceAttributeListener listener) {
    synchronized (this) {
        if (listener == null || notifier == null ) {
            return;
        }
        notifier.removeListener(listener);
        if (!notifier.isEmpty()) {
            return;
        }
        // Last listener gone: shut the notifier down.
        notifier.stopNotifier();
        notifier = null;
    }
}
/** Returns the raw destination (queue) name used for this printer. */
String getDest() {
    return printer;
}
/**
 * Returns the printer's display name.  On macOS the printer-info IPP
 * attribute is preferred, since that is the human-readable name and the
 * identifier used by NSPrintInfo:setPrinter; elsewhere (or when the
 * attribute is unavailable) the queue name is returned.
 */
public String getName() {
    if (!PrintServiceLookupProvider.isMac()) {
        return printer;
    }
    PrintServiceAttributeSet psaSet = this.getAttributes();
    if (psaSet == null) {
        return printer;
    }
    PrinterInfo pName = (PrinterInfo)psaSet.get(PrinterInfo.class);
    return (pName != null) ? pName.toString() : printer;
}
/**
 * Returns true only for the PostScript printer job implementation this
 * service is backed by.
 */
public boolean usesClass(Class<?> c) {
    return (c == sun.print.PSPrinterJob.class);
}
/**
 * Opens an HTTP connection to the given URL configured for an IPP
 * exchange (no caching, bidirectional, "application/ipp" content type).
 *
 * @return the configured connection, or null if the URL cannot be opened
 *         or is not HTTP-based
 */
public static HttpURLConnection getIPPConnection(URL url) {
    URLConnection urlc;
    try {
        urlc = url.openConnection();
    } catch (java.io.IOException ioe) {
        return null;
    }
    if (!(urlc instanceof HttpURLConnection)) {
        return null;
    }
    HttpURLConnection connection = (HttpURLConnection)urlc;
    connection.setUseCaches(false);
    connection.setDoInput(true);
    connection.setDoOutput(true);
    connection.setRequestProperty("Content-type", "application/ipp");
    return connection;
}
/**
 * Determines (once, then caches in {@code isPS}) whether this printer
 * accepts PostScript directly.  For CUPS printers the PPD is fetched and
 * scanned: any "*cupsFilter:" line means data is filtered, i.e. the
 * printer is not treated as a raw PostScript device.
 *
 * Resource-leak fix: the original never closed the PPD input stream or
 * the BufferedReader wrapping it; both are now closed via
 * try-with-resources (closing the reader closes the underlying stream).
 *
 * @return true if the printer is assumed to accept PostScript
 */
public synchronized boolean isPostscript() {
    if (isPS == null) {
        isPS = Boolean.TRUE;
        if (isCupsPrinter) {
            try {
                urlConnection = getIPPConnection(
                                             new URL(myURL+".ppd"));
                InputStream is = urlConnection.getInputStream();
                if (is != null) {
                    try (BufferedReader d =
                             new BufferedReader(new InputStreamReader(is,
                                              Charset.forName("ISO-8859-1")))) {
                        String lineStr;
                        while ((lineStr = d.readLine()) != null) {
                            if (lineStr.startsWith("*cupsFilter:")) {
                                isPS = Boolean.FALSE;
                                break;
                            }
                        }
                    }
                }
            } catch (java.io.IOException e) {
                debug_println(" isPostscript, e= "+e);
                /* if PPD is not found, this may be a raw printer
                   and in this case it is assumed that it is a
                   Postscript printer */
                // do nothing
            }
        }
    }
    return isPS.booleanValue();
}
/**
 * Sends an IPP get-attributes request for this printer and stores the
 * parsed response in {@code getAttMap}.  When the response contains
 * duplicate attribute groups, entries from the extra maps are merged in
 * without overwriting keys already present.
 *
 * Bug fixes relative to the original:
 *  - the duplicate-key check passed {@code entry.getValue()} (an
 *    AttributeClass) to {@code containsKey} on a String-keyed map, so it
 *    never matched and duplicates were always re-put; it now checks
 *    {@code entry.getKey()};
 *  - {@code is.close()} was invoked unconditionally and would throw a
 *    NullPointerException if {@code getInputStream()} returned null; the
 *    stream is now closed only when it was obtained, in a finally block.
 */
private void opGetAttributes() {
    try {
        debug_println(debugPrefix+"opGetAttributes myURI "+myURI+" myURL "+myURL);

        AttributeClass[] attClNoUri = {
            AttributeClass.ATTRIBUTES_CHARSET,
            AttributeClass.ATTRIBUTES_NATURAL_LANGUAGE};

        AttributeClass[] attCl = {
            AttributeClass.ATTRIBUTES_CHARSET,
            AttributeClass.ATTRIBUTES_NATURAL_LANGUAGE,
            new AttributeClass("printer-uri",
                               AttributeClass.TAG_URI,
                               ""+myURI)};

        @SuppressWarnings("removal")
        OutputStream os = java.security.AccessController.
            doPrivileged(new java.security.PrivilegedAction<OutputStream>() {
                public OutputStream run() {
                    try {
                        return urlConnection.getOutputStream();
                    } catch (Exception e) {
                    }
                    return null;
                }
            });

        if (os == null) {
            return;
        }

        // Omit printer-uri when we have no URI for this printer.
        boolean success = (myURI == null) ?
            writeIPPRequest(os, OP_GET_ATTRIBUTES, attClNoUri) :
            writeIPPRequest(os, OP_GET_ATTRIBUTES, attCl);
        if (success) {
            InputStream is = urlConnection.getInputStream();
            if (is != null) {
                try {
                    HashMap<String, AttributeClass>[] responseMap = readIPPResponse(is);
                    if (responseMap != null && responseMap.length > 0) {
                        getAttMap = responseMap[0];
                        // If there is extra hashmap created due to duplicate
                        // key/attribute present in IPPresponse, then use that
                        // map too by appending to getAttMap after removing the
                        // duplicate key/value
                        for (int i = 1; i < responseMap.length; i++) {
                            for (Map.Entry<String, AttributeClass> entry : responseMap[i].entrySet()) {
                                // Compare by key (String), not value.
                                if (!getAttMap.containsKey(entry.getKey())) {
                                    getAttMap.put(entry.getKey(), entry.getValue());
                                }
                            }
                        }
                    }
                } finally {
                    is.close();
                }
            } else {
                debug_println(debugPrefix+"opGetAttributes - null input stream");
            }
        }
        os.close();
    } catch (java.io.IOException e) {
        debug_println(debugPrefix+"opGetAttributes - input/output stream: "+e);
    }
}
/**
 * Writes an IPP/1.1 request to the stream: version bytes, the 16-bit
 * operation code taken from the 4-hex-digit {@code operCode} string,
 * request id 1, an operation-attributes group containing {@code attCl},
 * and the end-of-attributes tag.
 *
 * NOTE(review): bytes are emitted through an OutputStreamWriter as
 * chars; this relies on each char value staying in the single-byte
 * range for the chosen encoding — TODO confirm for non-ASCII values.
 *
 * @return true on success, false on encoding or I/O failure
 */
public static boolean writeIPPRequest(OutputStream os,
                                      String operCode,
                                      AttributeClass[] attCl) {
    OutputStreamWriter osw;
    try {
        osw = new OutputStreamWriter(os, "UTF-8");
    } catch (java.io.UnsupportedEncodingException exc) {
        debug_println(debugPrefix+"writeIPPRequest, UTF-8 not supported? Exception: "+exc);
        return false;
    }
    debug_println(debugPrefix+"writeIPPRequest, op code= "+operCode);
    // operCode is 4 hex digits; split into two raw bytes.
    char[] opCode = new char[2];
    opCode[0] = (char)Byte.parseByte(operCode.substring(0,2), 16);
    opCode[1] = (char)Byte.parseByte(operCode.substring(2,4), 16);
    char[] bytes = {0x01, 0x01, 0x00, 0x01};
    try {
        osw.write(bytes, 0, 2); // version number
        osw.write(opCode, 0, 2); // operation code
        bytes[0] = 0x00; bytes[1] = 0x00;
        osw.write(bytes, 0, 4); // request ID #1
        bytes[0] = 0x01; // operation-group-tag
        osw.write(bytes[0]);

        String valStr;
        char[] lenStr;
        AttributeClass ac;
        for (int i=0; i < attCl.length; i++) {
            ac = attCl[i];
            osw.write(ac.getType()); // value tag
            lenStr = ac.getLenChars();
            osw.write(lenStr, 0, 2); // length
            osw.write(""+ac, 0, ac.getName().length());

            // check if string range (0x35 -> 0x49)
            if (ac.getType() >= AttributeClass.TAG_TEXT_LANGUAGE &&
                ac.getType() <= AttributeClass.TAG_MIME_MEDIATYPE){
                valStr = (String)ac.getObjectValue();
                bytes[0] = 0; bytes[1] = (char)valStr.length();
                osw.write(bytes, 0, 2);
                osw.write(valStr, 0, valStr.length());
            } // REMIND: need to support other value tags but for CUPS
            // string is all we need.
        }
        osw.write(GRPTAG_END_ATTRIBUTES);
        osw.flush();
        osw.close();
    } catch (java.io.IOException ioe) {
        debug_println(debugPrefix+"writeIPPRequest, IPPPrintService Exception in writeIPPRequest: "+ioe);
        return false;
    }
    return true;
}
/**
 * Parses an IPP response into one or more name-to-AttributeClass maps.
 * Extra maps are produced when an attribute name repeats inside a group
 * (the current map is pushed and a new one started).  Returns null when
 * the status byte is not success, or on any I/O error.
 *
 * NOTE(review): the bulk reads use {@code read(buf, off, len)}, which
 * may return fewer bytes than requested; readFully would be stricter —
 * TODO confirm this is acceptable for the transports in use.
 */
public static HashMap<String, AttributeClass>[] readIPPResponse(InputStream inputStream) {

    if (inputStream == null) {
        return null;
    }
    byte[] response = new byte[MAX_ATTRIBUTE_LENGTH];
    try {
        DataInputStream ois = new DataInputStream(inputStream);

        // read status and ID
        if ((ois.read(response, 0, 8) > -1) &&
            (response[2] == STATUSCODE_SUCCESS)) {

            ByteArrayOutputStream outObj;
            int counter=0;               // number of values in current attribute
            short len = 0;
            String attribStr = null;
            // assign default value
            byte valTagByte = AttributeClass.TAG_KEYWORD;
            ArrayList<HashMap<String, AttributeClass>> respList = new ArrayList<>();
            HashMap<String, AttributeClass> responseMap = new HashMap<>();

            response[0] = ois.readByte();

            // check for group tags
            while ((response[0] >= GRPTAG_OP_ATTRIBUTES) &&
                   (response[0] <= GRPTAG_PRINTER_ATTRIBUTES)
                   && (response[0] != GRPTAG_END_ATTRIBUTES)) {
                debug_println(debugPrefix+"readIPPResponse, checking group tag, response[0]= "+
                              response[0]);

                outObj = new ByteArrayOutputStream();
                //make sure counter and attribStr are re-initialized
                counter = 0;
                attribStr = null;

                // read value tag
                response[0] = ois.readByte();
                while (response[0] >= AttributeClass.TAG_UNSUPPORTED_VALUE &&
                       response[0] <= AttributeClass.TAG_MEMBER_ATTRNAME) {
                    // read name length
                    len  = ois.readShort();

                    // If current value is not part of previous attribute
                    // then close stream and add it to HashMap.
                    // It is part of previous attribute if name length=0.
                    if ((len != 0) && (attribStr != null)) {
                        //last byte is the total # of values
                        outObj.write(counter);
                        outObj.flush();
                        outObj.close();
                        byte[] outArray = outObj.toByteArray();

                        // if key exists, new HashMap
                        if (responseMap.containsKey(attribStr)) {
                            respList.add(responseMap);
                            responseMap = new HashMap<>();
                        }

                        // exclude those that are unknown
                        if (valTagByte >= AttributeClass.TAG_INT) {
                            AttributeClass ac =
                                new AttributeClass(attribStr,
                                                   valTagByte,
                                                   outArray);

                            responseMap.put(ac.getName(), ac);
                            debug_println(debugPrefix+ "readIPPResponse "+ac);
                        }

                        outObj = new ByteArrayOutputStream();
                        counter = 0; //reset counter
                    }
                    //check if this is new value tag
                    if (counter == 0) {
                        valTagByte = response[0];
                    }
                    // read attribute name
                    if (len != 0) {
                        // read "len" characters
                        // make sure it doesn't exceed the maximum
                        if (len > MAX_ATTRIBUTE_LENGTH) {
                            response = new byte[len]; // expand as needed
                        }
                        ois.read(response, 0, len);
                        attribStr = new String(response, 0, len);
                    }
                    // read value length
                    len  = ois.readShort();
                    // write name length
                    outObj.write(len);
                    // read value, make sure it doesn't exceed the maximum
                    if (len > MAX_ATTRIBUTE_LENGTH) {
                        response = new byte[len]; // expand as needed
                    }
                    ois.read(response, 0, len);
                    // write value of "len" length
                    outObj.write(response, 0, len);
                    counter++;
                    // read next byte
                    response[0] = ois.readByte();
                }

                // Flush the final attribute of this group, if any.
                if (attribStr != null) {
                    outObj.write(counter);
                    outObj.flush();
                    outObj.close();

                    // if key exists in old HashMap, new HashMap
                    if ((counter != 0) &&
                        responseMap.containsKey(attribStr)) {
                        respList.add(responseMap);
                        responseMap = new HashMap<>();
                    }

                    byte[] outArray = outObj.toByteArray();

                    AttributeClass ac =
                        new AttributeClass(attribStr,
                                           valTagByte,
                                           outArray);
                    responseMap.put(ac.getName(), ac);
                }
            } // while - check for group tags

            ois.close();
            if ((responseMap != null) && (responseMap.size() > 0)) {
                respList.add(responseMap);
            }
            @SuppressWarnings({"unchecked", "rawtypes"})
            HashMap<String, AttributeClass>[] tmp  =
                respList.toArray((HashMap<String, AttributeClass>[])new HashMap[respList.size()]);
            return tmp;
        } else {
            debug_println(debugPrefix+
                          "readIPPResponse client error, IPP status code: 0x"+
                          toHex(response[2]) + toHex(response[3]));
            return null;
        }

    } catch (java.io.IOException e) {
        debug_println(debugPrefix+"readIPPResponse: "+e);
        if (debugPrint) {
            e.printStackTrace();
        }
        return null;
    }
}
/** Formats the byte as exactly two lowercase hex digits (unsigned). */
private static String toHex(byte v) {
    return String.format("%02x", v & 0xff);
}
/** Human-readable identification used in logs and debugging. */
public String toString() {
    return "IPP Printer : " + getName();
}
/**
 * Two IPP print services are equal when they are the same object or
 * report the same printer name.
 */
public boolean equals(Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof IPPPrintService)) {
        return false;
    }
    return ((IPPPrintService)obj).getName().equals(getName());
}
/** Consistent with equals(): combines the class hash with the name hash. */
public int hashCode() {
    return this.getClass().hashCode()+getName().hashCode();
}
}
|
mirkosertic/Bytecoder
|
classlib/java.desktop/src/main/resources/META-INF/modules/java.desktop/classes/sun/print/IPPPrintService.java
|
Java
|
apache-2.0
| 78,960
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Spec
module Example
# Base class for customized example groups. Extend this when you want to
# build a custom example group with the standard group/example behavior
# mixed in.
class ExampleGroup
  extend Spec::Example::ExampleGroupMethods
  include Spec::Example::ExampleMethods

  def initialize(defined_description, options = {}, &implementation)
    @_defined_description = defined_description
    @_options             = options
    # An example declared without a block is treated as pending.
    @_implementation      = implementation || pending_implementation
    @_backtrace           = caller
  end

  private

  # Builds the stand-in implementation for a block-less example: a lambda
  # that raises NotYetImplementedError (capturing the declaration site).
  def pending_implementation
    not_implemented = NotYetImplementedError.new(caller)
    lambda { raise(not_implemented) }
  end
end
end
end
Spec::ExampleGroup = Spec::Example::ExampleGroup
|
shanti/olio
|
webapp/rails/trunk/vendor/plugins/rspec/lib/spec/example/example_group.rb
|
Ruby
|
apache-2.0
| 1,518
|
package service
import (
"path/filepath"
"reflect"
"testing"
"github.com/docker/docker/api/types/container"
"github.com/docker/libcompose/config"
"github.com/docker/libcompose/docker/ctx"
"github.com/docker/libcompose/lookup"
"github.com/docker/libcompose/yaml"
shlex "github.com/flynn/go-shlex"
"github.com/stretchr/testify/assert"
)
// TestParseCommand verifies that shlex splits a single-quoted shell
// command into the expected argv slice, preserving the quoted segment
// as one argument.
func TestParseCommand(t *testing.T) {
	want := []string{"sh", "-c", "exec /opt/bin/flanneld -logtostderr=true -iface=${NODE_IP}"}
	got, err := shlex.Split("sh -c 'exec /opt/bin/flanneld -logtostderr=true -iface=${NODE_IP}'")
	assert.Nil(t, err)
	assert.Equal(t, want, got)
}
// TestParseBindsAndVolumes checks that Convert splits service volumes into
// anonymous container volumes (cfg.Volumes, source-less entries) and host
// bind mounts (hostCfg.Binds), resolving a relative source ("." here)
// against the compose file's directory.
func TestParseBindsAndVolumes(t *testing.T) {
	ctx := &ctx.Context{}
	ctx.ComposeFiles = []string{"foo/docker-compose.yml"}
	ctx.ResourceLookup = &lookup.FileResourceLookup{}

	// abs is the working directory; the relative source resolves under it.
	abs, err := filepath.Abs(".")
	assert.Nil(t, err)
	cfg, hostCfg, err := Convert(&config.ServiceConfig{
		Volumes: &yaml.Volumes{
			Volumes: []*yaml.Volume{
				{
					Destination: "/foo",
				},
				{
					Source:      "/home",
					Destination: "/home",
				},
				{
					Destination: "/bar/baz",
				},
				{
					Source:      ".",
					Destination: "/home",
				},
				{
					Source:      "/usr/lib",
					Destination: "/usr/lib",
					AccessMode:  "ro",
				},
			},
		},
	}, ctx.Context, nil)
	assert.Nil(t, err)
	assert.Equal(t, map[string]struct{}{"/foo": {}, "/bar/baz": {}}, cfg.Volumes)
	assert.Equal(t, []string{"/home:/home", abs + "/foo:/home", "/usr/lib:/usr/lib:ro"}, hostCfg.Binds)
}
// TestParseLabels verifies that Convert deep-copies labels and entrypoint:
// mutating the converted config must not leak back into the original
// ServiceConfig.
func TestParseLabels(t *testing.T) {
	ctx := &ctx.Context{}
	ctx.ComposeFiles = []string{"foo/docker-compose.yml"}
	ctx.ResourceLookup = &lookup.FileResourceLookup{}

	bashCmd := "bash"
	fooLabel := "foo.label"
	fooLabelValue := "service.config.value"
	sc := &config.ServiceConfig{
		Entrypoint: yaml.Command([]string{bashCmd}),
		Labels:     yaml.SliceorMap{fooLabel: "service.config.value"},
	}
	cfg, _, err := Convert(sc, ctx.Context, nil)
	assert.Nil(t, err)

	// Mutate the converted copy...
	cfg.Labels[fooLabel] = "FUN"
	cfg.Entrypoint[0] = "less"

	// ...and confirm the source config is untouched.
	assert.Equal(t, fooLabelValue, sc.Labels[fooLabel])
	assert.Equal(t, "FUN", cfg.Labels[fooLabel])

	assert.Equal(t, yaml.Command{bashCmd}, sc.Entrypoint)
	assert.Equal(t, []string{"less"}, []string(cfg.Entrypoint))
}
// TestDNSOpt checks that DNSOpts from the service config are copied
// verbatim into the host config's DNSOptions.
func TestDNSOpt(t *testing.T) {
	ctx := &ctx.Context{}
	sc := &config.ServiceConfig{
		DNSOpts: []string{
			"use-vc",
			"no-tld-query",
		},
	}
	_, hostCfg, err := Convert(sc, ctx.Context, nil)
	assert.Nil(t, err)
	assert.True(t, reflect.DeepEqual([]string{
		"use-vc",
		"no-tld-query",
	}, hostCfg.DNSOptions))
}
// TestGroupAdd checks that supplementary groups (names and numeric ids)
// are propagated to the host config's GroupAdd.
func TestGroupAdd(t *testing.T) {
	ctx := &ctx.Context{}
	sc := &config.ServiceConfig{
		GroupAdd: []string{
			"root",
			"1",
		},
	}
	_, hostCfg, err := Convert(sc, ctx.Context, nil)
	assert.Nil(t, err)
	assert.True(t, reflect.DeepEqual([]string{
		"root",
		"1",
	}, hostCfg.GroupAdd))
}
// TestIsolation checks that the isolation mode is converted to the
// docker container.Isolation type on the host config.
func TestIsolation(t *testing.T) {
	composeCtx := &ctx.Context{}
	serviceCfg := &config.ServiceConfig{
		Isolation: "default",
	}
	_, hostCfg, err := Convert(serviceCfg, composeCtx.Context, nil)
	assert.Nil(t, err)
	assert.Equal(t, container.Isolation("default"), hostCfg.Isolation)
}
// TestMemSwappiness checks that mem_swappiness is converted to the
// *int64 MemorySwappiness field on the host config.
func TestMemSwappiness(t *testing.T) {
	composeCtx := &ctx.Context{}
	serviceCfg := &config.ServiceConfig{
		MemSwappiness: yaml.MemStringorInt(10),
	}
	_, hostCfg, err := Convert(serviceCfg, composeCtx.Context, nil)
	assert.Nil(t, err)
	assert.Equal(t, int64(10), *hostCfg.MemorySwappiness)
}
// TestMemReservation checks that mem_reservation is propagated to the
// host config's MemoryReservation (bytes).
func TestMemReservation(t *testing.T) {
	composeCtx := &ctx.Context{}
	serviceCfg := &config.ServiceConfig{
		MemReservation: 100000,
	}
	_, hostCfg, err := Convert(serviceCfg, composeCtx.Context, nil)
	assert.Nil(t, err)
	assert.Equal(t, int64(100000), hostCfg.MemoryReservation)
}
// TestOomKillDisable checks that oom_kill_disable is propagated to the
// host config's *bool OomKillDisable field.
func TestOomKillDisable(t *testing.T) {
	composeCtx := &ctx.Context{}
	serviceCfg := &config.ServiceConfig{
		OomKillDisable: true,
	}
	_, hostCfg, err := Convert(serviceCfg, composeCtx.Context, nil)
	assert.Nil(t, err)
	assert.Equal(t, true, *hostCfg.OomKillDisable)
}
// TestOomScoreAdj checks that oom_score_adj is propagated to the host
// config's OomScoreAdj field.
func TestOomScoreAdj(t *testing.T) {
	composeCtx := &ctx.Context{}
	serviceCfg := &config.ServiceConfig{
		OomScoreAdj: 500,
	}
	_, hostCfg, err := Convert(serviceCfg, composeCtx.Context, nil)
	assert.Nil(t, err)
	assert.Equal(t, 500, hostCfg.OomScoreAdj)
}
// TestStopSignal checks that stop_signal is propagated to the container
// config's StopSignal field.
func TestStopSignal(t *testing.T) {
	composeCtx := &ctx.Context{}
	serviceCfg := &config.ServiceConfig{
		StopSignal: "SIGTERM",
	}
	containerCfg, _, err := Convert(serviceCfg, composeCtx.Context, nil)
	assert.Nil(t, err)
	assert.Equal(t, "SIGTERM", containerCfg.StopSignal)
}
// TestTmpfs checks tmpfs conversion: a bare path maps to an empty option
// string, and a "path:options" entry is split into the Tmpfs map.
func TestTmpfs(t *testing.T) {
	ctx := &ctx.Context{}
	sc := &config.ServiceConfig{
		Tmpfs: yaml.Stringorslice{"/run"},
	}
	_, hostCfg, err := Convert(sc, ctx.Context, nil)
	assert.Nil(t, err)
	assert.True(t, reflect.DeepEqual(map[string]string{
		"/run": "",
	}, hostCfg.Tmpfs))

	// With mount options appended after the colon.
	sc = &config.ServiceConfig{
		Tmpfs: yaml.Stringorslice{"/run:rw,noexec,nosuid,size=65536k"},
	}
	_, hostCfg, err = Convert(sc, ctx.Context, nil)
	assert.Nil(t, err)
	assert.True(t, reflect.DeepEqual(map[string]string{
		"/run": "rw,noexec,nosuid,size=65536k",
	}, hostCfg.Tmpfs))
}
|
turnerlabs/harbor-compose
|
vendor/github.com/docker/libcompose/docker/service/convert_test.go
|
GO
|
apache-2.0
| 4,904
|
/*
License
Menge
Copyright © and trademark 2012-14 University of North Carolina at Chapel Hill.
All rights reserved.
Permission to use, copy, modify, and distribute this software and its documentation
for educational, research, and non-profit purposes, without fee, and without a
written agreement is hereby granted, provided that the above copyright notice,
this paragraph, and the following four paragraphs appear in all copies.
This software program and documentation are copyrighted by the University of North
Carolina at Chapel Hill. The software program and documentation are supplied "as is,"
without any accompanying services from the University of North Carolina at Chapel
Hill or the authors. The University of North Carolina at Chapel Hill and the
authors do not warrant that the operation of the program will be uninterrupted
or error-free. The end-user understands that the program was developed for research
purposes and is advised not to rely exclusively on the program for any reason.
IN NO EVENT SHALL THE UNIVERSITY OF NORTH CAROLINA AT CHAPEL HILL OR THE AUTHORS
BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS
DOCUMENTATION, EVEN IF THE UNIVERSITY OF NORTH CAROLINA AT CHAPEL HILL OR THE
AUTHORS HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
THE UNIVERSITY OF NORTH CAROLINA AT CHAPEL HILL AND THE AUTHORS SPECIFICALLY
DISCLAIM ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE AND ANY STATUTORY WARRANTY
OF NON-INFRINGEMENT. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND
THE UNIVERSITY OF NORTH CAROLINA AT CHAPEL HILL AND THE AUTHORS HAVE NO OBLIGATIONS
TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
Any questions or comments should be sent to the authors {menge,geom}@cs.unc.edu
*/
#include "KaramouzasInitializer.h"
#include "KaramouzasAgent.h"
#include "MengeCore/Math/RandGenerator.h"
#include "MengeCore/Runtime/Logger.h"
namespace Karamouzas {
using Menge::Logger;
using Menge::logger;
using Menge::Agents::BaseAgent;
using Menge::Math::ConstFloatGenerator;
////////////////////////////////////////////////////////////////
// Implementation of Karamouzas::AgentInitializer
////////////////////////////////////////////////////////////////
// Default values
const float PER_SPACE = 1.f; ///< The default personal space for the agent.
const float ANTICIPATION = 3.f; ///< The anticipation time of the agent.
////////////////////////////////////////////////////////////////
// Default constructor: installs constant generators seeded with the
// model defaults (PER_SPACE, ANTICIPATION).
AgentInitializer::AgentInitializer() : Menge::Agents::AgentInitializer() {
  _perSpace = new ConstFloatGenerator(PER_SPACE);
  _anticipation = new ConstFloatGenerator(ANTICIPATION);
}
////////////////////////////////////////////////////////////////
// Copy constructor: deep-copies the generators so the two initializers
// never share (and later double-delete) the same generator instances.
AgentInitializer::AgentInitializer(const AgentInitializer& init)
    : Menge::Agents::AgentInitializer(init) {
  _perSpace = init._perSpace->copy();
  _anticipation = init._anticipation->copy();
}
////////////////////////////////////////////////////////////////
// Destructor: releases the owned float generators.
AgentInitializer::~AgentInitializer() {
  delete _perSpace;
  delete _anticipation;
}
////////////////////////////////////////////////////////////////
// Applies the Karamouzas-specific properties (personal space and
// anticipation time) to the agent, then delegates the common properties
// to the base initializer.  Returns false when the agent is not a
// Karamouzas agent.
// Idiom fix: the null check used the literal 0x0; `nullptr` is the
// type-safe modern spelling.
bool AgentInitializer::setProperties(BaseAgent* agent) {
  Agent* a = dynamic_cast<Agent*>(agent);
  if (a == nullptr) return false;
  a->_perSpace = _perSpace->getValue();
  a->_anticipation = _anticipation->getValue();
  return Menge::Agents::AgentInitializer::setProperties(agent);
}
////////////////////////////////////////////////////////////////
// Reports whether this initializer knows the given XML tag: the
// Karamouzas tag is handled here; anything else defers to the base class.
bool AgentInitializer::isRelevant(const ::std::string& tagName) {
  if (tagName == "Karamouzas") return true;
  return Menge::Agents::AgentInitializer::isRelevant(tagName);
}
////////////////////////////////////////////////////////////////
// Parses a Karamouzas-specific XML attribute ("personal_space" or
// "anticipation") into the matching constant generator.  A malformed
// value logs a warning and keeps the default (downgraded to ACCEPTED);
// unrecognized attributes are delegated to the base class.
Menge::Agents::AgentInitializer::ParseResult AgentInitializer::setFromXMLAttribute(
    const ::std::string& paramName, const ::std::string& value) {
  ParseResult result = IGNORED;
  if (paramName == "personal_space") {
    result = constFloatGenerator(_perSpace, value);
  } else if (paramName == "anticipation") {
    result = constFloatGenerator(_anticipation, value);
  }

  if (result == FAILURE) {
    logger << Logger::WARN_MSG << "Attribute \"" << paramName;
    logger << "\" had an incorrectly formed value: \"" << value;
    logger << "\". Using default value.";
    result = ACCEPTED;
  } else if (result == IGNORED) {
    return Menge::Agents::AgentInitializer::setFromXMLAttribute(paramName, value);
  }
  return result;
}
////////////////////////////////////////////////////////////////
// Parses a Karamouzas-specific <Property> element into a (possibly
// distribution-valued) float generator.  Extraction failure is an error
// and propagates; unrecognized properties are delegated to the base class.
AgentInitializer::ParseResult AgentInitializer::processProperty(::std::string propName,
                                                                TiXmlElement* node) {
  ParseResult result = IGNORED;
  if (propName == "personal_space") {
    result = getFloatGenerator(_perSpace, node);
  } else if (propName == "anticipation") {
    result = getFloatGenerator(_anticipation, node);
  }

  if (result == FAILURE) {
    logger << Logger::ERR_MSG << "Error extracting value distribution from Property ";
    logger << propName << ".";
    return result;
  } else if (result == IGNORED) {
    return Menge::Agents::AgentInitializer::processProperty(propName, node);
  }
  return result;
}
////////////////////////////////////////////////////////////////
// Restores the model-default generators, discarding any parsed values.
// `delete` on a null pointer is a no-op, so no guard is required before
// releasing the old generators.
void AgentInitializer::setDefaults() {
  delete _perSpace;
  _perSpace = new ConstFloatGenerator(PER_SPACE);
  delete _anticipation;
  _anticipation = new ConstFloatGenerator(ANTICIPATION);

  Menge::Agents::AgentInitializer::setDefaults();
}
////////////////////////////////////////////////////////////////
} // namespace Karamouzas
|
MengeCrowdSim/Menge
|
src/Plugins/AgtKaramouzas/KaramouzasInitializer.cpp
|
C++
|
apache-2.0
| 5,845
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.RandomAccessFile;
import java.io.StringWriter;
import java.io.Writer;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.channels.FileChannel;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSInputStream;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
import org.apache.hadoop.hdfs.server.namenode.NamenodeFsck.Result;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.hdfs.tools.DFSck;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;
import org.junit.Test;
import com.google.common.collect.Sets;
/**
* A JUnit test for doing fsck
*/
public class TestFsck {
  // Destination of the NameNode audit log captured by setupAuditLogs();
  // rooted under the build directory (overridable via -Dtest.build.dir).
  static final String auditLogFile = System.getProperty("test.build.dir",
      "build/test") + "/TestFsck-audit.log";

  // Pattern for:
  // allowed=true ugi=name ip=/address cmd=FSCK src=/ dst=null perm=null
  static final Pattern fsckPattern = Pattern.compile(
      "allowed=.*?\\s" +
      "ugi=.*?\\s" +
      "ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" +
      "cmd=fsck\\ssrc=\\/\\sdst=null\\s" +
      "perm=null\\s" + "proto=.*");
  // Same shape as fsckPattern but matching the getfileinfo audit event.
  static final Pattern getfileinfoPattern = Pattern.compile(
      "allowed=.*?\\s" +
      "ugi=.*?\\s" +
      "ip=/\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\s" +
      "cmd=getfileinfo\\ssrc=\\/\\sdst=null\\s" +
      "perm=null\\s" + "proto=.*");
  // Extracts the "Missing blocks" count from a line of fsck output.
  static final Pattern numMissingBlocksPattern = Pattern.compile(
      ".*Missing blocks:\t\t([0123456789]*).*");
  // Extracts the "Corrupt blocks" count from a line of fsck output.
  static final Pattern numCorruptBlocksPattern = Pattern.compile(
      ".*Corrupt blocks:\t\t([0123456789]*).*");
  // Platform line separator, used to split fsck output into lines.
  private static final String LINE_SEPARATOR =
      System.getProperty("line.separator");
static String runFsck(Configuration conf, int expectedErrCode,
boolean checkErrorCode,String... path)
throws Exception {
ByteArrayOutputStream bStream = new ByteArrayOutputStream();
PrintStream out = new PrintStream(bStream, true);
((Log4JLogger)FSPermissionChecker.LOG).getLogger().setLevel(Level.ALL);
int errCode = ToolRunner.run(new DFSck(conf, out), path);
if (checkErrorCode) {
assertEquals(expectedErrCode, errCode);
}
((Log4JLogger)FSPermissionChecker.LOG).getLogger().setLevel(Level.INFO);
FSImage.LOG.error("OUTPUT = " + bStream.toString());
return bStream.toString();
}
  /**
   * Basic fsck round trip: a healthy cluster reports HEALTHY without
   * disturbing file access times; with all datanodes down the same
   * namespace reports CORRUPT. Also verifies the audit-log trail of the
   * fsck run.
   */
  @Test
  public void testFsck() throws Exception {
    DFSTestUtil util = new DFSTestUtil.Builder().setName("TestFsck").
        setNumFiles(20).build();
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    try {
      Configuration conf = new HdfsConfiguration();
      // 1ms access-time precision so the access-time comparison below is exact.
      final long precision = 1L;
      conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, precision);
      conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
      fs = cluster.getFileSystem();
      final String fileName = "/srcdat";
      util.createFiles(fs, fileName);
      util.waitReplication(fs, fileName, (short)3);
      final Path file = new Path(fileName);
      long aTime = fs.getFileStatus(file).getAccessTime();
      Thread.sleep(precision);
      setupAuditLogs();
      // Healthy cluster: fsck must exit 0 and must not modify access times.
      String outStr = runFsck(conf, 0, true, "/");
      verifyAuditLogs();
      assertEquals(aTime, fs.getFileStatus(file).getAccessTime());
      System.out.println(outStr);
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      cluster.shutdown();
      // restart the cluster; bring up namenode but not the data nodes
      cluster = new MiniDFSCluster.Builder(conf)
          .numDataNodes(0).format(false).build();
      outStr = runFsck(conf, 1, true, "/");
      // expect the result is corrupt
      assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
      System.out.println(outStr);
      // bring up data nodes & cleanup cluster
      cluster.startDataNodes(conf, 4, true, null, null);
      cluster.waitActive();
      cluster.waitClusterUp();
      fs = cluster.getFileSystem();
      util.cleanup(fs, "/srcdat");
    } finally {
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      if (cluster != null) { cluster.shutdown(); }
    }
  }
/** Sets up log4j logger for auditlogs */
private void setupAuditLogs() throws IOException {
File file = new File(auditLogFile);
if (file.exists()) {
file.delete();
}
Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
logger.setLevel(Level.INFO);
PatternLayout layout = new PatternLayout("%m%n");
RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile);
logger.addAppender(appender);
}
  /**
   * Checks that the audit log written by {@link #setupAuditLogs()} contains
   * exactly the expected events for one fsck run: two getfileinfo events
   * followed by one fsck event and nothing else. Detaches the appender when
   * done so the log file handle is released.
   */
  private void verifyAuditLogs() throws IOException {
    // Turn off the logs
    Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
    logger.setLevel(Level.OFF);
    BufferedReader reader = null;
    try {
      // Audit log should contain one getfileinfo and one fsck
      reader = new BufferedReader(new FileReader(auditLogFile));
      String line;
      // one extra getfileinfo stems from resolving the path
      //
      for (int i = 0; i < 2; i++) {
        line = reader.readLine();
        assertNotNull(line);
        assertTrue("Expected getfileinfo event not found in audit log",
            getfileinfoPattern.matcher(line).matches());
      }
      line = reader.readLine();
      assertNotNull(line);
      assertTrue("Expected fsck event not found in audit log", fsckPattern
          .matcher(line).matches());
      // Any further line means an unexpected extra audit event was logged.
      assertNull("Unexpected event in audit log", reader.readLine());
    } finally {
      // Close the reader and remove the appender to release the audit log file
      // handle after verifying the content of the file.
      if (reader != null) {
        reader.close();
      }
      if (logger != null) {
        logger.removeAllAppenders();
      }
    }
  }
  /** Fsck on a path that does not exist must not report a HEALTHY status. */
  @Test
  public void testFsckNonExistent() throws Exception {
    DFSTestUtil util = new DFSTestUtil.Builder().setName("TestFsck").
        setNumFiles(20).build();
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    try {
      Configuration conf = new HdfsConfiguration();
      conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
      fs = cluster.getFileSystem();
      util.createFiles(fs, "/srcdat");
      util.waitReplication(fs, "/srcdat", (short)3);
      // The tool still exits 0, but its output must not claim HEALTHY for
      // a path that was never created.
      String outStr = runFsck(conf, 0, true, "/non-existent");
      assertEquals(-1, outStr.indexOf(NamenodeFsck.HEALTHY_STATUS));
      System.out.println(outStr);
      util.cleanup(fs, "/srcdat");
    } finally {
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  /** Test fsck with permission set on inodes */
  @Test
  public void testFsckPermission() throws Exception {
    final DFSTestUtil util = new DFSTestUtil.Builder().
        setName(getClass().getSimpleName()).setNumFiles(20).build();
    final Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
    MiniDFSCluster cluster = null;
    try {
      // Create a cluster with the current user, write some files
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
      final MiniDFSCluster c2 = cluster;
      final String dir = "/dfsck";
      final Path dirpath = new Path(dir);
      final FileSystem fs = c2.getFileSystem();
      util.createFiles(fs, dir);
      util.waitReplication(fs, dir, (short) 3);
      // Lock the directory down to owner-only access.
      fs.setPermission(dirpath, new FsPermission((short) 0700));
      // run DFSck as another user, should fail with permission issue
      UserGroupInformation fakeUGI = UserGroupInformation.createUserForTesting(
          "ProbablyNotARealUserName", new String[] { "ShangriLa" });
      fakeUGI.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          // Expect exit code -1: the fake user cannot read a 0700 directory.
          System.out.println(runFsck(conf, -1, true, dir));
          return null;
        }
      });
      // set permission and try DFSck again as the fake user, should succeed
      fs.setPermission(dirpath, new FsPermission((short) 0777));
      fakeUGI.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
          final String outStr = runFsck(conf, 0, true, dir);
          System.out.println(outStr);
          assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
          return null;
        }
      });
      util.cleanup(fs, dir);
    } finally {
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  /**
   * Exercises "fsck -move" and "fsck -delete": deletes selected blocks of
   * several files from every datanode, waits until fsck reports all of them
   * missing, salvages the surviving blocks into /lost+found, verifies the
   * salvaged bytes, then deletes the corrupt files and confirms the
   * filesystem returns to HEALTHY.
   */
  @Test
  public void testFsckMove() throws Exception {
    Configuration conf = new HdfsConfiguration();
    final int DFS_BLOCK_SIZE = 1024;
    final int NUM_DATANODES = 4;
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DFS_BLOCK_SIZE);
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
    conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1);
    // 5 files, depth 3, sizes up to just under 6 blocks each.
    DFSTestUtil util = new DFSTestUtil("TestFsck", 5, 3,
        (5 * DFS_BLOCK_SIZE) + (DFS_BLOCK_SIZE - 1), 5 * DFS_BLOCK_SIZE);
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).
          numDataNodes(NUM_DATANODES).build();
      String topDir = "/srcdat";
      fs = cluster.getFileSystem();
      cluster.waitActive();
      util.createFiles(fs, topDir);
      util.waitReplication(fs, topDir, (short)3);
      String outStr = runFsck(conf, 0, true, "/");
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      DFSClient dfsClient = new DFSClient(new InetSocketAddress("localhost",
                                          cluster.getNameNodePort()), conf);
      // Corrupt a different selection of block indices in each file.
      String fileNames[] = util.getFileNames(topDir);
      CorruptedTestFile ctFiles[] = new CorruptedTestFile[] {
        new CorruptedTestFile(fileNames[0], Sets.newHashSet(0),
          dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE),
        new CorruptedTestFile(fileNames[1], Sets.newHashSet(2, 3),
          dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE),
        new CorruptedTestFile(fileNames[2], Sets.newHashSet(4),
          dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE),
        new CorruptedTestFile(fileNames[3], Sets.newHashSet(0, 1, 2, 3),
          dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE),
        new CorruptedTestFile(fileNames[4], Sets.newHashSet(1, 2, 3, 4),
          dfsClient, NUM_DATANODES, DFS_BLOCK_SIZE)
      };
      int totalMissingBlocks = 0;
      for (CorruptedTestFile ctFile : ctFiles) {
        totalMissingBlocks += ctFile.getTotalMissingBlocks();
      }
      for (CorruptedTestFile ctFile : ctFiles) {
        ctFile.removeBlocks(cluster);
      }
      // Wait for fsck to discover all the missing blocks
      while (true) {
        outStr = runFsck(conf, 1, false, "/");
        String numMissing = null;
        String numCorrupt = null;
        for (String line : outStr.split(LINE_SEPARATOR)) {
          Matcher m = numMissingBlocksPattern.matcher(line);
          if (m.matches()) {
            numMissing = m.group(1);
          }
          m = numCorruptBlocksPattern.matcher(line);
          if (m.matches()) {
            numCorrupt = m.group(1);
          }
          if (numMissing != null && numCorrupt != null) {
            break;
          }
        }
        if (numMissing == null || numCorrupt == null) {
          throw new IOException("failed to find number of missing or corrupt" +
              " blocks in fsck output.");
        }
        if (numMissing.equals(Integer.toString(totalMissingBlocks))) {
          // All deletions visible; no block should be merely corrupt.
          assertTrue(numCorrupt.equals(Integer.toString(0)));
          assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
          break;
        }
        try {
          Thread.sleep(100);
        } catch (InterruptedException ignore) {
        }
      }
      // Copy the non-corrupt blocks of corruptFileName to lost+found.
      outStr = runFsck(conf, 1, false, "/", "-move");
      assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
      // Make sure that we properly copied the block files from the DataNodes
      // to lost+found
      for (CorruptedTestFile ctFile : ctFiles) {
        ctFile.checkSalvagedRemains();
      }
      // Fix the filesystem by removing corruptFileName
      outStr = runFsck(conf, 1, true, "/", "-delete");
      assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
      // Check to make sure we have a healthy filesystem
      outStr = runFsck(conf, 0, true, "/");
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      util.cleanup(fs, topDir);
    } finally {
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      if (cluster != null) { cluster.shutdown(); }
    }
  }
static private class CorruptedTestFile {
final private String name;
final private Set<Integer> blocksToCorrupt;
final private DFSClient dfsClient;
final private int numDataNodes;
final private int blockSize;
final private byte[] initialContents;
public CorruptedTestFile(String name, Set<Integer> blocksToCorrupt,
DFSClient dfsClient, int numDataNodes, int blockSize)
throws IOException {
this.name = name;
this.blocksToCorrupt = blocksToCorrupt;
this.dfsClient = dfsClient;
this.numDataNodes = numDataNodes;
this.blockSize = blockSize;
this.initialContents = cacheInitialContents();
}
public int getTotalMissingBlocks() {
return blocksToCorrupt.size();
}
private byte[] cacheInitialContents() throws IOException {
HdfsFileStatus status = dfsClient.getFileInfo(name);
byte[] content = new byte[(int)status.getLen()];
DFSInputStream in = null;
try {
in = dfsClient.open(name);
IOUtils.readFully(in, content, 0, content.length);
} finally {
in.close();
}
return content;
}
public void removeBlocks(MiniDFSCluster cluster)
throws AccessControlException, FileNotFoundException,
UnresolvedLinkException, IOException {
for (int corruptIdx : blocksToCorrupt) {
// Corrupt a block by deleting it
ExtendedBlock block = dfsClient.getNamenode().getBlockLocations(
name, blockSize * corruptIdx, Long.MAX_VALUE).get(0).getBlock();
for (int i = 0; i < numDataNodes; i++) {
File blockFile = cluster.getBlockFile(i, block);
if(blockFile != null && blockFile.exists()) {
assertTrue(blockFile.delete());
}
}
}
}
public void checkSalvagedRemains() throws IOException {
int chainIdx = 0;
HdfsFileStatus status = dfsClient.getFileInfo(name);
long length = status.getLen();
int numBlocks = (int)((length + blockSize - 1) / blockSize);
DFSInputStream in = null;
byte[] blockBuffer = new byte[blockSize];
try {
for (int blockIdx = 0; blockIdx < numBlocks; blockIdx++) {
if (blocksToCorrupt.contains(blockIdx)) {
if (in != null) {
in.close();
in = null;
}
continue;
}
if (in == null) {
in = dfsClient.open("/lost+found" + name + "/" + chainIdx);
chainIdx++;
}
int len = blockBuffer.length;
if (blockIdx == (numBlocks - 1)) {
// The last block might not be full-length
len = (int)(in.getFileLength() % blockSize);
if (len == 0) len = blockBuffer.length;
}
IOUtils.readFully(in, blockBuffer, 0, len);
int startIdx = blockIdx * blockSize;
for (int i = 0; i < len; i++) {
if (initialContents[startIdx + i] != blockBuffer[i]) {
throw new IOException("salvaged file " + name + " differed " +
"from what we expected on block " + blockIdx);
}
}
}
} finally {
IOUtils.cleanup(null, in);
}
}
}
  /**
   * Verifies that "fsck -move" alone never removes a corrupt file — even
   * after repeated runs — and that adding "-delete" finally removes it and
   * restores a HEALTHY filesystem.
   */
  @Test
  public void testFsckMoveAndDelete() throws Exception {
    final int MAX_MOVE_TRIES = 5;
    DFSTestUtil util = new DFSTestUtil.Builder().
        setName("TestFsckMoveAndDelete").setNumFiles(5).build();
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    try {
      Configuration conf = new HdfsConfiguration();
      conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
      conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1);
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
      String topDir = "/srcdat";
      fs = cluster.getFileSystem();
      cluster.waitActive();
      util.createFiles(fs, topDir);
      util.waitReplication(fs, topDir, (short)3);
      String outStr = runFsck(conf, 0, true, "/");
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      // Corrupt a block by deleting it
      String[] fileNames = util.getFileNames(topDir);
      DFSClient dfsClient = new DFSClient(new InetSocketAddress("localhost",
                                          cluster.getNameNodePort()), conf);
      String corruptFileName = fileNames[0];
      ExtendedBlock block = dfsClient.getNamenode().getBlockLocations(
          corruptFileName, 0, Long.MAX_VALUE).get(0).getBlock();
      // Remove the block file from every datanode so the block is missing.
      for (int i=0; i<4; i++) {
        File blockFile = cluster.getBlockFile(i, block);
        if(blockFile != null && blockFile.exists()) {
          assertTrue(blockFile.delete());
        }
      }
      // We expect the filesystem to be corrupted
      outStr = runFsck(conf, 1, false, "/");
      while (!outStr.contains(NamenodeFsck.CORRUPT_STATUS)) {
        try {
          Thread.sleep(100);
        } catch (InterruptedException ignore) {
        }
        outStr = runFsck(conf, 1, false, "/");
      }
      // After a fsck -move, the corrupted file should still exist.
      for (int i = 0; i < MAX_MOVE_TRIES; i++) {
        outStr = runFsck(conf, 1, true, "/", "-move" );
        assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
        String[] newFileNames = util.getFileNames(topDir);
        boolean found = false;
        for (String f : newFileNames) {
          if (f.equals(corruptFileName)) {
            found = true;
            break;
          }
        }
        assertTrue(found);
      }
      // Fix the filesystem by moving corrupted files to lost+found
      outStr = runFsck(conf, 1, true, "/", "-move", "-delete");
      assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
      // Check to make sure we have healthy filesystem
      outStr = runFsck(conf, 0, true, "/");
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      util.cleanup(fs, topDir);
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      cluster.shutdown();
    } finally {
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  /**
   * Checks handling of files open for write: fsck stays HEALTHY and hides
   * them by default, lists them with -openforwrite, and stops listing once
   * the writer closes the file.
   */
  @Test
  public void testFsckOpenFiles() throws Exception {
    DFSTestUtil util = new DFSTestUtil.Builder().setName("TestFsck").
        setNumFiles(4).build();
    MiniDFSCluster cluster = null;
    FileSystem fs = null;
    try {
      Configuration conf = new HdfsConfiguration();
      conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
      String topDir = "/srcdat";
      String randomString = "HADOOP  ";
      fs = cluster.getFileSystem();
      cluster.waitActive();
      util.createFiles(fs, topDir);
      util.waitReplication(fs, topDir, (short)3);
      String outStr = runFsck(conf, 0, true, "/");
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      // Open a file for writing and do not close for now
      Path openFile = new Path(topDir + "/openFile");
      FSDataOutputStream out = fs.create(openFile);
      int writeCount = 0;
      while (writeCount != 100) {
        out.write(randomString.getBytes());
        writeCount++;
      }
      // We expect the filesystem to be HEALTHY and show one open file
      outStr = runFsck(conf, 0, true, topDir);
      System.out.println(outStr);
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      assertFalse(outStr.contains("OPENFORWRITE"));
      // Use -openforwrite option to list open files
      outStr = runFsck(conf, 0, true, topDir, "-openforwrite");
      System.out.println(outStr);
      assertTrue(outStr.contains("OPENFORWRITE"));
      assertTrue(outStr.contains("openFile"));
      // Close the file
      out.close();
      // Now, fsck should show HEALTHY fs and should not show any open files
      outStr = runFsck(conf, 0, true, topDir);
      System.out.println(outStr);
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      assertFalse(outStr.contains("OPENFORWRITE"));
      util.cleanup(fs, topDir);
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      cluster.shutdown();
    } finally {
      if (fs != null) {try{fs.close();} catch(Exception e){}}
      if (cluster != null) { cluster.shutdown(); }
    }
  }
  /**
   * Overwrites bytes inside the sole replica of a single-replica file,
   * triggers reportBadBlocks via a client read, then verifies fsck reports
   * the filesystem CORRUPT and names the damaged file.
   */
  @Test
  public void testCorruptBlock() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000);
    // Set short retry timeouts so this test runs faster
    conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10);
    FileSystem fs = null;
    DFSClient dfsClient = null;
    LocatedBlocks blocks = null;
    int replicaCount = 0;
    Random random = new Random();
    String outStr = null;
    // Replication factor 1: corrupting the lone replica corrupts the block.
    short factor = 1;
    MiniDFSCluster cluster = null;
    try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    cluster.waitActive();
    fs = cluster.getFileSystem();
    Path file1 = new Path("/testCorruptBlock");
    DFSTestUtil.createFile(fs, file1, 1024, factor, 0);
    // Wait until file replication has completed
    DFSTestUtil.waitReplication(fs, file1, factor);
    ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, file1);
    // Make sure filesystem is in healthy state
    outStr = runFsck(conf, 0, true, "/");
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    // corrupt replicas
    File blockFile = cluster.getBlockFile(0, block);
    if (blockFile != null && blockFile.exists()) {
      // Scribble garbage somewhere in the first half of the block file.
      RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw");
      FileChannel channel = raFile.getChannel();
      String badString = "BADBAD";
      int rand = random.nextInt((int) channel.size()/2);
      raFile.seek(rand);
      raFile.write(badString.getBytes());
      raFile.close();
    }
    // Read the file to trigger reportBadBlocks
    try {
      IOUtils.copyBytes(fs.open(file1), new IOUtils.NullOutputStream(), conf,
                        true);
    } catch (IOException ie) {
      // Ignore exception
    }
    dfsClient = new DFSClient(new InetSocketAddress("localhost",
                               cluster.getNameNodePort()), conf);
    blocks = dfsClient.getNamenode().
               getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
    replicaCount = blocks.get(0).getLocations().length;
    // Wait for the namenode to register the (corrupt) replica.
    while (replicaCount != factor) {
      try {
        Thread.sleep(100);
      } catch (InterruptedException ignore) {
      }
      blocks = dfsClient.getNamenode().
                 getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
      replicaCount = blocks.get(0).getLocations().length;
    }
    assertTrue (blocks.get(0).isCorrupt());
    // Check if fsck reports the same
    outStr = runFsck(conf, 1, true, "/");
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
    assertTrue(outStr.contains("testCorruptBlock"));
    } finally {
      if (cluster != null) {cluster.shutdown();}
    }
  }
  /**
   * With dfs.namenode.replication.min = 2, corrupts one of a block's two
   * replicas so only one valid replica remains, then verifies fsck still
   * reports HEALTHY but flags the block as under the minimum replication.
   */
  @Test
  public void testUnderMinReplicatedBlock() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000);
    // Set short retry timeouts so this test runs faster
    conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10);
    // Set minReplication to 2
    short minReplication=2;
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY,minReplication);
    FileSystem fs = null;
    DFSClient dfsClient = null;
    LocatedBlocks blocks = null;
    int replicaCount = 0;
    Random random = new Random();
    String outStr = null;
    // Target replica count after corruption: one good replica left.
    short factor = 1;
    MiniDFSCluster cluster = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
      cluster.waitActive();
      fs = cluster.getFileSystem();
      Path file1 = new Path("/testUnderMinReplicatedBlock");
      DFSTestUtil.createFile(fs, file1, 1024, minReplication, 0);
      // Wait until file replication has completed
      DFSTestUtil.waitReplication(fs, file1, minReplication);
      ExtendedBlock block = DFSTestUtil.getFirstBlock(fs, file1);
      // Make sure filesystem is in healthy state
      outStr = runFsck(conf, 0, true, "/");
      System.out.println(outStr);
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      // corrupt the first replica
      File blockFile = cluster.getBlockFile(0, block);
      if (blockFile != null && blockFile.exists()) {
        RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw");
        FileChannel channel = raFile.getChannel();
        String badString = "BADBAD";
        int rand = random.nextInt((int) channel.size()/2);
        raFile.seek(rand);
        raFile.write(badString.getBytes());
        raFile.close();
      }
      dfsClient = new DFSClient(new InetSocketAddress("localhost",
          cluster.getNameNodePort()), conf);
      blocks = dfsClient.getNamenode().
          getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
      replicaCount = blocks.get(0).getLocations().length;
      // Loop until only the single good replica is reported; the read
      // inside the loop triggers reportBadBlocks for the corrupt one.
      while (replicaCount != factor) {
        try {
          Thread.sleep(100);
          // Read the file to trigger reportBadBlocks
          try {
            IOUtils.copyBytes(fs.open(file1), new IOUtils.NullOutputStream(), conf,
                true);
          } catch (IOException ie) {
            // Ignore exception
          }
          System.out.println("sleep in try: replicaCount="+replicaCount+"  factor="+factor);
        } catch (InterruptedException ignore) {
        }
        blocks = dfsClient.getNamenode().
            getBlockLocations(file1.toString(), 0, Long.MAX_VALUE);
        replicaCount = blocks.get(0).getLocations().length;
      }
      // Check if fsck reports the same
      outStr = runFsck(conf, 0, true, "/");
      System.out.println(outStr);
      assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
      assertTrue(outStr.contains("UNDER MIN REPL'D BLOCKS:\t1 (100.0 %)"));
      assertTrue(outStr.contains("DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY:\t2"));
    } finally {
      if (cluster != null) {cluster.shutdown();}
    }
  }
  /**
   * Verifies "-replicaDetails" output across a replica's lifecycle: LIVE
   * before decommission, DECOMMISSIONING while the datanode is draining,
   * and DECOMMISSIONED after a second datanode takes over the replica.
   */
  @Test(timeout = 60000)
  public void testFsckReplicaDetails() throws Exception {
    final short REPL_FACTOR = 1;
    short NUM_DN = 1;
    final long blockSize = 512;
    final long fileSize = 1024;
    boolean checkDecommissionInProgress = false;
    String[] racks = { "/rack1" };
    String[] hosts = { "host1" };
    Configuration conf = new Configuration();
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
    MiniDFSCluster cluster;
    DistributedFileSystem dfs;
    cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts).racks(racks).build();
    cluster.waitClusterUp();
    dfs = cluster.getFileSystem();
    // create files
    final String testFile = new String("/testfile");
    final Path path = new Path(testFile);
    DFSTestUtil.createFile(dfs, path, fileSize, REPL_FACTOR, 1000L);
    DFSTestUtil.waitReplication(dfs, path, REPL_FACTOR);
    try {
      // make sure datanode that has replica is fine before decommission
      String fsckOut = runFsck(conf, 0, true, testFile, "-files", "-blocks", "-replicaDetails");
      assertTrue(fsckOut.contains(NamenodeFsck.HEALTHY_STATUS));
      assertTrue(fsckOut.contains("(LIVE)"));
      // decommission datanode
      ExtendedBlock eb = DFSTestUtil.getFirstBlock(dfs, path);
      DatanodeDescriptor dn =
          cluster.getNameNode().getNamesystem().getBlockManager()
              .getBlockCollection(eb.getLocalBlock()).getBlocks()[0].getDatanode(0);
      cluster.getNameNode().getNamesystem().getBlockManager().getDatanodeManager()
          .getDecomManager().startDecommission(dn);
      String dnName = dn.getXferAddr();
      // check the replica status while decommissioning
      fsckOut = runFsck(conf, 0, true, testFile, "-files", "-blocks", "-replicaDetails");
      assertTrue(fsckOut.contains("(DECOMMISSIONING)"));
      // Start 2nd Datanode and wait for decommission to start
      cluster.startDataNodes(conf, 1, true, null, null, null);
      DatanodeInfo datanodeInfo = null;
      // Poll until the decommissioning node finishes (or disappears).
      do {
        Thread.sleep(2000);
        for (DatanodeInfo info : dfs.getDataNodeStats()) {
          if (dnName.equals(info.getXferAddr())) {
            datanodeInfo = info;
          }
        }
        if (!checkDecommissionInProgress && datanodeInfo != null
            && datanodeInfo.isDecommissionInProgress()) {
          checkDecommissionInProgress = true;
        }
      } while (datanodeInfo != null && !datanodeInfo.isDecommissioned());
      // check the replica status after decommission is done
      fsckOut = runFsck(conf, 0, true, testFile, "-files", "-blocks", "-replicaDetails");
      assertTrue(fsckOut.contains("(DECOMMISSIONED)"));
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }
  /** Test if fsck can return -1 in case of failure
   *
   * @throws Exception
   */
  @Test
  public void testFsckError() throws Exception {
    MiniDFSCluster cluster = null;
    try {
      // bring up a one-node cluster
      Configuration conf = new HdfsConfiguration();
      cluster = new MiniDFSCluster.Builder(conf).build();
      String fileName = "/test.txt";
      Path filePath = new Path(fileName);
      FileSystem fs = cluster.getFileSystem();
      // create a one-block file
      DFSTestUtil.createFile(fs, filePath, 1L, (short)1, 1L);
      DFSTestUtil.waitReplication(fs, filePath, (short)1);
      // intentionally corrupt NN data structure
      INodeFile node = (INodeFile) cluster.getNamesystem().dir.getINode
          (fileName, true);
      final BlockInfoContiguous[] blocks = node.getBlocks();
      assertEquals(blocks.length, 1);
      blocks[0].setNumBytes(-1L); // set the block length to be negative
      // run fsck and expect a failure with -1 as the error code
      String outStr = runFsck(conf, -1, true, fileName);
      System.out.println(outStr);
      assertTrue(outStr.contains(NamenodeFsck.FAILURE_STATUS));
      // clean up file system
      fs.delete(filePath, true);
    } finally {
      if (cluster != null) {cluster.shutdown();}
    }
  }
  /** check if option -list-corruptfiles of fsck command works properly */
  @Test
  public void testFsckListCorruptFilesBlocks() throws Exception {
    Configuration conf = new Configuration();
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000);
    conf.setInt(DFSConfigKeys.DFS_DATANODE_DIRECTORYSCAN_INTERVAL_KEY, 1);
    FileSystem fs = null;
    MiniDFSCluster cluster = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).build();
      cluster.waitActive();
      fs = cluster.getFileSystem();
      DFSTestUtil util = new DFSTestUtil.Builder().
          setName("testGetCorruptFiles").setNumFiles(3).setMaxLevels(1).
          setMaxSize(1024).build();
      util.createFiles(fs, "/corruptData", (short) 1);
      util.waitReplication(fs, "/corruptData", (short) 1);
      // String outStr = runFsck(conf, 0, true, "/corruptData", "-list-corruptfileblocks");
      String outStr = runFsck(conf, 0, false, "/corruptData", "-list-corruptfileblocks");
      System.out.println("1. good fsck out: " + outStr);
      assertTrue(outStr.contains("has 0 CORRUPT files"));
      // delete the blocks
      final String bpid = cluster.getNamesystem().getBlockPoolId();
      // Remove every block and metadata file from all storage directories
      // so every file under /corruptData becomes corrupt.
      for (int i=0; i<4; i++) {
        for (int j=0; j<=1; j++) {
          File storageDir = cluster.getInstanceStorageDir(i, j);
          File data_dir = MiniDFSCluster.getFinalizedDir(storageDir, bpid);
          List<File> metadataFiles = MiniDFSCluster.getAllBlockMetadataFiles(
              data_dir);
          if (metadataFiles == null)
            continue;
          for (File metadataFile : metadataFiles) {
            File blockFile = Block.metaToBlockFile(metadataFile);
            assertTrue("Cannot remove file.", blockFile.delete());
            assertTrue("Cannot remove file.", metadataFile.delete());
          }
        }
      }
      // wait for the namenode to see the corruption
      final NamenodeProtocols namenode = cluster.getNameNodeRpc();
      CorruptFileBlocks corruptFileBlocks = namenode
          .listCorruptFileBlocks("/corruptData", null);
      int numCorrupt = corruptFileBlocks.getFiles().length;
      while (numCorrupt == 0) {
        Thread.sleep(1000);
        corruptFileBlocks = namenode
            .listCorruptFileBlocks("/corruptData", null);
        numCorrupt = corruptFileBlocks.getFiles().length;
      }
      outStr = runFsck(conf, -1, true, "/corruptData", "-list-corruptfileblocks");
      System.out.println("2. bad fsck out: " + outStr);
      assertTrue(outStr.contains("has 3 CORRUPT files"));
      // Do a listing on a dir which doesn't have any corrupt blocks and validate
      util.createFiles(fs, "/goodData");
      outStr = runFsck(conf, 0, true, "/goodData", "-list-corruptfileblocks");
      System.out.println("3. good fsck out: " + outStr);
      assertTrue(outStr.contains("has 0 CORRUPT files"));
      util.cleanup(fs,"/corruptData");
      util.cleanup(fs, "/goodData");
    } finally {
      if (cluster != null) {cluster.shutdown();}
    }
  }
/**
 * Test for checking fsck command on illegal arguments should print the proper
 * usage.
 */
@Test
public void testToCheckTheFsckCommandOnIllegalArguments() throws Exception {
  MiniDFSCluster cluster = null;
  try {
    // bring up a one-node cluster
    Configuration conf = new HdfsConfiguration();
    cluster = new MiniDFSCluster.Builder(conf).build();
    String fileName = "/test.txt";
    Path filePath = new Path(fileName);
    FileSystem fs = cluster.getFileSystem();
    // create a one-block file
    DFSTestUtil.createFile(fs, filePath, 1L, (short) 1, 1L);
    DFSTestUtil.waitReplication(fs, filePath, (short) 1);
    // passing an illegal option must not produce a healthy report
    String outStr = runFsck(conf, -1, true, fileName, "-thisIsNotAValidFlag");
    System.out.println(outStr);
    assertFalse(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    // passing multiple paths as arguments is also rejected
    outStr = runFsck(conf, -1, true, "/", fileName);
    System.out.println(outStr);
    assertFalse(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    // clean up file system
    fs.delete(filePath, true);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * Tests that the # of missing block replicas and expected replicas is correct.
 * @throws IOException on cluster or filesystem failure
 */
@Test
public void testFsckMissingReplicas() throws IOException {
  // Desired replication factor
  // Set this higher than NUM_REPLICAS so it's under-replicated
  final short REPL_FACTOR = 2;
  // Number of replicas to actually start
  final short NUM_REPLICAS = 1;
  // Number of blocks to write
  final short NUM_BLOCKS = 3;
  // Set a small-ish blocksize
  final long blockSize = 512;
  Configuration conf = new Configuration();
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  MiniDFSCluster cluster = null;
  DistributedFileSystem dfs = null;
  try {
    // Startup a minicluster
    cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(NUM_REPLICAS).build();
    assertNotNull("Failed Cluster Creation", cluster);
    cluster.waitClusterUp();
    dfs = cluster.getFileSystem();
    assertNotNull("Failed to get FileSystem", dfs);
    // Create a file that will be intentionally under-replicated
    final String pathString = "/testfile";
    final Path path = new Path(pathString);
    long fileLen = blockSize * NUM_BLOCKS;
    DFSTestUtil.createFile(dfs, path, fileLen, REPL_FACTOR, 1);
    NameNode namenode = cluster.getNameNode();
    NetworkTopology nettop = cluster.getNamesystem().getBlockManager()
        .getDatanodeManager().getNetworkTopology();
    Map<String, String[]> pmap = new HashMap<String, String[]>();
    Writer result = new StringWriter();
    PrintWriter out = new PrintWriter(result, true);
    InetAddress remoteAddress = InetAddress.getLocalHost();
    NamenodeFsck fsck = new NamenodeFsck(conf, namenode, nettop, pmap, out,
        NUM_REPLICAS, (short)1, remoteAddress);
    // Run the fsck and check the Result
    final HdfsFileStatus file =
        namenode.getRpcServer().getFileInfo(pathString);
    assertNotNull(file);
    Result res = new Result(conf);
    fsck.check(pathString, file, res);
    // Also print the output from the fsck, for ex post facto sanity checks
    System.out.println(result.toString());
    // One replica short per block, for each of the NUM_BLOCKS blocks
    assertEquals((NUM_BLOCKS*REPL_FACTOR) - (NUM_BLOCKS*NUM_REPLICAS),
        res.missingReplicas);
    assertEquals(NUM_BLOCKS*REPL_FACTOR, res.numExpectedReplicas);
  } finally {
    if (dfs != null) {
      dfs.close();
    }
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * Tests that the # of mis-replicated (badly rack-placed) blocks is correct.
 * @throws IOException on cluster or filesystem failure
 */
@Test
public void testFsckMisPlacedReplicas() throws IOException {
  // Desired replication factor
  final short REPL_FACTOR = 2;
  // Number of replicas to actually start
  short NUM_DN = 2;
  // Number of blocks to write
  final short NUM_BLOCKS = 3;
  // Set a small-ish blocksize
  final long blockSize = 512;
  // Both datanodes sit on the same rack on purpose
  String[] racks = {"/rack1", "/rack1"};
  String[] hosts = {"host1", "host2"};
  Configuration conf = new Configuration();
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  MiniDFSCluster cluster = null;
  DistributedFileSystem dfs = null;
  try {
    // Startup a minicluster
    cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts)
            .racks(racks).build();
    assertNotNull("Failed Cluster Creation", cluster);
    cluster.waitClusterUp();
    dfs = cluster.getFileSystem();
    assertNotNull("Failed to get FileSystem", dfs);
    // Create a file whose replicas all land on the single rack
    final String pathString = "/testfile";
    final Path path = new Path(pathString);
    long fileLen = blockSize * NUM_BLOCKS;
    DFSTestUtil.createFile(dfs, path, fileLen, REPL_FACTOR, 1);
    NameNode namenode = cluster.getNameNode();
    NetworkTopology nettop = cluster.getNamesystem().getBlockManager()
        .getDatanodeManager().getNetworkTopology();
    // Add a new node on different rack, so previous blocks' replicas
    // are considered to be misplaced
    nettop.add(DFSTestUtil.getDatanodeDescriptor("/rack2", "/host3"));
    NUM_DN++;
    Map<String, String[]> pmap = new HashMap<String, String[]>();
    Writer result = new StringWriter();
    PrintWriter out = new PrintWriter(result, true);
    InetAddress remoteAddress = InetAddress.getLocalHost();
    NamenodeFsck fsck = new NamenodeFsck(conf, namenode, nettop, pmap, out,
        NUM_DN, REPL_FACTOR, remoteAddress);
    // Run the fsck and check the Result
    final HdfsFileStatus file =
        namenode.getRpcServer().getFileInfo(pathString);
    assertNotNull(file);
    Result res = new Result(conf);
    fsck.check(pathString, file, res);
    // check misReplicatedBlock number.
    assertEquals(NUM_BLOCKS, res.numMisReplicatedBlocks);
  } finally {
    if (dfs != null) {
      dfs.close();
    }
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/** Test fsck behavior when the checked file disappears (FileNotFound). */
@Test
public void testFsckFileNotFound() throws Exception {
  // Number of replicas to actually start
  final short NUM_REPLICAS = 1;
  Configuration conf = new Configuration();
  NameNode namenode = mock(NameNode.class);
  NetworkTopology nettop = mock(NetworkTopology.class);
  Map<String, String[]> pmap = new HashMap<String, String[]>();
  Writer result = new StringWriter();
  PrintWriter out = new PrintWriter(result, true);
  InetAddress remoteAddress = InetAddress.getLocalHost();
  FSNamesystem fsName = mock(FSNamesystem.class);
  BlockManager blockManager = mock(BlockManager.class);
  DatanodeManager dnManager = mock(DatanodeManager.class);
  when(namenode.getNamesystem()).thenReturn(fsName);
  // every block-location lookup reports the file as vanished
  when(fsName.getBlockLocations(any(FSPermissionChecker.class), anyString(),
      anyLong(), anyLong(),
      anyBoolean(), anyBoolean()))
      .thenThrow(new FileNotFoundException());
  when(fsName.getBlockManager()).thenReturn(blockManager);
  when(blockManager.getDatanodeManager()).thenReturn(dnManager);
  NamenodeFsck fsck = new NamenodeFsck(conf, namenode, nettop, pmap, out,
      NUM_REPLICAS, (short)1, remoteAddress);
  // hand-build a plausible file status for the path fsck will check
  String pathString = "/tmp/testFile";
  long length = 123L;
  boolean isDir = false;
  int blockReplication = 1;
  long blockSize = 128 * 1024L;
  long modTime = 123123123L;
  long accessTime = 123123120L;
  FsPermission perms = FsPermission.getDefault();
  String owner = "foo";
  String group = "bar";
  byte[] symlink = null;
  // string2Bytes provides the path bytes directly; no separate allocation
  byte[] path = DFSUtil.string2Bytes(pathString);
  long fileId = 312321L;
  int numChildren = 1;
  byte storagePolicy = 0;
  HdfsFileStatus file = new HdfsFileStatus(length, isDir, blockReplication,
      blockSize, modTime, accessTime, perms, owner, group, symlink,
      path, fileId, numChildren, null, storagePolicy);
  Result res = new Result(conf);
  try {
    fsck.check(pathString, file, res);
  } catch (Exception e) {
    fail("Unexpected exception " + e.getMessage());
  }
  // a vanished file must not make fsck report corruption
  assertTrue(res.toString().contains("HEALTHY"));
}
/** Test fsck with symlinks in the filesystem */
@Test
public void testFsckSymlink() throws Exception {
  final DFSTestUtil util = new DFSTestUtil.Builder().
      setName(getClass().getSimpleName()).setNumFiles(1).build();
  final Configuration conf = new HdfsConfiguration();
  conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
  MiniDFSCluster cluster = null;
  FileSystem fs = null;
  try {
    // 1ms access-time precision so the access-time comparison below is exact
    final long precision = 1L;
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_ACCESSTIME_PRECISION_KEY, precision);
    conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 10000L);
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
    fs = cluster.getFileSystem();
    final String fileName = "/srcdat";
    util.createFiles(fs, fileName);
    // symlink creation goes through FileContext here (not the FileSystem API)
    final FileContext fc = FileContext.getFileContext(
        cluster.getConfiguration(0));
    final Path file = new Path(fileName);
    final Path symlink = new Path("/srcdat-symlink");
    fc.createSymlink(file, symlink, false);
    util.waitReplication(fs, fileName, (short)3);
    // remember the symlink's access time before fsck runs
    long aTime = fc.getFileStatus(symlink).getAccessTime();
    Thread.sleep(precision);
    setupAuditLogs();
    String outStr = runFsck(conf, 0, true, "/");
    verifyAuditLogs();
    // fsck must not have bumped the symlink's access time
    assertEquals(aTime, fc.getFileStatus(symlink).getAccessTime());
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    // the one symlink created above must be counted in the report
    assertTrue(outStr.contains("Total symlinks:\t\t1"));
    util.cleanup(fs, fileName);
  } finally {
    // best-effort cleanup: close failures are deliberately ignored
    if (fs != null) {try{fs.close();} catch(Exception e){}}
    if (cluster != null) { cluster.shutdown(); }
  }
}
/**
 * Verify that fsck -includeSnapshots also reports files reachable only
 * through snapshots, while a plain -files run does not.
 */
@Test
public void testFsckForSnapshotFiles() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
      .build();
  try {
    // empty filesystem: a snapshot-aware listing is still healthy
    String fsckOut = runFsck(conf, 0, true, "/", "-includeSnapshots",
        "-files");
    assertTrue(fsckOut.contains("HEALTHY"));
    final String fileName = "/srcdat";
    DistributedFileSystem dfs = cluster.getFileSystem();
    Path filePath = new Path(fileName);
    DFSTestUtil.createFile(dfs, filePath, 1024, (short) 1, 1000L);
    Path root = new Path("/");
    dfs.allowSnapshot(root);
    dfs.createSnapshot(root, "mySnapShot");
    // with -includeSnapshots the snapshotted copy shows up in the listing
    fsckOut = runFsck(conf, 0, true, "/", "-includeSnapshots", "-files");
    assertTrue(fsckOut.contains("/.snapshot/mySnapShot/srcdat"));
    // without the flag the snapshot directory is not traversed
    fsckOut = runFsck(conf, 0, true, "/", "-files");
    assertFalse(fsckOut.contains("mySnapShot"));
  } finally {
    cluster.shutdown();
  }
}
/**
 * Test for the fsck -blockId option.
 */
@Test
public void testBlockIdCK() throws Exception {
  final short REPL_FACTOR = 2;
  short NUM_DN = 2;
  final long blockSize = 512;
  String[] racks = {"/rack1", "/rack2"};
  String[] hosts = {"host1", "host2"};
  Configuration conf = new Configuration();
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2);
  MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts)
          .racks(racks).build();
  assertNotNull("Failed Cluster Creation", cluster);
  cluster.waitClusterUp();
  DistributedFileSystem dfs = cluster.getFileSystem();
  assertNotNull("Failed to get FileSystem", dfs);
  DFSTestUtil util = new DFSTestUtil.Builder().
      setName(getClass().getSimpleName()).setNumFiles(1).build();
  //create files
  final String pathString = "/testfile";
  final Path path = new Path(pathString);
  util.createFile(dfs, path, 1024, REPL_FACTOR, 1000L);
  util.waitReplication(dfs, path, REPL_FACTOR);
  // collect the names of all blocks of the file, space-separated
  StringBuilder sb = new StringBuilder();
  for (LocatedBlock lb : util.getAllBlocks(dfs, path)) {
    sb.append(lb.getBlock().getLocalBlock().getBlockName() + " ");
  }
  String[] bIds = sb.toString().split(" ");
  //run fsck
  try {
    //illegal input test
    String runFsckResult = runFsck(conf, 0, true, "/", "-blockId",
        "not_a_block_id");
    assertTrue(runFsckResult.contains("Incorrect blockId format:"));
    //general test
    runFsckResult = runFsck(conf, 0, true, "/", "-blockId", sb.toString());
    assertTrue(runFsckResult.contains(bIds[0]));
    assertTrue(runFsckResult.contains(bIds[1]));
    assertTrue(runFsckResult.contains(
        "Block replica on datanode/rack: host1/rack1 is HEALTHY"));
    assertTrue(runFsckResult.contains(
        "Block replica on datanode/rack: host2/rack2 is HEALTHY"));
  } finally {
    cluster.shutdown();
  }
}
/**
 * Test for the fsck -blockId option while the datanode holding the replica
 * is being decommissioned, and after decommission completes.
 */
@Test
public void testBlockIdCKDecommission() throws Exception {
  final short REPL_FACTOR = 1;
  short NUM_DN = 2;
  final long blockSize = 512;
  boolean checkDecommissionInProgress = false;
  String[] racks = {"/rack1", "/rack2"};
  String[] hosts = {"host1", "host2"};
  Configuration conf = new Configuration();
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2);
  MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts)
          .racks(racks).build();
  assertNotNull("Failed Cluster Creation", cluster);
  cluster.waitClusterUp();
  DistributedFileSystem dfs = cluster.getFileSystem();
  assertNotNull("Failed to get FileSystem", dfs);
  DFSTestUtil util = new DFSTestUtil.Builder().
      setName(getClass().getSimpleName()).setNumFiles(1).build();
  //create files
  final String pathString = "/testfile";
  final Path path = new Path(pathString);
  util.createFile(dfs, path, 1024, REPL_FACTOR, 1000L);
  util.waitReplication(dfs, path, REPL_FACTOR);
  StringBuilder sb = new StringBuilder();
  for (LocatedBlock lb : util.getAllBlocks(dfs, path)) {
    sb.append(lb.getBlock().getLocalBlock().getBlockName() + " ");
  }
  String[] bIds = sb.toString().split(" ");
  try {
    //make sure datanode that has replica is fine before decommission
    String outStr = runFsck(conf, 0, true, "/", "-blockId", bIds[0]);
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    //decommission the datanode holding the block's first replica
    ExtendedBlock eb = util.getFirstBlock(dfs, path);
    DatanodeDescriptor dn = cluster.getNameNode().getNamesystem()
        .getBlockManager().getBlockCollection(eb.getLocalBlock())
        .getBlocks()[0].getDatanode(0);
    cluster.getNameNode().getNamesystem().getBlockManager()
        .getDatanodeManager().getDecomManager().startDecommission(dn);
    String dnName = dn.getXferAddr();
    //wait for decommission start
    DatanodeInfo datanodeInfo = null;
    do {
      Thread.sleep(2000);
      for (DatanodeInfo info : dfs.getDataNodeStats()) {
        if (dnName.equals(info.getXferAddr())) {
          datanodeInfo = info;
        }
      }
      //check decommissioning only once
      if (!checkDecommissionInProgress && datanodeInfo != null
          && datanodeInfo.isDecommissionInProgress()) {
        String fsckOut = runFsck(conf, 3, true, "/", "-blockId", bIds[0]);
        assertTrue(fsckOut.contains(NamenodeFsck.DECOMMISSIONING_STATUS));
        checkDecommissionInProgress = true;
      }
    } while (datanodeInfo != null && !datanodeInfo.isDecommissioned());
    //check decommissioned
    String fsckOut = runFsck(conf, 2, true, "/", "-blockId", bIds[0]);
    assertTrue(fsckOut.contains(NamenodeFsck.DECOMMISSIONED_STATUS));
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * Test for the fsck -blockId option on a deliberately corrupted block.
 */
@Test
public void testBlockIdCKCorruption() throws Exception {
  short NUM_DN = 1;
  final long blockSize = 512;
  Random random = new Random();
  ExtendedBlock block;
  short repFactor = 1;
  String[] racks = {"/rack1"};
  String[] hosts = {"host1"};
  Configuration conf = new Configuration();
  conf.setLong(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, 1000);
  // Set short retry timeouts so this test runs faster
  conf.setInt(HdfsClientConfigKeys.Retry.WINDOW_BASE_KEY, 10);
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
  MiniDFSCluster cluster = null;
  DistributedFileSystem dfs = null;
  try {
    cluster =
        new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts)
            .racks(racks).build();
    assertNotNull("Failed Cluster Creation", cluster);
    cluster.waitClusterUp();
    dfs = cluster.getFileSystem();
    assertNotNull("Failed to get FileSystem", dfs);
    DFSTestUtil util = new DFSTestUtil.Builder().
        setName(getClass().getSimpleName()).setNumFiles(1).build();
    //create files
    final String pathString = "/testfile";
    final Path path = new Path(pathString);
    util.createFile(dfs, path, 1024, repFactor, 1000L);
    util.waitReplication(dfs, path, repFactor);
    StringBuilder sb = new StringBuilder();
    for (LocatedBlock lb : util.getAllBlocks(dfs, path)) {
      sb.append(lb.getBlock().getLocalBlock().getBlockName() + " ");
    }
    String[] bIds = sb.toString().split(" ");
    //make sure block is healthy before we corrupt it
    String outStr = runFsck(conf, 0, true, "/", "-blockId", bIds[0]);
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    // corrupt the replica by overwriting part of the on-disk block file
    block = DFSTestUtil.getFirstBlock(dfs, path);
    File blockFile = cluster.getBlockFile(0, block);
    if (blockFile != null && blockFile.exists()) {
      // try-with-resources closes the file even if a write fails
      try (RandomAccessFile raFile = new RandomAccessFile(blockFile, "rw")) {
        String badString = "BADBAD";
        int rand = random.nextInt((int) raFile.getChannel().size() / 2);
        raFile.seek(rand);
        raFile.write(badString.getBytes());
      }
    }
    util.waitCorruptReplicas(dfs, cluster.getNamesystem(), path, block, 1);
    outStr = runFsck(conf, 1, false, "/", "-blockId", block.getBlockName());
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.CORRUPT_STATUS));
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * Write a small test file ("teststring") under the given directory.
 * The stream is closed via try-with-resources even if the write fails.
 */
private void writeFile(final DistributedFileSystem dfs,
    Path dir, String fileName) throws IOException {
  Path filePath = new Path(dir.toString() + Path.SEPARATOR + fileName);
  try (FSDataOutputStream out = dfs.create(filePath)) {
    out.writeChars("teststring");
  }
}
/**
 * Create a directory, apply the given storage policy to it, and write a
 * small test file into it.
 */
private void writeFile(final DistributedFileSystem dfs,
    String dirName, String fileName, String storagePolicy) throws IOException {
  Path dirPath = new Path(dirName);
  dfs.mkdirs(dirPath);
  dfs.setStoragePolicy(dirPath, storagePolicy);
  writeFile(dfs, dirPath, fileName);
}
/**
 * Test the storage-policy summary display of fsck (-storagepolicies).
 */
@Test
public void testStoragePoliciesCK() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
      .numDataNodes(3)
      .storageTypes(
          new StorageType[] {StorageType.DISK, StorageType.ARCHIVE})
      .build();
  try {
    cluster.waitActive();
    final DistributedFileSystem dfs = cluster.getFileSystem();
    // one file per policy class
    writeFile(dfs, "/testhot", "file", "HOT");
    writeFile(dfs, "/testwarm", "file", "WARM");
    writeFile(dfs, "/testcold", "file", "COLD");
    // every file currently matches its directory's policy
    String summary = runFsck(conf, 0, true, "/", "-storagepolicies");
    assertTrue(summary.contains("DISK:3(HOT)"));
    assertTrue(summary.contains("DISK:1,ARCHIVE:2(WARM)"));
    assertTrue(summary.contains("ARCHIVE:3(COLD)"));
    assertTrue(summary.contains("All blocks satisfy specified storage policy."));
    // retarget two directories to COLD without moving any blocks...
    dfs.setStoragePolicy(new Path("/testhot"), "COLD");
    dfs.setStoragePolicy(new Path("/testwarm"), "COLD");
    // ...so the existing placements no longer satisfy the policies
    summary = runFsck(conf, 0, true, "/", "-storagepolicies");
    assertTrue(summary.contains("DISK:3(HOT)"));
    assertTrue(summary.contains("DISK:1,ARCHIVE:2(WARM)"));
    assertTrue(summary.contains("ARCHIVE:3(COLD)"));
    assertFalse(summary.contains("All blocks satisfy specified storage policy."));
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * Test that blocks on decommissioning/decommissioned hosts are not shown
 * as missing or corrupt by fsck.
 */
@Test
public void testFsckWithDecommissionedReplicas() throws Exception {
  final short REPL_FACTOR = 1;
  short NUM_DN = 2;
  final long blockSize = 512;
  final long fileSize = 1024;
  boolean checkDecommissionInProgress = false;
  String[] racks = {"/rack1", "/rack2"};
  String[] hosts = {"host1", "host2"};
  Configuration conf = new Configuration();
  conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
  conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
  MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DN).hosts(hosts)
          .racks(racks).build();
  assertNotNull("Failed Cluster Creation", cluster);
  cluster.waitClusterUp();
  DistributedFileSystem dfs = cluster.getFileSystem();
  assertNotNull("Failed to get FileSystem", dfs);
  DFSTestUtil util = new DFSTestUtil.Builder().
      setName(getClass().getSimpleName()).setNumFiles(1).build();
  //create files
  final String testFile = "/testfile";
  final Path path = new Path(testFile);
  util.createFile(dfs, path, fileSize, REPL_FACTOR, 1000L);
  util.waitReplication(dfs, path, REPL_FACTOR);
  try {
    // make sure datanode that has replica is fine before decommission
    String outStr = runFsck(conf, 0, true, testFile);
    System.out.println(outStr);
    assertTrue(outStr.contains(NamenodeFsck.HEALTHY_STATUS));
    // decommission the datanode holding the block's first replica
    ExtendedBlock eb = util.getFirstBlock(dfs, path);
    DatanodeDescriptor dn = cluster.getNameNode().getNamesystem()
        .getBlockManager().getBlockCollection(eb.getLocalBlock())
        .getBlocks()[0].getDatanode(0);
    cluster.getNameNode().getNamesystem().getBlockManager()
        .getDatanodeManager().getDecomManager().startDecommission(dn);
    String dnName = dn.getXferAddr();
    // wait for decommission start
    DatanodeInfo datanodeInfo = null;
    do {
      Thread.sleep(2000);
      for (DatanodeInfo info : dfs.getDataNodeStats()) {
        if (dnName.equals(info.getXferAddr())) {
          datanodeInfo = info;
        }
      }
      // while decommissioning, the replica should still be reported
      // healthy(0) rather than corrupt(1); the expected return code is
      // presumably checked inside the runFsck helper — confirm there
      if (!checkDecommissionInProgress && datanodeInfo != null
          && datanodeInfo.isDecommissionInProgress()) {
        runFsck(conf, 0, true, testFile);
        checkDecommissionInProgress = true;
      }
    } while (datanodeInfo != null && !datanodeInfo.isDecommissioned());
    // after decommission completes the replica should still be healthy(0)
    runFsck(conf, 0, true, testFile);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
}
|
busbey/hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java
|
Java
|
apache-2.0
| 62,516
|
/* crc32.c -- compute the CRC-32 of a data stream
* Copyright (C) 1995-2005 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*
* Thanks to Rodney Brown <rbrown64@csc.com.au> for his contribution of faster
* CRC methods: exclusive-oring 32 bits of data at a time, and pre-computing
* tables for updating the shift register in one step with three exclusive-ors
* instead of four steps with four exclusive-ors. This results in about a
* factor of two increase in speed on a Power PC G4 (PPC7455) using gcc -O3.
*/
/* @(#) $Id: crc32.c 788dceb3bcb264d2d0145027de632aa513cc7695 2007-01-19 01:31:01Z Mateusz Loskot $ */
/*
Note on the use of DYNAMIC_CRC_TABLE: there is no mutex or semaphore
protection on the static variables used to control the first-use generation
of the crc tables. Therefore, if you #define DYNAMIC_CRC_TABLE, you should
first call get_crc_table() to initialize the tables before allowing more than
one thread to use crc32().
*/
#ifdef MAKECRCH
# include <stdio.h>
# ifndef DYNAMIC_CRC_TABLE
# define DYNAMIC_CRC_TABLE
# endif /* !DYNAMIC_CRC_TABLE */
#endif /* MAKECRCH */
#include "zutil.h" /* for STDC and FAR definitions */
#define local static
/* Find a four-byte integer type for crc32_little() and crc32_big(). */
#ifndef NOBYFOUR
# ifdef STDC /* need ANSI C limits.h to determine sizes */
# include <limits.h>
# define BYFOUR
# if (UINT_MAX == 0xffffffffUL)
typedef unsigned int u4;
# else
# if (ULONG_MAX == 0xffffffffUL)
typedef unsigned long u4;
# else
# if (USHRT_MAX == 0xffffffffUL)
typedef unsigned short u4;
# else
# undef BYFOUR /* can't find a four-byte integer type! */
# endif
# endif
# endif
# endif /* STDC */
#endif /* !NOBYFOUR */
/* Definitions for doing the crc four data bytes at a time. */
#ifdef BYFOUR
# define REV(w) (((w)>>24)+(((w)>>8)&0xff00)+ \
(((w)&0xff00)<<8)+(((w)&0xff)<<24))
local unsigned long crc32_little OF((unsigned long,
const unsigned char FAR *, unsigned));
local unsigned long crc32_big OF((unsigned long,
const unsigned char FAR *, unsigned));
# define TBLS 8
#else
# define TBLS 1
#endif /* BYFOUR */
/* Local functions for crc concatenation */
local unsigned long gf2_matrix_times OF((unsigned long *mat,
unsigned long vec));
local void gf2_matrix_square OF((unsigned long *square, unsigned long *mat));
#ifdef DYNAMIC_CRC_TABLE
local volatile int crc_table_empty = 1;
local unsigned long FAR crc_table[TBLS][256];
local void make_crc_table OF((void));
#ifdef MAKECRCH
local void write_table OF((FILE *, const unsigned long FAR *));
#endif /* MAKECRCH */
/*
Generate tables for a byte-wise 32-bit CRC calculation on the polynomial:
x^32+x^26+x^23+x^22+x^16+x^12+x^11+x^10+x^8+x^7+x^5+x^4+x^2+x+1.
Polynomials over GF(2) are represented in binary, one bit per coefficient,
with the lowest powers in the most significant bit. Then adding polynomials
is just exclusive-or, and multiplying a polynomial by x is a right shift by
one. If we call the above polynomial p, and represent a byte as the
polynomial q, also with the lowest power in the most significant bit (so the
byte 0xb1 is the polynomial x^7+x^3+x+1), then the CRC is (q*x^32) mod p,
where a mod b means the remainder after dividing a by b.
This calculation is done using the shift-register method of multiplying and
taking the remainder. The register is initialized to zero, and for each
incoming bit, x^32 is added mod p to the register if the bit is a one (where
x^32 mod p is p+x^32 = x^26+...+1), and the register is multiplied mod p by
x (which is shifting right by one and adding x^32 mod p if the bit shifted
out is a one). We start with the highest power (least significant bit) of
q and repeat for all eight bits of q.
The first table is simply the CRC of all possible eight bit values. This is
all the information needed to generate CRCs on data a byte at a time for all
combinations of CRC register values and incoming bytes. The remaining tables
allow for word-at-a-time CRC calculation for both big-endian and little-
endian machines, where a word is four bytes.
*/
/*
 * Generate the CRC-32 lookup tables on first use (DYNAMIC_CRC_TABLE builds).
 * Fills crc_table[0] with byte-at-a-time CRCs for every 8-bit value and,
 * under BYFOUR, tables 1-3 (little-endian word-at-a-time) plus 4-7 (their
 * byte-reversed big-endian counterparts).  Under MAKECRCH the tables are
 * also written out as a compilable crc32.h.
 */
local void make_crc_table()
{
    unsigned long c;
    int n, k;
    unsigned long poly;            /* polynomial exclusive-or pattern */
    /* terms of polynomial defining this crc (except x^32): */
    static volatile int first = 1; /* flag to limit concurrent making */
    static const unsigned char p[] = {0,1,2,4,5,7,8,10,11,12,16,22,23,26};

    /* See if another task is already doing this (not thread-safe, but better
       than nothing -- significantly reduces duration of vulnerability in
       case the advice about DYNAMIC_CRC_TABLE is ignored) */
    if (first) {
        first = 0;
        /* make exclusive-or pattern from polynomial (0xedb88320UL) */
        poly = 0UL;
        for (n = 0; n < sizeof(p)/sizeof(unsigned char); n++)
            poly |= 1UL << (31 - p[n]);
        /* generate a crc for every 8-bit value */
        for (n = 0; n < 256; n++) {
            c = (unsigned long)n;
            for (k = 0; k < 8; k++)
                c = c & 1 ? poly ^ (c >> 1) : c >> 1;
            crc_table[0][n] = c;
        }
#ifdef BYFOUR
        /* generate crc for each value followed by one, two, and three zeros,
           and then the byte reversal of those as well as the first table */
        for (n = 0; n < 256; n++) {
            c = crc_table[0][n];
            crc_table[4][n] = REV(c);
            for (k = 1; k < 4; k++) {
                c = crc_table[0][c & 0xff] ^ (c >> 8);
                crc_table[k][n] = c;
                crc_table[k + 4][n] = REV(c);
            }
        }
#endif /* BYFOUR */
        /* publish the tables: readers spin on this flag below */
        crc_table_empty = 0;
    }
    else {      /* not first */
        /* wait for the other guy to finish (not efficient, but rare) */
        while (crc_table_empty)
            ;
    }
#ifdef MAKECRCH
    /* write out CRC tables to crc32.h */
    {
        FILE *out;
        out = fopen("crc32.h", "w");
        if (out == NULL) return;
        fprintf(out, "/* crc32.h -- tables for rapid CRC calculation\n");
        fprintf(out, " * Generated automatically by crc32.c\n */\n\n");
        fprintf(out, "local const unsigned long FAR ");
        fprintf(out, "crc_table[TBLS][256] =\n{\n {\n");
        write_table(out, crc_table[0]);
# ifdef BYFOUR
        fprintf(out, "#ifdef BYFOUR\n");
        for (k = 1; k < 8; k++) {
            fprintf(out, " },\n {\n");
            write_table(out, crc_table[k]);
        }
        fprintf(out, "#endif\n");
# endif /* BYFOUR */
        fprintf(out, " }\n};\n");
        fclose(out);
    }
#endif /* MAKECRCH */
}
#ifdef MAKECRCH
/* Emit one 256-entry CRC table as C initializer text (MAKECRCH only):
   five entries per line, comma between entries, none after the last. */
local void write_table(out, table)
FILE *out;
const unsigned long FAR *table;
{
    int n;
    for (n = 0; n < 256; n++)
        fprintf(out, "%s0x%08lxUL%s", n % 5 ? "" : " ", table[n],
            n == 255 ? "\n" : (n % 5 == 4 ? ",\n" : ", "));
}
#endif /* MAKECRCH */
#else /* !DYNAMIC_CRC_TABLE */
/* ========================================================================
* Tables of CRC-32s of all single-byte values, made by make_crc_table().
*/
#include "crc32.h"
#endif /* DYNAMIC_CRC_TABLE */
/* =========================================================================
* This function can be used by asm versions of crc32()
*/
/* Return a pointer to the CRC tables, building them first if this is a
   DYNAMIC_CRC_TABLE build and they have not been generated yet.  Exists so
   asm implementations of crc32() can share the same tables. */
const unsigned long FAR * ZEXPORT get_crc_table()
{
#ifdef DYNAMIC_CRC_TABLE
    if (crc_table_empty)
        make_crc_table();
#endif /* DYNAMIC_CRC_TABLE */
    return (const unsigned long FAR *)crc_table;
}
/* ========================================================================= */
#define DO1 crc = crc_table[0][((int)crc ^ (*buf++)) & 0xff] ^ (crc >> 8)
#define DO8 DO1; DO1; DO1; DO1; DO1; DO1; DO1; DO1
/* ========================================================================= */
/* Update the running CRC-32 'crc' with the 'len' bytes at 'buf' and return
   the new CRC.  A Z_NULL buffer returns the initial CRC value (0), per the
   zlib convention for (re)starting a CRC. */
unsigned long ZEXPORT crc32(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
unsigned len;
{
    if (buf == Z_NULL) return 0UL;  /* convention: NULL buffer resets the crc */
#ifdef DYNAMIC_CRC_TABLE
    if (crc_table_empty)
        make_crc_table();
#endif /* DYNAMIC_CRC_TABLE */
#ifdef BYFOUR
    /* dispatch to the word-at-a-time routine matching the host's byte order,
       detected at run time by inspecting the low byte of a u4 set to 1 */
    if (sizeof(void *) == sizeof(ptrdiff_t)) {
        u4 endian;
        endian = 1;
        if (*((unsigned char *)(&endian)))
            return crc32_little(crc, buf, len);
        else
            return crc32_big(crc, buf, len);
    }
#endif /* BYFOUR */
    /* byte-at-a-time fallback; crc is kept pre/post-conditioned by ~ (xor
       with all-ones) as required by the CRC-32 definition */
    crc = crc ^ 0xffffffffUL;
    while (len >= 8) {
        DO8;        /* unrolled: eight table lookups per iteration */
        len -= 8;
    }
    if (len) do {
        DO1;
    } while (--len);
    return crc ^ 0xffffffffUL;
}
#ifdef BYFOUR
/* ========================================================================= */
#define DOLIT4 c ^= *buf4++; \
c = crc_table[3][c & 0xff] ^ crc_table[2][(c >> 8) & 0xff] ^ \
crc_table[1][(c >> 16) & 0xff] ^ crc_table[0][c >> 24]
#define DOLIT32 DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4; DOLIT4
/* ========================================================================= */
/* Word-at-a-time CRC computation for little-endian hosts, using the
   BYFOUR tables 0-3.  Behaves identically to the byte-wise loop in crc32(). */
local unsigned long crc32_little(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
unsigned len;
{
    register u4 c;
    register const u4 FAR *buf4;
    c = (u4)crc;
    c = ~c;                 /* precondition the register */
    /* consume single bytes until buf is aligned on a 4-byte boundary */
    while (len && ((ptrdiff_t)buf & 3)) {
        c = crc_table[0][(c ^ *buf++) & 0xff] ^ (c >> 8);
        len--;
    }
    buf4 = (const u4 FAR *)(const void FAR *)buf;
    /* main loop: 32 bytes (8 words) per iteration, then word by word */
    while (len >= 32) {
        DOLIT32;
        len -= 32;
    }
    while (len >= 4) {
        DOLIT4;
        len -= 4;
    }
    buf = (const unsigned char FAR *)buf4;
    /* trailing bytes */
    if (len) do {
        c = crc_table[0][(c ^ *buf++) & 0xff] ^ (c >> 8);
    } while (--len);
    c = ~c;                 /* postcondition */
    return (unsigned long)c;
}
/* ========================================================================= */
#define DOBIG4 c ^= *++buf4; \
c = crc_table[4][c & 0xff] ^ crc_table[5][(c >> 8) & 0xff] ^ \
crc_table[6][(c >> 16) & 0xff] ^ crc_table[7][c >> 24]
#define DOBIG32 DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4; DOBIG4
/* ========================================================================= */
/* Word-at-a-time CRC computation for big-endian hosts, using the
   byte-reversed BYFOUR tables 4-7.  The CRC is kept byte-swapped (REV)
   in the register for the duration and swapped back before returning. */
local unsigned long crc32_big(crc, buf, len)
unsigned long crc;
const unsigned char FAR *buf;
unsigned len;
{
    register u4 c;
    register const u4 FAR *buf4;
    c = REV((u4)crc);       /* byte-swap crc into big-endian register order */
    c = ~c;                 /* precondition */
    /* consume single bytes until buf is aligned on a 4-byte boundary */
    while (len && ((ptrdiff_t)buf & 3)) {
        c = crc_table[4][(c >> 24) ^ *buf++] ^ (c << 8);
        len--;
    }
    /* DOBIG4 pre-increments buf4, so back up one word before the data */
    buf4 = (const u4 FAR *)(const void FAR *)buf;
    buf4--;
    while (len >= 32) {
        DOBIG32;
        len -= 32;
    }
    while (len >= 4) {
        DOBIG4;
        len -= 4;
    }
    buf4++;                 /* undo the pre-increment offset */
    buf = (const unsigned char FAR *)buf4;
    /* trailing bytes */
    if (len) do {
        c = crc_table[4][(c >> 24) ^ *buf++] ^ (c << 8);
    } while (--len);
    c = ~c;                 /* postcondition */
    return (unsigned long)(REV(c));
}
#endif /* BYFOUR */
#define GF2_DIM 32 /* dimension of GF(2) vectors (length of CRC) */
/* ========================================================================= */
/* Multiply the GF(2) matrix 'mat' by the bit vector 'vec': XOR together
   the rows of 'mat' selected by the set bits of 'vec'. */
static unsigned long gf2_matrix_times(unsigned long *mat, unsigned long vec)
{
    unsigned long sum = 0;

    for (; vec; vec >>= 1, mat++) {
        if (vec & 1)
            sum ^= *mat;
    }
    return sum;
}
/* ========================================================================= */
/* Square the GF(2) matrix mat, writing the result into square:
   each row of the result is mat applied to the corresponding row of mat. */
local void gf2_matrix_square(square, mat)
    unsigned long *square;
    unsigned long *mat;
{
    int n = 0;

    while (n < GF2_DIM) {
        square[n] = gf2_matrix_times(mat, mat[n]);
        n++;
    }
}
/* ========================================================================= */
/* Combine two CRC-32s: given crc1 = CRC(A) and crc2 = CRC(B), return
   CRC(A concatenated with B), where len2 is the length of B in bytes.
   Works by applying the "len2 zero bytes" operator to crc1 as a product
   of precomputed squarings of the one-zero-bit matrix, then XORing in
   crc2. */
uLong ZEXPORT crc32_combine(crc1, crc2, len2)
    uLong crc1;
    uLong crc2;
    z_off_t len2;
{
    int n;
    unsigned long row;
    unsigned long even[GF2_DIM];    /* even-power-of-two zeros operator */
    unsigned long odd[GF2_DIM];     /* odd-power-of-two zeros operator */

    /* degenerate case: appending nothing leaves crc1 unchanged */
    if (len2 == 0)
        return crc1;
    /* put operator for one zero bit in odd: first row is the polynomial,
       remaining rows shift the register by one bit */
    odd[0] = 0xedb88320L;           /* CRC-32 polynomial */
    row = 1;
    for (n = 1; n < GF2_DIM; n++) {
        odd[n] = row;
        row <<= 1;
    }
    /* put operator for two zero bits in even */
    gf2_matrix_square(even, odd);
    /* put operator for four zero bits in odd */
    gf2_matrix_square(odd, even);
    /* apply len2 zeros to crc1 (first square will put the operator for one
       zero byte, eight zero bits, in even); the loop squares once per bit
       of len2, ping-ponging between even and odd */
    do {
        /* apply zeros operator for this bit of len2 */
        gf2_matrix_square(even, odd);
        if (len2 & 1)
            crc1 = gf2_matrix_times(even, crc1);
        len2 >>= 1;
        /* if no more bits set, then done */
        if (len2 == 0)
            break;
        /* another iteration of the loop with odd and even swapped */
        gf2_matrix_square(odd, even);
        if (len2 & 1)
            crc1 = gf2_matrix_times(odd, crc1);
        len2 >>= 1;
        /* if no more bits set, then done */
    } while (len2 != 0);
    /* return combined crc */
    crc1 ^= crc2;
    return crc1;
}
|
naturalatlas/node-gdal
|
deps/libgdal/gdal/frmts/zlib/crc32.c
|
C
|
apache-2.0
| 13,280
|
/*
* Copyright (c) 2014 Wael Chatila / Icegreen Technologies. All Rights Reserved.
* This software is released under the Apache license 2.0
* This file has been used and modified.
* Original file can be found on http://foedus.sourceforge.net
*/
package com.icegreen.greenmail.mail;
import com.icegreen.greenmail.foedus.util.Resource;
import com.icegreen.greenmail.foedus.util.Workspace;
import com.icegreen.greenmail.util.GreenMailUtil;
import com.icegreen.greenmail.util.InternetPrintWriter;
import javax.mail.internet.MimeMessage;
import java.io.*;
import java.util.LinkedList;
import java.util.List;
/**
* Contains information for delivering a mime email.
* <p/>
* <p/>
* Since a MovingMessage may be passed through many queues and
* handlers before it can be safely deleted, destruction is handled
* by reference counting. When an object first obtains a reference
* to a MovingMessage, it should immediately call {@link #acquire()}.
* As soon as it has finished processing, that object must call
* {@link #releaseContent()}.
* </p>
*/
public class MovingMessage {
    private MailAddress returnPath;
    private List<MailAddress> toAddresses = new LinkedList<MailAddress>();
    private Workspace _workspace;
    private Resource _content;
    private MimeMessage message;
    // Reference count guarding _content; see acquire()/releaseContent().
    private int _references = 0;

    /** Returns the live (mutable) list of recipient addresses. */
    public List<MailAddress> getToAddresses() {
        return toAddresses;
    }

    public MovingMessage(Workspace workspace) {
        _workspace = workspace;
    }

    /** Returns the parsed MIME message, or null before content has been read. */
    public MimeMessage getMessage() {
        return message;
    }

    /**
     * Returns a reader over the raw message content.
     *
     * @throws IOException if the underlying content resource cannot be opened
     */
    public Reader getContent()
            throws IOException {
        return _content.getReader();
    }

    /** Registers another holder of this message; pair with {@link #releaseContent()}. */
    public void acquire() {
        _references++;
    }

    /**
     * Drops one reference; once no references remain, the temporary content
     * resource is returned to the workspace.
     * <p>
     * NOTE(review): the decrement and the actual release are offset by one
     * call — the invocation that brings the count to zero does not free the
     * content; the following call does. Callers may depend on this, so the
     * behavior is preserved; confirm before changing.
     * </p>
     */
    public void releaseContent() {
        if (_references > 0) {
            _references--;
        } else if (_content != null) {
            _workspace.release(_content);
            _content = null;
        }
    }

    public MailAddress getReturnPath() {
        return returnPath;
    }

    public void setReturnPath(MailAddress fromAddress) {
        this.returnPath = fromAddress;
    }

    public void addRecipient(MailAddress s) {
        toAddresses.add(s);
    }

    public void removeRecipient(MailAddress s) {
        toAddresses.remove(s);
    }

    /**
     * Reads the contents of the stream until {@code <CRLF>.<CRLF>} is
     * encountered, storing the content in a workspace temp file and parsing
     * it into a {@link MimeMessage}. Lines beginning with an extra dot are
     * un-stuffed (the leading dot is removed).
     * <p>
     * It would be possible and perhaps desirable to prevent the
     * adding of an unnecessary CRLF at the end of the message, but
     * it hardly seems worth 30 seconds of effort.
     * </p>
     *
     * @param in the reader positioned at the start of the message data
     * @throws EOFException if the stream ends before the terminating dot line
     * @throws IOException  if reading or writing the content fails
     */
    public void readDotTerminatedContent(BufferedReader in)
            throws IOException {
        _content = _workspace.getTmpFile();
        Writer data = _content.getWriter();
        PrintWriter dataWriter = new InternetPrintWriter(data);
        // Close the writer on every exit path (the original leaked it when
        // an EOFException or I/O error was thrown mid-read).
        try {
            while (true) {
                String line = in.readLine();
                if (line == null)
                    throw new EOFException("Did not receive <CRLF>.<CRLF>");
                if (".".equals(line)) {
                    break;
                } else if (line.startsWith(".")) {
                    // un-stuff the transparency dot
                    dataWriter.println(line.substring(1));
                } else {
                    dataWriter.println(line);
                }
            }
        } finally {
            dataWriter.close();
        }
        try {
            message = GreenMailUtil.newMimeMessage(_content.getAsString());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
|
jianglili007/greenmail
|
greenmail-core/src/main/java/com/icegreen/greenmail/mail/MovingMessage.java
|
Java
|
apache-2.0
| 3,641
|
/*
* UnlinkEvent.h
*
* Created on: May 3, 2012
* Author: augcampos
*/
#ifndef UNLINKEVENT_H_
#define UNLINKEVENT_H_
#include "BridgeEvent.h"
namespace asteriskcpp {
/**
* An UnlinkEvent is triggered when a link between two voice channels is discontinued, for example,
* just before call completion.<p>
* It is implemented in <code>channel.c</code>
*
* @deprecated as of 1.0.0, use {@link org.asteriskjava.manager.event.BridgeEvent} and
* {@link BridgeEvent#isUnlink()} instead
*/
class UnlinkEvent : public BridgeEvent {
public:
    /** Constructs the event from the raw key/value payload string
        received from the Asterisk Manager Interface. */
    UnlinkEvent(const std::string & values);
    virtual ~UnlinkEvent();
};
} /* namespace asteriskcpp */
#endif /* UNLINKEVENT_H_ */
|
tiijima/asterisk-cpp
|
asterisk-cpp/asteriskcpp/manager/events/UnlinkEvent.h
|
C
|
apache-2.0
| 750
|
/*
* Copyright 2010 Jon S Akhtar (Sylvanaar)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sylvanaar.idea.Lua;
import com.intellij.openapi.fileTypes.*;
import org.jetbrains.annotations.*;
/**
* Created by IntelliJ IDEA.
* User: Jon S Akhtar
* Date: Apr 26, 2010
* Time: 3:39:01 PM
*/
public class LuaFileTypeLoader extends FileTypeFactory {
public void createFileTypes(@NotNull FileTypeConsumer consumer) {
consumer.consume(LuaFileType.LUA_FILE_TYPE,
LuaFileType.EXTENSION_FILE_NAME_MATCHERS);
}
}
|
internetisalie/lua-for-idea
|
src/main/java/com/sylvanaar/idea/Lua/LuaFileTypeLoader.java
|
Java
|
apache-2.0
| 1,114
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.