code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/* global angular */
'use strict';
// Angular module that hosts the participant-dialog controller registered below.
var controllers = angular.module('participantDialogControllers', []);
/**
* The New Conversation Controller provides a UI for creating a new Conversation.
* This consists of a place to edit a title bar, a list of users to select,
* and a place to enter a first message.
*/
controllers.controller('participantsDialogCtrl', function($scope) {

  /**
   * Collects the Identity objects for every checked checkbox inside the
   * participant list. Still a DOM-based approach; Angular developers are
   * welcome to improve on this and submit a PR :-)
   */
  function getSelectedUsers() {
    var checked = document.querySelectorAll('.participant-list :checked');
    return Array.prototype.slice.call(checked).map(function(input) {
      return $scope.appCtrlState.client.getIdentity(input.value);
    });
  }

  /**
   * Creates a Conversation from the currently selected participants and
   * points the URL at it.
   *
   * See: http://static.layer.com/sdk/docs/#!/api/layer.Conversation
   *
   * Does nothing when no participants are selected; ideally some helpful
   * error would be reported to the user instead.
   */
  $scope.createConversation = function() {
    var participants = getSelectedUsers();
    if (!participants.length) {
      return;
    }

    // Distinct Conversations only make sense for one-on-one chats.
    var conversation = $scope.appCtrlState.client.createConversation({
      participants: participants,
      distinct: participants.length === 1,
    });

    // Update our location to point at the new Conversation.
    location.hash = '#' + conversation.id.substring(8);

    // Reset the checkboxes for the next dialog use.
    var boxes = document.querySelectorAll('.participant-list :checked');
    Array.prototype.slice.call(boxes).forEach(function(input) {
      input.checked = false;
    });
  };
});
| layerhq/layer-js-sampleapps | examples/websdk-samples/angular/app/participant-dialog-controllers.js | JavaScript | apache-2.0 | 2,024 |
/**
* Created by mario (https://github.com/hyprstack) on 10/07/15.
*/
var imageUpLoad = Backbone.View.extend({
    template: _.template(document.getElementById("file-uploader-template").innerHTML),

    // --- options passed in through initialize (required) ---
    _file: null,            // target File object
    cb: null,               // callback invoked with the resulting data URL
    maxFileSize: null,      // compared against File#size, which is in bytes;
                            // NOTE(review): original comment said "megabytes" -- confirm units with callers
    maxHeight: null,        // pixels - resize target
    maxWidth: null,         // pixels - resize target
    minWidth: null,         // pixels - minimum acceptable source width
    maxAllowedHeight: null, // pixels - sources taller than this are resized
    maxAllowedWidth: null,  // pixels - sources wider than this are resized

    // --- determined from the loaded image (set by mainFn) ---
    sourceWidth: null,
    sourceHeight: null,

    /**
     * Copies the required options onto the view instance.
     * @param {Object} options - file, cb, maxHeight, maxWidth, maxFileSize,
     *     minWidth, maxAllowedHeight, maxAllowedWidth.
     */
    initialize: function (options) {
        this._file = options.file;
        this.cb = options.cb;
        this.maxHeight = options.maxHeight;
        this.maxWidth = options.maxWidth;
        this.maxFileSize = options.maxFileSize;
        this.minWidth = options.minWidth;
        this.maxAllowedHeight = options.maxAllowedHeight;
        this.maxAllowedWidth = options.maxAllowedWidth;
    },

    /**
     * Renders the template and kicks off processing of the target file.
     * @returns {imageUpLoad} this, for chaining.
     */
    render: function () {
        this.setElement(this.template());
        this.mainFn(this._file);
        return this;
    },

    /**
     * Reads file metadata to determine whether EXIF data exists, then hands
     * off to resizeImage with the parsed data object.
     * @param {File} file
     * @param {Function} [callback] - unused; kept for signature compatibility
     *     (results are delivered via this.cb).
     */
    readFileData: function (file, callback) {
        loadImage.parseMetaData(
            file,
            _.bind(this.resizeImage, this, file, callback)
        );
    },

    /**
     * Resizes the image if it needs to be resized; otherwise simply corrects
     * orientation using the EXIF data when present.
     * @param {File} file
     * @param {Function} [callback] - unused; kept for signature compatibility.
     * @param {Object} data - metadata parsed by loadImage.parseMetaData.
     */
    resizeImage: function (file, callback, data) {
        var options = this.sizeConfig(file);
        if (data.exif) {
            options.orientation = data.exif.get('Orientation');
        }
        loadImage(
            file,
            _.bind(this.returnDataUrl, this),
            options
        );
    },

    /**
     * Converts the processed canvas to a PNG data URL and passes it to the
     * caller-supplied callback.
     * @param {HTMLCanvasElement} src - canvas produced by loadImage.
     */
    returnDataUrl: function (src) {
        var dataUrl = src.toDataURL("image/png");
        this.cb(dataUrl);
    },

    /**
     * Validates the source width, then starts reading/resizing.
     * Triggers the "image-small" event and aborts when the image is narrower
     * than minWidth.
     * @param {File} file
     */
    transformImg: function (file) {
        if (this.sourceWidth < this.minWidth) {
            this.trigger("image-small");
            return;
        }
        this.readFileData(file);
    },

    /**
     * Determines the width and height of the source file and calls the
     * transform function. Dimensions can only be read once the Image has
     * finished loading, hence the nested onload handlers.
     * @param {File} file
     */
    mainFn: function (file) {
        var fr = new FileReader();
        var that = this;
        fr.onloadend = function () {
            var _img = new Image();
            _img.onload = function () {
                that.sourceWidth = _img.width;
                that.sourceHeight = _img.height;
                that.transformImg(file);
            };
            _img.src = fr.result;
        };
        fr.readAsDataURL(file);
    },

    /**
     * Builds the loadImage options for this file.
     *
     * Bug fix: the original code tested `size > maxFileSize` and
     * `size < maxFileSize` in two branches that returned the identical resize
     * configuration, so the file size never influenced the outcome -- and a
     * file whose size equalled maxFileSize exactly fell through to the
     * non-resizing branch even when its dimensions exceeded the allowed
     * maximums. Only the dimension check matters; it is now applied alone.
     *
     * @param {File} file - unused now; kept for signature compatibility.
     * @returns {Object} options object for loadImage.
     */
    sizeConfig: function (file) {
        if (this.sourceHeight > this.maxAllowedHeight || this.sourceWidth > this.maxAllowedWidth) {
            return this.resizeConf();
        }
        return {
            contain: true,
            canvas: true
        };
    },

    /**
     * Options for a contained resize down to maxWidth x maxHeight.
     * @returns {Object} options object for loadImage.
     */
    resizeConf: function () {
        return {
            contain: true,
            maxWidth: this.maxWidth,
            maxHeight: this.maxHeight,
            canvas: true
        };
    }
});
| hevnly/js-image-resizer | js/views/image-upload.js | JavaScript | apache-2.0 | 3,693 |
import React from 'react';
import {storiesOf} from '@kadira/storybook';
import 'codemirror/mode/python/python';
import {CodeWindow} from '../CodeWindow';
class ColorPickSpan extends React.Component {
static propTypes = {
onChange: React.PropTypes.func,
color: React.PropTypes.string,
span: React.PropTypes.object,
children: React.PropTypes.node,
};
mouseOver = () => {
this.props.onChange([{color: this.props.color, span: this.props.span}]);
}
mouseOut = () => {
this.props.onChange([]);
}
render() {
return (
<span
style={{
backgroundColor: this.props.color
}}
onMouseOver={this.mouseOver}
onMouseOut={this.mouseOut}
>
{this.props.children}
</span>
);
}
}
// Storybook helper: two hover-sensitive ColorPickSpan labels wired to a
// CodeWindow, so hovering a label highlights the matching source span.
class HighlightPicker extends React.Component {
  // highlights: the {color, span} descriptors currently shown in the CodeWindow.
  state = { highlights: [] }

  // Callback handed to each ColorPickSpan; receives the hovered highlight
  // list (or [] on mouse-out) and re-renders with it.
  changeHighlight = (highlights) => {
    this.setState({
      highlights: highlights
    });
  }

  render() {
    return (
      <div>
        <div>
          <ColorPickSpan
            onChange={this.changeHighlight}
            color="#eee"
            span={{from:{line: 0, ch: 0}, to: {line:0, ch: 5}}}
          >
            Gray
          </ColorPickSpan>
          <ColorPickSpan
            onChange={this.changeHighlight}
            color="blue"
            span={{from:{line: 1, ch: 5}, to: {line:1, ch: 10}}}
          >
            Blue
          </ColorPickSpan>
        </div>
        <CodeWindow
          source={"print('Ahoy world')\ncheck: 2 + 2 is 4 end"}
          codemirrorOptions={{ mode: "python" }}
          highlights={this.state.highlights}
        />
      </div>
    );
  }
}
// Register the CodeWindow stories with Storybook.
storiesOf("CodeWindow", module)
  // Plain python source with no highlights.
  .add("withSource", () => (
    <CodeWindow
      source={"print('Ahoy, world!')"}
      codemirrorOptions={{
        mode: "python"
      }}
    />
  ))
  // A static highlight over characters 1-5 of the first line.
  .add("highlighted", () => (
    <CodeWindow
      source={"print('Ahoy, world!')"}
      codemirrorOptions={{
        mode: "python"
      }}
      highlights={[
        {
          color: "#eeeeee",
          span: {from: {line: 0, ch: 1}, to: {line: 0, ch: 5}}
        }
      ]}
    />
  ))
  // Interactive story: hover the labels to drive the highlights prop.
  .add("highlightChoice", () => (
    <HighlightPicker />
  ));
| pcardune/pyret-ide | src/components/stories/CodeWindow.js | JavaScript | apache-2.0 | 2,215 |
package org.gradle.test.performance.mediummonolithicjavaproject.p156;
import org.junit.Test;
import static org.junit.Assert.*;
public class Test3133 {
Production3133 objectUnderTest = new Production3133();
@Test
public void testProperty0() {
String value = "value";
objectUnderTest.setProperty0(value);
assertEquals(value, objectUnderTest.getProperty0());
}
@Test
public void testProperty1() {
String value = "value";
objectUnderTest.setProperty1(value);
assertEquals(value, objectUnderTest.getProperty1());
}
@Test
public void testProperty2() {
String value = "value";
objectUnderTest.setProperty2(value);
assertEquals(value, objectUnderTest.getProperty2());
}
@Test
public void testProperty3() {
String value = "value";
objectUnderTest.setProperty3(value);
assertEquals(value, objectUnderTest.getProperty3());
}
@Test
public void testProperty4() {
String value = "value";
objectUnderTest.setProperty4(value);
assertEquals(value, objectUnderTest.getProperty4());
}
@Test
public void testProperty5() {
String value = "value";
objectUnderTest.setProperty5(value);
assertEquals(value, objectUnderTest.getProperty5());
}
@Test
public void testProperty6() {
String value = "value";
objectUnderTest.setProperty6(value);
assertEquals(value, objectUnderTest.getProperty6());
}
@Test
public void testProperty7() {
String value = "value";
objectUnderTest.setProperty7(value);
assertEquals(value, objectUnderTest.getProperty7());
}
@Test
public void testProperty8() {
String value = "value";
objectUnderTest.setProperty8(value);
assertEquals(value, objectUnderTest.getProperty8());
}
@Test
public void testProperty9() {
String value = "value";
objectUnderTest.setProperty9(value);
assertEquals(value, objectUnderTest.getProperty9());
}
} | oehme/analysing-gradle-performance | my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p156/Test3133.java | Java | apache-2.0 | 2,111 |
package org.gradle.test.performance.mediummonolithicjavaproject.p475;
import org.junit.Test;
import static org.junit.Assert.*;
public class Test9508 {
Production9508 objectUnderTest = new Production9508();
@Test
public void testProperty0() {
String value = "value";
objectUnderTest.setProperty0(value);
assertEquals(value, objectUnderTest.getProperty0());
}
@Test
public void testProperty1() {
String value = "value";
objectUnderTest.setProperty1(value);
assertEquals(value, objectUnderTest.getProperty1());
}
@Test
public void testProperty2() {
String value = "value";
objectUnderTest.setProperty2(value);
assertEquals(value, objectUnderTest.getProperty2());
}
@Test
public void testProperty3() {
String value = "value";
objectUnderTest.setProperty3(value);
assertEquals(value, objectUnderTest.getProperty3());
}
@Test
public void testProperty4() {
String value = "value";
objectUnderTest.setProperty4(value);
assertEquals(value, objectUnderTest.getProperty4());
}
@Test
public void testProperty5() {
String value = "value";
objectUnderTest.setProperty5(value);
assertEquals(value, objectUnderTest.getProperty5());
}
@Test
public void testProperty6() {
String value = "value";
objectUnderTest.setProperty6(value);
assertEquals(value, objectUnderTest.getProperty6());
}
@Test
public void testProperty7() {
String value = "value";
objectUnderTest.setProperty7(value);
assertEquals(value, objectUnderTest.getProperty7());
}
@Test
public void testProperty8() {
String value = "value";
objectUnderTest.setProperty8(value);
assertEquals(value, objectUnderTest.getProperty8());
}
@Test
public void testProperty9() {
String value = "value";
objectUnderTest.setProperty9(value);
assertEquals(value, objectUnderTest.getProperty9());
}
} | oehme/analysing-gradle-performance | my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p475/Test9508.java | Java | apache-2.0 | 2,111 |
// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.cache.serialize.entities;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.server.cache.serialize.entities.LabelTypeSerializer.deserialize;
import static com.google.gerrit.server.cache.serialize.entities.LabelTypeSerializer.serialize;
import com.google.common.collect.ImmutableList;
import com.google.gerrit.entities.LabelType;
import com.google.gerrit.entities.LabelValue;
import java.util.Optional;
import org.junit.Test;
/**
 * Round-trip tests for {@link LabelTypeSerializer}: a {@link LabelType}
 * serialized and then deserialized must compare equal to the original.
 */
public class LabelTypeSerializerTest {
  /**
   * A LabelType with every optional field set to a non-default value
   * (boolean flags are inverted from their defaults), so the round trip
   * exercises each field handled by the serializer.
   */
  static final LabelType ALL_VALUES_SET =
      LabelType.builder(
              "name",
              ImmutableList.of(
                  LabelValue.create((short) 0, "no vote"),
                  LabelValue.create((short) 1, "approved")))
          .setDescription(Optional.of("description"))
          .setCanOverride(!LabelType.DEF_CAN_OVERRIDE)
          .setAllowPostSubmit(!LabelType.DEF_ALLOW_POST_SUBMIT)
          .setIgnoreSelfApproval(!LabelType.DEF_IGNORE_SELF_APPROVAL)
          .setRefPatterns(ImmutableList.of("refs/heads/*", "refs/tags/*"))
          .setDefaultValue((short) 1)
          .setCopyCondition("is:ANY")
          .setCopyAnyScore(!LabelType.DEF_COPY_ANY_SCORE)
          .setCopyMaxScore(!LabelType.DEF_COPY_MAX_SCORE)
          .setCopyMinScore(!LabelType.DEF_COPY_MIN_SCORE)
          .setCopyAllScoresIfListOfFilesDidNotChange(
              !LabelType.DEF_COPY_ALL_SCORES_IF_LIST_OF_FILES_DID_NOT_CHANGE)
          .setCopyAllScoresOnMergeFirstParentUpdate(
              !LabelType.DEF_COPY_ALL_SCORES_ON_MERGE_FIRST_PARENT_UPDATE)
          .setCopyAllScoresOnTrivialRebase(!LabelType.DEF_COPY_ALL_SCORES_ON_TRIVIAL_REBASE)
          .setCopyAllScoresIfNoCodeChange(!LabelType.DEF_COPY_ALL_SCORES_IF_NO_CODE_CHANGE)
          .setCopyAllScoresIfNoChange(!LabelType.DEF_COPY_ALL_SCORES_IF_NO_CHANGE)
          .setCopyValues(ImmutableList.of((short) 0, (short) 1))
          .setMaxNegative((short) -1)
          .setMaxPositive((short) 1)
          .build();

  @Test
  public void roundTrip() {
    assertThat(deserialize(serialize(ALL_VALUES_SET))).isEqualTo(ALL_VALUES_SET);
  }

  /** Nullable fields (refPatterns, copyCondition) must also survive the round trip. */
  @Test
  public void roundTripWithMinimalValues() {
    LabelType autoValue =
        ALL_VALUES_SET.toBuilder().setRefPatterns(null).setCopyCondition(null).build();
    assertThat(deserialize(serialize(autoValue))).isEqualTo(autoValue);
  }
}
| GerritCodeReview/gerrit | javatests/com/google/gerrit/server/cache/serialize/entities/LabelTypeSerializerTest.java | Java | apache-2.0 | 3,013 |
/*
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.batch.api.chunk;
import java.io.Serializable;
/**
* The AbstractItemReader provides default implementations
* of less commonly implemented methods.
*
*/
public abstract class AbstractItemReader implements ItemReader {
    /**
     * Override this method if the ItemReader requires
     * any open time processing.
     * The default implementation does nothing.
     *
     * @param checkpoint the last checkpoint for this ItemReader - may be null.
     * @throws Exception (or subclass) if an error occurs.
     */
    @Override
    public void open(Serializable checkpoint) throws Exception { }

    /**
     * Override this method if the ItemReader requires
     * any close time processing.
     * The default implementation does nothing.
     *
     * @throws Exception (or subclass) if an error occurs.
     */
    @Override
    public void close() throws Exception { }

    /**
     * Implement read logic for the ItemReader in this
     * method.
     *
     * @return next item or null
     * @throws Exception (or subclass) if an error occurs.
     */
    @Override
    public abstract Object readItem() throws Exception;

    /**
     * Override this method if the ItemReader supports
     * checkpoints.
     * The default implementation returns null.
     *
     * @return checkpoint data
     * @throws Exception (or subclass) if an error occurs.
     */
    @Override
    public Serializable checkpointInfo() throws Exception {
        return null;
    }
}
| engagepoint/jbatch | JSR352.API/src/javax/batch/api/chunk/AbstractItemReader.java | Java | apache-2.0 | 2,171 |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Cloud
module Language
module V1beta2
# ================================================================ #
#
# Represents the input to API methods.
# @!attribute [rw] type
# @return [Google::Cloud::Language::V1beta2::Document::Type]
# Required. If the type is not set or is `TYPE_UNSPECIFIED`,
# returns an `INVALID_ARGUMENT` error.
# @!attribute [rw] content
# @return [String]
# The content of the input in string format.
# @!attribute [rw] gcs_content_uri
# @return [String]
# The Google Cloud Storage URI where the file content is located.
# This URI must be of the form: gs://bucket_name/object_name. For more
# details, see https://cloud.google.com/storage/docs/reference-uris.
# NOTE: Cloud Storage object versioning is not supported.
# @!attribute [rw] language
# @return [String]
# The language of the document (if not specified, the language is
# automatically detected). Both ISO and BCP-47 language codes are
# accepted.<br>
# [Language Support](https://cloud.google.com/natural-language/docs/languages)
# lists currently supported languages for each API method.
# If the language (either specified by the caller or automatically detected)
# is not supported by the called API method, an `INVALID_ARGUMENT` error
# is returned.
class Document
# The document types enum.
module Type
# The content type is not specified.
TYPE_UNSPECIFIED = 0
# Plain text
PLAIN_TEXT = 1
# HTML
HTML = 2
end
end
# Represents a sentence in the input document.
# @!attribute [rw] text
# @return [Google::Cloud::Language::V1beta2::TextSpan]
# The sentence text.
# @!attribute [rw] sentiment
# @return [Google::Cloud::Language::V1beta2::Sentiment]
# For calls to {AnalyzeSentiment} or if
# {Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features#extract_document_sentiment AnnotateTextRequest::Features#extract_document_sentiment} is set to
# true, this field will contain the sentiment for the sentence.
class Sentence; end
# Represents a phrase in the text that is a known entity, such as
# a person, an organization, or location. The API associates information, such
# as salience and mentions, with entities.
# @!attribute [rw] name
# @return [String]
# The representative name for the entity.
# @!attribute [rw] type
# @return [Google::Cloud::Language::V1beta2::Entity::Type]
# The entity type.
# @!attribute [rw] metadata
# @return [Hash{String => String}]
# Metadata associated with the entity.
#
# Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
# available. The associated keys are "wikipedia_url" and "mid", respectively.
# @!attribute [rw] salience
# @return [Float]
# The salience score associated with the entity in the [0, 1.0] range.
#
# The salience score for an entity provides information about the
# importance or centrality of that entity to the entire document text.
# Scores closer to 0 are less salient, while scores closer to 1.0 are highly
# salient.
# @!attribute [rw] mentions
# @return [Array<Google::Cloud::Language::V1beta2::EntityMention>]
# The mentions of this entity in the input document. The API currently
# supports proper noun mentions.
# @!attribute [rw] sentiment
# @return [Google::Cloud::Language::V1beta2::Sentiment]
# For calls to {AnalyzeEntitySentiment} or if
# {Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features#extract_entity_sentiment AnnotateTextRequest::Features#extract_entity_sentiment} is set to
# true, this field will contain the aggregate sentiment expressed for this
# entity in the provided document.
class Entity
# The type of the entity.
module Type
# Unknown
UNKNOWN = 0
# Person
PERSON = 1
# Location
LOCATION = 2
# Organization
ORGANIZATION = 3
# Event
EVENT = 4
# Work of art
WORK_OF_ART = 5
# Consumer goods
CONSUMER_GOOD = 6
# Other types
OTHER = 7
end
end
# Represents the smallest syntactic building block of the text.
# @!attribute [rw] text
# @return [Google::Cloud::Language::V1beta2::TextSpan]
# The token text.
# @!attribute [rw] part_of_speech
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech]
# Parts of speech tag for this token.
# @!attribute [rw] dependency_edge
# @return [Google::Cloud::Language::V1beta2::DependencyEdge]
# Dependency tree parse for this token.
# @!attribute [rw] lemma
# @return [String]
# [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
class Token; end
# Represents the feeling associated with the entire text or entities in
# the text.
# @!attribute [rw] magnitude
# @return [Float]
# A non-negative number in the [0, +inf) range, which represents
# the absolute magnitude of sentiment regardless of score (positive or
# negative).
# @!attribute [rw] score
# @return [Float]
# Sentiment score between -1.0 (negative sentiment) and 1.0
# (positive sentiment).
class Sentiment; end
# Represents part of speech information for a token.
# @!attribute [rw] tag
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Tag]
# The part of speech tag.
# @!attribute [rw] aspect
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Aspect]
# The grammatical aspect.
# @!attribute [rw] case
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Case]
# The grammatical case.
# @!attribute [rw] form
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Form]
# The grammatical form.
# @!attribute [rw] gender
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Gender]
# The grammatical gender.
# @!attribute [rw] mood
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Mood]
# The grammatical mood.
# @!attribute [rw] number
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Number]
# The grammatical number.
# @!attribute [rw] person
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Person]
# The grammatical person.
# @!attribute [rw] proper
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Proper]
# The grammatical properness.
# @!attribute [rw] reciprocity
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Reciprocity]
# The grammatical reciprocity.
# @!attribute [rw] tense
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Tense]
# The grammatical tense.
# @!attribute [rw] voice
# @return [Google::Cloud::Language::V1beta2::PartOfSpeech::Voice]
# The grammatical voice.
class PartOfSpeech
# The part of speech tags enum.
module Tag
# Unknown
UNKNOWN = 0
# Adjective
ADJ = 1
# Adposition (preposition and postposition)
ADP = 2
# Adverb
ADV = 3
# Conjunction
CONJ = 4
# Determiner
DET = 5
# Noun (common and proper)
NOUN = 6
# Cardinal number
NUM = 7
# Pronoun
PRON = 8
# Particle or other function word
PRT = 9
# Punctuation
PUNCT = 10
# Verb (all tenses and modes)
VERB = 11
# Other: foreign words, typos, abbreviations
X = 12
# Affix
AFFIX = 13
end
# The characteristic of a verb that expresses time flow during an event.
module Aspect
# Aspect is not applicable in the analyzed language or is not predicted.
ASPECT_UNKNOWN = 0
# Perfective
PERFECTIVE = 1
# Imperfective
IMPERFECTIVE = 2
# Progressive
PROGRESSIVE = 3
end
# The grammatical function performed by a noun or pronoun in a phrase,
# clause, or sentence. In some languages, other parts of speech, such as
# adjective and determiner, take case inflection in agreement with the noun.
module Case
# Case is not applicable in the analyzed language or is not predicted.
CASE_UNKNOWN = 0
# Accusative
ACCUSATIVE = 1
# Adverbial
ADVERBIAL = 2
# Complementive
COMPLEMENTIVE = 3
# Dative
DATIVE = 4
# Genitive
GENITIVE = 5
# Instrumental
INSTRUMENTAL = 6
# Locative
LOCATIVE = 7
# Nominative
NOMINATIVE = 8
# Oblique
OBLIQUE = 9
# Partitive
PARTITIVE = 10
# Prepositional
PREPOSITIONAL = 11
# Reflexive
REFLEXIVE_CASE = 12
# Relative
RELATIVE_CASE = 13
# Vocative
VOCATIVE = 14
end
# Depending on the language, Form can be categorizing different forms of
# verbs, adjectives, adverbs, etc. For example, categorizing inflected
# endings of verbs and adjectives or distinguishing between short and long
# forms of adjectives and participles
module Form
# Form is not applicable in the analyzed language or is not predicted.
FORM_UNKNOWN = 0
# Adnomial
ADNOMIAL = 1
# Auxiliary
AUXILIARY = 2
# Complementizer
COMPLEMENTIZER = 3
# Final ending
FINAL_ENDING = 4
# Gerund
GERUND = 5
# Realis
REALIS = 6
# Irrealis
IRREALIS = 7
# Short form
SHORT = 8
# Long form
LONG = 9
# Order form
ORDER = 10
# Specific form
SPECIFIC = 11
end
# Gender classes of nouns reflected in the behaviour of associated words.
module Gender
# Gender is not applicable in the analyzed language or is not predicted.
GENDER_UNKNOWN = 0
# Feminine
FEMININE = 1
# Masculine
MASCULINE = 2
# Neuter
NEUTER = 3
end
# The grammatical feature of verbs, used for showing modality and attitude.
module Mood
# Mood is not applicable in the analyzed language or is not predicted.
MOOD_UNKNOWN = 0
# Conditional
CONDITIONAL_MOOD = 1
# Imperative
IMPERATIVE = 2
# Indicative
INDICATIVE = 3
# Interrogative
INTERROGATIVE = 4
# Jussive
JUSSIVE = 5
# Subjunctive
SUBJUNCTIVE = 6
end
# Count distinctions.
module Number
# Number is not applicable in the analyzed language or is not predicted.
NUMBER_UNKNOWN = 0
# Singular
SINGULAR = 1
# Plural
PLURAL = 2
# Dual
DUAL = 3
end
# The distinction between the speaker, second person, third person, etc.
module Person
# Person is not applicable in the analyzed language or is not predicted.
PERSON_UNKNOWN = 0
# First
FIRST = 1
# Second
SECOND = 2
# Third
THIRD = 3
# Reflexive
REFLEXIVE_PERSON = 4
end
# This category shows if the token is part of a proper name.
module Proper
# Proper is not applicable in the analyzed language or is not predicted.
PROPER_UNKNOWN = 0
# Proper
PROPER = 1
# Not proper
NOT_PROPER = 2
end
# Reciprocal features of a pronoun.
module Reciprocity
# Reciprocity is not applicable in the analyzed language or is not
# predicted.
RECIPROCITY_UNKNOWN = 0
# Reciprocal
RECIPROCAL = 1
# Non-reciprocal
NON_RECIPROCAL = 2
end
# Time reference.
module Tense
# Tense is not applicable in the analyzed language or is not predicted.
TENSE_UNKNOWN = 0
# Conditional
CONDITIONAL_TENSE = 1
# Future
FUTURE = 2
# Past
PAST = 3
# Present
PRESENT = 4
# Imperfect
IMPERFECT = 5
# Pluperfect
PLUPERFECT = 6
end
# The relationship between the action that a verb expresses and the
# participants identified by its arguments.
module Voice
# Voice is not applicable in the analyzed language or is not predicted.
VOICE_UNKNOWN = 0
# Active
ACTIVE = 1
# Causative
CAUSATIVE = 2
# Passive
PASSIVE = 3
end
end
# Represents dependency parse tree information for a token.
# @!attribute [rw] head_token_index
# @return [Integer]
# Represents the head of this token in the dependency tree.
# This is the index of the token which has an arc going to this token.
# The index is the position of the token in the array of tokens returned
# by the API method. If this token is a root token, then the
# `head_token_index` is its own index.
# @!attribute [rw] label
# @return [Google::Cloud::Language::V1beta2::DependencyEdge::Label]
# The parse label for the token.
class DependencyEdge
# The parse label enum for the token.
module Label
# Unknown
UNKNOWN = 0
# Abbreviation modifier
ABBREV = 1
# Adjectival complement
ACOMP = 2
# Adverbial clause modifier
ADVCL = 3
# Adverbial modifier
ADVMOD = 4
# Adjectival modifier of an NP
AMOD = 5
# Appositional modifier of an NP
APPOS = 6
# Attribute dependent of a copular verb
ATTR = 7
# Auxiliary (non-main) verb
AUX = 8
# Passive auxiliary
AUXPASS = 9
# Coordinating conjunction
CC = 10
# Clausal complement of a verb or adjective
CCOMP = 11
# Conjunct
CONJ = 12
# Clausal subject
CSUBJ = 13
# Clausal passive subject
CSUBJPASS = 14
# Dependency (unable to determine)
DEP = 15
# Determiner
DET = 16
# Discourse
DISCOURSE = 17
# Direct object
DOBJ = 18
# Expletive
EXPL = 19
# Goes with (part of a word in a text not well edited)
GOESWITH = 20
# Indirect object
IOBJ = 21
# Marker (word introducing a subordinate clause)
MARK = 22
# Multi-word expression
MWE = 23
# Multi-word verbal expression
MWV = 24
# Negation modifier
NEG = 25
# Noun compound modifier
NN = 26
# Noun phrase used as an adverbial modifier
NPADVMOD = 27
# Nominal subject
NSUBJ = 28
# Passive nominal subject
NSUBJPASS = 29
# Numeric modifier of a noun
NUM = 30
# Element of compound number
NUMBER = 31
# Punctuation mark
P = 32
# Parataxis relation
PARATAXIS = 33
# Participial modifier
PARTMOD = 34
# The complement of a preposition is a clause
PCOMP = 35
# Object of a preposition
POBJ = 36
# Possession modifier
POSS = 37
# Postverbal negative particle
POSTNEG = 38
# Predicate complement
PRECOMP = 39
# Preconjunt
PRECONJ = 40
# Predeterminer
PREDET = 41
# Prefix
PREF = 42
# Prepositional modifier
PREP = 43
# The relationship between a verb and verbal morpheme
PRONL = 44
# Particle
PRT = 45
# Associative or possessive marker
PS = 46
# Quantifier phrase modifier
QUANTMOD = 47
# Relative clause modifier
RCMOD = 48
# Complementizer in relative clause
RCMODREL = 49
# Ellipsis without a preceding predicate
RDROP = 50
# Referent
REF = 51
# Remnant
REMNANT = 52
# Reparandum
REPARANDUM = 53
# Root
ROOT = 54
# Suffix specifying a unit of number
SNUM = 55
# Suffix
SUFF = 56
# Temporal modifier
TMOD = 57
# Topic marker
TOPIC = 58
# Clause headed by an infinite form of the verb that modifies a noun
VMOD = 59
# Vocative
VOCATIVE = 60
# Open clausal complement
XCOMP = 61
# Name suffix
SUFFIX = 62
# Name title
TITLE = 63
# Adverbial phrase modifier
ADVPHMOD = 64
# Causative auxiliary
AUXCAUS = 65
# Helper auxiliary
AUXVV = 66
# Rentaishi (Prenominal modifier)
DTMOD = 67
# Foreign words
FOREIGN = 68
# Keyword
KW = 69
# List for chains of comparable items
LIST = 70
# Nominalized clause
NOMC = 71
# Nominalized clausal subject
NOMCSUBJ = 72
# Nominalized clausal passive
NOMCSUBJPASS = 73
# Compound of numeric modifier
NUMC = 74
# Copula
COP = 75
# Dislocated relation (for fronted/topicalized elements)
DISLOCATED = 76
# Aspect marker
ASP = 77
# Genitive modifier
GMOD = 78
# Genitive object
GOBJ = 79
# Infinitival modifier
INFMOD = 80
# Measure
MES = 81
# Nominal complement of a noun
NCOMP = 82
end
end
# Represents a mention for an entity in the text. Currently, proper noun
# mentions are supported.
# @!attribute [rw] text
# @return [Google::Cloud::Language::V1beta2::TextSpan]
# The mention text.
# @!attribute [rw] type
# @return [Google::Cloud::Language::V1beta2::EntityMention::Type]
# The type of the entity mention.
# @!attribute [rw] sentiment
# @return [Google::Cloud::Language::V1beta2::Sentiment]
# For calls to {AnalyzeEntitySentiment} or if
# {Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features#extract_entity_sentiment AnnotateTextRequest::Features#extract_entity_sentiment} is set to
# true, this field will contain the sentiment expressed for this mention of
# the entity in the provided document.
class EntityMention
# The supported types of mentions.
module Type
# Unknown
TYPE_UNKNOWN = 0
# Proper name
PROPER = 1
# Common noun (or noun compound)
COMMON = 2
end
end
# Represents an output piece of text.
# @!attribute [rw] content
# @return [String]
# The content of the output text.
# @!attribute [rw] begin_offset
# @return [Integer]
# The API calculates the beginning offset of the content in the original
# document according to the {Google::Cloud::Language::V1beta2::EncodingType EncodingType} specified in the API request.
class TextSpan; end
# Represents a category returned from the text classifier.
# @!attribute [rw] name
# @return [String]
# The name of the category representing the document.
# @!attribute [rw] confidence
# @return [Float]
# The classifier's confidence score for the category. The number represents
# how certain the classifier is that this category represents the given text.
class ClassificationCategory; end
# The sentiment analysis request message.
# @!attribute [rw] document
# @return [Google::Cloud::Language::V1beta2::Document]
# Input document.
# @!attribute [rw] encoding_type
# @return [Google::Cloud::Language::V1beta2::EncodingType]
# The encoding type used by the API to calculate sentence offsets for the
# sentence sentiment.
class AnalyzeSentimentRequest; end
# The sentiment analysis response message.
# @!attribute [rw] document_sentiment
# @return [Google::Cloud::Language::V1beta2::Sentiment]
# The overall sentiment of the input document.
# @!attribute [rw] language
# @return [String]
# The language of the text, which will be the same as the language specified
# in the request or, if not specified, the automatically-detected language.
# See {Google::Cloud::Language::V1beta2::Document#language Document#language} field for more details.
# @!attribute [rw] sentences
# @return [Array<Google::Cloud::Language::V1beta2::Sentence>]
# The sentiment for all the sentences in the document.
class AnalyzeSentimentResponse; end
# The entity-level sentiment analysis request message.
# @!attribute [rw] document
# @return [Google::Cloud::Language::V1beta2::Document]
# Input document.
# @!attribute [rw] encoding_type
# @return [Google::Cloud::Language::V1beta2::EncodingType]
# The encoding type used by the API to calculate offsets.
class AnalyzeEntitySentimentRequest; end
# The entity-level sentiment analysis response message.
# @!attribute [rw] entities
# @return [Array<Google::Cloud::Language::V1beta2::Entity>]
# The recognized entities in the input document with associated sentiments.
# @!attribute [rw] language
# @return [String]
# The language of the text, which will be the same as the language specified
# in the request or, if not specified, the automatically-detected language.
# See {Google::Cloud::Language::V1beta2::Document#language Document#language} field for more details.
class AnalyzeEntitySentimentResponse; end
# The entity analysis request message.
# @!attribute [rw] document
# @return [Google::Cloud::Language::V1beta2::Document]
# Input document.
# @!attribute [rw] encoding_type
# @return [Google::Cloud::Language::V1beta2::EncodingType]
# The encoding type used by the API to calculate offsets.
class AnalyzeEntitiesRequest; end
# The entity analysis response message.
# @!attribute [rw] entities
# @return [Array<Google::Cloud::Language::V1beta2::Entity>]
# The recognized entities in the input document.
# @!attribute [rw] language
# @return [String]
# The language of the text, which will be the same as the language specified
# in the request or, if not specified, the automatically-detected language.
# See {Google::Cloud::Language::V1beta2::Document#language Document#language} field for more details.
class AnalyzeEntitiesResponse; end
# The syntax analysis request message.
# @!attribute [rw] document
# @return [Google::Cloud::Language::V1beta2::Document]
# Input document.
# @!attribute [rw] encoding_type
# @return [Google::Cloud::Language::V1beta2::EncodingType]
# The encoding type used by the API to calculate offsets.
class AnalyzeSyntaxRequest; end
# The syntax analysis response message.
# @!attribute [rw] sentences
# @return [Array<Google::Cloud::Language::V1beta2::Sentence>]
# Sentences in the input document.
# @!attribute [rw] tokens
# @return [Array<Google::Cloud::Language::V1beta2::Token>]
# Tokens, along with their syntactic information, in the input document.
# @!attribute [rw] language
# @return [String]
# The language of the text, which will be the same as the language specified
# in the request or, if not specified, the automatically-detected language.
# See {Google::Cloud::Language::V1beta2::Document#language Document#language} field for more details.
class AnalyzeSyntaxResponse; end
# The document classification request message.
# @!attribute [rw] document
# @return [Google::Cloud::Language::V1beta2::Document]
# Input document.
class ClassifyTextRequest; end
# The document classification response message.
# @!attribute [rw] categories
# @return [Array<Google::Cloud::Language::V1beta2::ClassificationCategory>]
# Categories representing the input document.
class ClassifyTextResponse; end
# The request message for the text annotation API, which can perform multiple
# analysis types (sentiment, entities, and syntax) in one call.
# @!attribute [rw] document
# @return [Google::Cloud::Language::V1beta2::Document]
# Input document.
# @!attribute [rw] features
# @return [Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features]
# The enabled features.
# @!attribute [rw] encoding_type
# @return [Google::Cloud::Language::V1beta2::EncodingType]
# The encoding type used by the API to calculate offsets.
class AnnotateTextRequest
# All available features for sentiment, syntax, and semantic analysis.
# Setting each one to true will enable that specific analysis for the input.
# @!attribute [rw] extract_syntax
# @return [true, false]
# Extract syntax information.
# @!attribute [rw] extract_entities
# @return [true, false]
# Extract entities.
# @!attribute [rw] extract_document_sentiment
# @return [true, false]
# Extract document-level sentiment.
# @!attribute [rw] extract_entity_sentiment
# @return [true, false]
# Extract entities and their associated sentiment.
# @!attribute [rw] classify_text
# @return [true, false]
# Classify the full document into categories.
class Features; end
end
# The text annotations response message.
# @!attribute [rw] sentences
# @return [Array<Google::Cloud::Language::V1beta2::Sentence>]
# Sentences in the input document. Populated if the user enables
# {Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features#extract_syntax AnnotateTextRequest::Features#extract_syntax}.
# @!attribute [rw] tokens
# @return [Array<Google::Cloud::Language::V1beta2::Token>]
# Tokens, along with their syntactic information, in the input document.
# Populated if the user enables
# {Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features#extract_syntax AnnotateTextRequest::Features#extract_syntax}.
# @!attribute [rw] entities
# @return [Array<Google::Cloud::Language::V1beta2::Entity>]
# Entities, along with their semantic information, in the input document.
# Populated if the user enables
# {Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features#extract_entities AnnotateTextRequest::Features#extract_entities}.
# @!attribute [rw] document_sentiment
# @return [Google::Cloud::Language::V1beta2::Sentiment]
# The overall sentiment for the document. Populated if the user enables
# {Google::Cloud::Language::V1beta2::AnnotateTextRequest::Features#extract_document_sentiment AnnotateTextRequest::Features#extract_document_sentiment}.
# @!attribute [rw] language
# @return [String]
# The language of the text, which will be the same as the language specified
# in the request or, if not specified, the automatically-detected language.
# See {Google::Cloud::Language::V1beta2::Document#language Document#language} field for more details.
# @!attribute [rw] categories
# @return [Array<Google::Cloud::Language::V1beta2::ClassificationCategory>]
# Categories identified in the input document.
class AnnotateTextResponse; end
# Represents the text encoding that the caller uses to process the output.
# Providing an `EncodingType` is recommended because the API provides the
# beginning offsets for various outputs, such as tokens and mentions, and
# languages that natively use different text encodings may access offsets
# differently.
module EncodingType
# If `EncodingType` is not specified, encoding-dependent information (such as
# `begin_offset`) will be set at `-1`.
NONE = 0
# Encoding-dependent information (such as `begin_offset`) is calculated based
# on the UTF-8 encoding of the input. C++ and Go are examples of languages
# that use this encoding natively.
UTF8 = 1
# Encoding-dependent information (such as `begin_offset`) is calculated based
# on the UTF-16 encoding of the input. Java and JavaScript are examples of
# languages that use this encoding natively.
UTF16 = 2
# Encoding-dependent information (such as `begin_offset`) is calculated based
# on the UTF-32 encoding of the input. Python is an example of a language
# that uses this encoding natively.
UTF32 = 3
end
end
end
end
end | quartzmo/gcloud-ruby | google-cloud-language/lib/google/cloud/language/v1beta2/doc/google/cloud/language/v1beta2/language_service.rb | Ruby | apache-2.0 | 33,152 |
# Copyright 2017 The Nuclio Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Uses Microsoft's Face API to extract face information from the
# picture whose URL is submitted in the request body. The result is
# returned as a table of face objects sorted by their center's
# position in the given picture, left-to-right and then top-to-bottom.
#
# You will need a valid key from Microsoft:
# https://azure.microsoft.com/en-gb/try/cognitive-services/?api=face-api
#
# Once a valid Face API key has been acquired, set it and the appropriate
# regional base URL as the environment for this function
# (in the config section).
#
# We can also configure the function inline - through a specially crafted
# comment such as the below. This is functionally equivalent to creating
# a function.yaml file.
import os
import cognitive_face as cf
import tabulate
import inflection
def handler(context, event):
    """Detect faces in the image at the URL given in the request body.

    Uses Microsoft's Face API and responds with a plain-text table that
    describes each detected face (position, gender, age, primary emotion,
    glasses, smile), sorted by the face center's position in the picture,
    left-to-right and then top-to-bottom.
    """
    # extract the stuff we need
    image_url = event.body.decode('utf-8').strip()
    key = os.environ.get('FACE_API_KEY')
    base_url = os.environ.get('FACE_API_BASE_URL')

    if key is None:
        context.logger.warn('Face API key not set, cannot continue')
        return _build_response(context, 'Function misconfigured: Face API key not set', 503)

    if base_url is None:
        context.logger.warn('Face API base URL not set, cannot continue')
        return _build_response(context, 'Function misconfigured: Face API base URL not set', 503)

    if not image_url:
        context.logger.warn('No URL given in request body')
        return _build_response(context, 'Image URL required', 400)

    # configure cognitive face wrapper
    cf.Key.set(key)
    cf.BaseUrl.set(base_url)

    # attempt to request using the provided info
    try:
        context.logger.info('Requesting detection from Face API: {0}'.format(image_url))
        detected_faces = cf.face.detect(image_url,
                                        face_id=False,
                                        attributes='age,gender,glasses,smile,emotion')
    except Exception as error:
        context.logger.warn('Face API error occurred: {0}'.format(error))
        return _build_response(context, 'Face API error occurred', 503)

    # determine the center point of each detected face and map it to its
    # attributes, as well as clean up the retrieved data for viewing comfort
    parsed_faces = [_parse_face(face) for face in detected_faces]

    # sort according to center point, first x then y
    parsed_faces.sort(key=lambda face: (face['x'], face['y']))

    # prepare the data for tabulation
    first_row = ('',) + tuple(face['position'] for face in parsed_faces)

    # PEP 8 (E731): use a def instead of assigning a lambda to a name
    def make_row(name):
        # one table row: the humanized attribute name followed by that
        # attribute's value for each face, in sorted order
        return (inflection.humanize(name),) + tuple(
            face[name] for face in parsed_faces)

    other_rows = [make_row(name) for name in [
        'gender', 'age', 'primary_emotion', 'glasses', 'smile']]

    # return the human-readable face data in a neat table format
    return _build_response(context,
                           tabulate.tabulate([first_row] + other_rows,
                                             headers='firstrow',
                                             tablefmt='fancy_grid',
                                             numalign='center',
                                             stralign='center'),
                           200)


def _parse_face(face):
    """Map one raw Face API detection result to a flat display dict."""
    coordinates = face['faceRectangle']
    attributes = face['faceAttributes']
    center_x = coordinates['left'] + coordinates['width'] / 2
    center_y = coordinates['top'] + coordinates['height'] / 2

    # determine the primary emotion based on its weighing
    primary_emotion = sorted(attributes['emotion'].items(), key=lambda item: item[1])[-1][0]

    return {
        'x': center_x,
        'y': center_y,
        'position': '({0},{1})'.format(int(center_x), int(center_y)),
        'gender': inflection.humanize(attributes['gender']),
        'age': int(attributes['age']),
        'glasses': inflection.humanize(inflection.underscore(attributes['glasses'])),
        'primary_emotion': inflection.humanize(primary_emotion),
        'smile': '{0:.1f}%'.format(attributes['smile'] * 100),
    }
def _build_response(context, body, status_code):
return context.Response(body=body,
headers={},
content_type='text/plain',
status_code=status_code)
| nuclio/nuclio | hack/examples/python/facerecognizer/face.py | Python | apache-2.0 | 5,037 |
/*
* Copyright (C) 2013-2015 RoboVM AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Portions of this code is based on Parse's AnyPic sample
* which is copyright (C) 2013 Parse.
*/
package org.robovm.samples.robopods.parse.anypic.ios.ui.views;
import org.robovm.apple.foundation.NSData;
import org.robovm.apple.foundation.NSError;
import org.robovm.apple.uikit.UIImage;
import org.robovm.apple.uikit.UIImageView;
import org.robovm.pods.parse.PFFile;
import org.robovm.pods.parse.PFGetDataCallback;
import org.robovm.samples.robopods.parse.anypic.ios.util.Log;
public class PAPImageView extends UIImageView {
    /**
     * URL of the most recently requested file. It is used to discard stale
     * responses when {@link #setFile(PFFile)} is called again before an
     * earlier download finishes. Declared volatile because the download
     * callback may run on a different thread — NOTE(review): confirm the
     * threading model of getDataInBackground.
     */
    private volatile String url;

    /**
     * Asynchronously downloads the given Parse file and displays it once the
     * data arrives — but only if no newer file has been requested in the
     * meantime.
     *
     * @param file the remote file whose contents should be shown in this view
     */
    public void setFile(PFFile file) {
        // Fetch the URL once: the local copy identifies this particular
        // request, and the field records the latest request on this view.
        final String requestURL = file.getUrl();
        url = requestURL;
        file.getDataInBackground(new PFGetDataCallback() {
            @Override
            public void done(NSData data, NSError error) {
                if (error == null) {
                    UIImage image = UIImage.create(data);
                    // Apply the image only if this is still the newest request.
                    if (requestURL.equals(url)) {
                        setImage(image);
                        setNeedsDisplay();
                    }
                } else {
                    Log.e("Error on fetching file: %s", error);
                }
            }
        });
    }
}
| samskivert/robovm-samples | robopods/parse/anypic/ios/src/main/java/org/robovm/samples/robopods/parse/anypic/ios/ui/views/PAPImageView.java | Java | apache-2.0 | 1,875 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm;
import org.apache.cloudstack.api.InternalIdentity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name = "nic_details")
public class NicDetailVO implements InternalIdentity {

    /** Surrogate primary key, generated by the database. */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private long id;

    /** Identifier of the NIC this detail row belongs to. */
    @Column(name = "nic_id")
    private long nicId;

    /** Detail key. */
    @Column(name = "name")
    private String name;

    /** Detail value (up to 1024 characters). */
    @Column(name = "value", length = 1024)
    private String value;

    /** No-arg constructor required by JPA. */
    public NicDetailVO() {
    }

    /**
     * Creates a detail entry for a NIC.
     *
     * @param nicId id of the owning NIC
     * @param name  detail key
     * @param value detail value
     */
    public NicDetailVO(long nicId, String name, String value) {
        this.nicId = nicId;
        this.name = name;
        this.value = value;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public long getNicId() {
        return nicId;
    }

    public void setNicId(long nicId) {
        this.nicId = nicId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }
}
| mufaddalq/cloudstack-datera-driver | server/src/com/cloud/vm/NicDetailVO.java | Java | apache-2.0 | 2,148 |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="Factory.cs" company="PicklesDoc">
// Copyright 2011 Jeffrey Cameron
// Copyright 2012-present PicklesDoc team and community contributors
//
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using System.Collections.Generic;
using System.Linq;
using PicklesDoc.Pickles.ObjectModel;
using G = Gherkin.Ast;
namespace PicklesDoc.Pickles.Test.ObjectModel
{
    /// <summary>
    /// Test helper that builds Gherkin AST objects (aliased as <c>G</c>) and
    /// <see cref="Mapper"/> instances with minimal boilerplate for the
    /// object-model unit tests.
    /// </summary>
    internal class Factory
    {
        // Source locations are irrelevant to most tests, so a shared null is used.
        private const G.Location AnyLocation = null;

        /// <summary>Creates a mapper with a fresh default configuration.</summary>
        internal Mapper CreateMapper(string defaultLanguage = "en")
        {
            var mapper = this.CreateMapper(new Configuration(), defaultLanguage);
            return mapper;
        }

        /// <summary>Creates a mapper for the given configuration and feature language.</summary>
        internal Mapper CreateMapper(IConfiguration configuration, string defaultLanguage = "en")
        {
            var languageServices = new LanguageServices(defaultLanguage);
            var mapper = new Mapper(configuration, languageServices);
            return mapper;
        }

        /// <summary>Creates a single table cell with the given value.</summary>
        internal G.TableCell CreateGherkinTableCell(string cellValue)
        {
            return new G.TableCell(AnyLocation, cellValue);
        }

        /// <summary>Creates a doc string; defaults to a two-line sample text.</summary>
        internal G.DocString CreateDocString(string docString = null)
        {
            return new G.DocString(
                AnyLocation,
                null,
                docString ?? @"My doc string line 1
            My doc string line 2");
        }

        /// <summary>Creates a table row whose cells hold the given values.</summary>
        internal G.TableRow CreateGherkinTableRow(params string[] cellValues)
        {
            return new G.TableRow(
                AnyLocation,
                cellValues.Select(this.CreateGherkinTableCell).ToArray());
        }

        /// <summary>Creates a data table from rows of cell values.</summary>
        internal G.DataTable CreateGherkinDataTable(IEnumerable<string[]> rows)
        {
            return new G.DataTable(rows.Select(this.CreateGherkinTableRow).ToArray());
        }

        /// <summary>Creates a step with no argument.</summary>
        internal G.Step CreateStep(string keyword, string text)
        {
            return new G.Step(AnyLocation, keyword, text, null);
        }

        /// <summary>Creates a step at an explicit source location.</summary>
        internal G.Step CreateStep(string keyword, string text, int locationLine, int locationColumn)
        {
            var step = new G.Step(this.CreateLocation(locationLine, locationColumn), keyword, text, null);
            return step;
        }

        /// <summary>Creates a step carrying a doc-string argument.</summary>
        internal G.Step CreateStep(string keyword, string text, string docString)
        {
            return new G.Step(AnyLocation, keyword, text, this.CreateDocString(docString));
        }

        /// <summary>Creates a step carrying a data-table argument.</summary>
        internal G.Step CreateStep(string keyword, string text, IEnumerable<string[]> rows)
        {
            return new G.Step(AnyLocation, keyword, text, this.CreateGherkinDataTable(rows));
        }

        /// <summary>Creates a tag with the given name.</summary>
        internal G.Tag CreateTag(string tag)
        {
            return new G.Tag(AnyLocation, tag);
        }

        /// <summary>Creates a source location.</summary>
        internal G.Location CreateLocation(int line, int column)
        {
            return new G.Location(line, column);
        }

        /// <summary>Creates a comment at an explicit source location.</summary>
        internal G.Comment CreateComment(string comment, int locationLine, int locationColumn)
        {
            return new G.Comment(this.CreateLocation(locationLine, locationColumn), comment);
        }

        /// <summary>Creates a scenario (no examples attached).</summary>
        internal G.Scenario CreateScenario(string[] tags, string name, string description, G.Step[] steps, G.Location location = null)
        {
            G.Scenario scenario = new G.Scenario(
                tags.Select(this.CreateTag).ToArray(),
                location ?? AnyLocation,
                "Scenario",
                name,
                description,
                steps,
                null);
            return scenario;
        }

        /// <summary>Creates an examples table: a header row plus data rows, optionally tagged.</summary>
        internal G.Examples CreateExamples(string name, string description, string[] headerCells, IEnumerable<string[]> exampleRows, string[] tags = null)
        {
            var examples = new G.Examples(
                tags?.Select(this.CreateTag).ToArray(),
                AnyLocation,
                "Examples",
                name,
                description,
                this.CreateGherkinTableRow(headerCells),
                exampleRows.Select(this.CreateGherkinTableRow).ToArray());
            return examples;
        }

        /// <summary>
        /// Creates a scenario outline, i.e. a scenario with examples attached.
        /// NOTE(review): the keyword is "Scenario", not "Scenario Outline" —
        /// confirm this matches how the Gherkin parser labels outlines.
        /// </summary>
        internal G.Scenario CreateScenarioOutline(string[] tags, string name, string description, G.Step[] steps, G.Examples[] examples)
        {
            G.Scenario scenarioOutline = new G.Scenario(
                tags.Select(this.CreateTag).ToArray(),
                AnyLocation,
                "Scenario",
                name,
                description,
                steps,
                examples);
            return scenarioOutline;
        }

        /// <summary>Creates a background section.</summary>
        internal G.Background CreateBackground(string name, string description, G.Step[] steps)
        {
            G.Background background = new G.Background(
                AnyLocation,
                "Background",
                name,
                description,
                steps);
            return background;
        }

        /// <summary>
        /// Creates a whole Gherkin document. When a background is supplied it is
        /// placed before the scenario definitions among the feature's children.
        /// </summary>
        internal G.GherkinDocument CreateGherkinDocument(string name, string description, string[] tags = null, G.Background background = null, G.IHasLocation [] scenarioDefinitions = null, G.Comment[] comments = null, G.Location location = null, string language = null)
        {
            var nonNullScenarioDefinitions = scenarioDefinitions ?? new G.IHasLocation[0];
            return new G.GherkinDocument(
                new G.Feature(
                    (tags ?? new string[0]).Select(this.CreateTag).ToArray(),
                    location,
                    language,
                    "Feature",
                    name,
                    description,
                    background != null ? new G.Background[] { background }.Concat(nonNullScenarioDefinitions).ToArray() : nonNullScenarioDefinitions),
                comments);
        }
    }
}
| picklesdoc/pickles | src/Pickles.Test/ObjectModel/Factory.cs | C# | apache-2.0 | 6,503 |
package com.zestedesavoir.sdk.internal.query;
/**
 * Marker interface for objects that the internal query API accepts as
 * query parameters. It declares no methods; implementations are recognized
 * purely by type.
 *
 * Created by Gerard on 26/02/15.
 */
public interface Parameter {
}
| GerardPaligot/zds-android-sdk | sdk/src/main/java/com/zestedesavoir/sdk/internal/query/Parameter.java | Java | apache-2.0 | 120 |
# Copyright 2014 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import mock
import os
import pytest
import requests
import requests_mock
import struct
from f5.bigip.mixins import AsmFileMixin
from f5.bigip.mixins import CommandExecutionMixin
from f5.bigip.mixins import ToDictMixin
from f5.bigip.resource import Resource
from f5.sdk_exception import EmptyContent
from f5.sdk_exception import MissingHttpHeader
from f5.sdk_exception import UnsupportedMethod
from requests import HTTPError
class MixinTestClass(ToDictMixin):
    # Minimal ToDictMixin subclass; tests attach attributes to it ad hoc
    # before calling to_dict().
    def __init__(self):
        pass
def test_int():
    # An int attribute serializes as a bare JSON number.
    MTCobj = MixinTestClass()
    MTCobj.x = 1
    mtc_as_dict = MTCobj.to_dict()
    assert json.dumps(mtc_as_dict) == '{"x": 1}'


def test_list():
    # A list attribute keeps its element order and mixed types.
    MTCobj = MixinTestClass()
    MTCobj.x = [1, 'a']
    mtc_as_dict = MTCobj.to_dict()
    assert json.dumps(mtc_as_dict) == '{"x": [1, "a"]}'


def test_list_and_int():
    # Multiple attributes of different types serialize side by side.
    MTCobj = MixinTestClass()
    MTCobj.x = [1, 'a']
    MTCobj.y = 1
    mtc_as_dict = MTCobj.to_dict()
    assert json.dumps(mtc_as_dict, sort_keys=True) == \
        '{"x": [1, "a"], "y": 1}'


def test_list_and_int_and_list2():
    # Two distinct (equal-valued) lists serialize independently.
    MTCobj = MixinTestClass()
    MTCobj.x = [1, 'a']
    MTCobj.y = 1
    MTCobj.z = [1, 'a']
    mtc_as_dict = MTCobj.to_dict()
    assert json.dumps(mtc_as_dict, sort_keys=True) == \
        '{"x": [1, "a"], "y": 1, "z": [1, "a"]}'


def test_two_refs():
    # A second reference to an already-serialized object is recorded as a
    # ["TraversalRecord", <attribute>] marker instead of being duplicated.
    MTCobj = MixinTestClass()
    MTCobj.x = [1, 'a']
    MTCobj.z = MTCobj.x
    mtc_as_dict = MTCobj.to_dict()
    dict1 = json.dumps(mtc_as_dict, sort_keys=True)
    assert dict1 ==\
        '{"x": [1, "a"], "z": ["TraversalRecord", "x"]}'


def test_tuple():
    # Tuples serialize as JSON arrays, same as lists.
    MTCobj = MixinTestClass()
    MTCobj.x = (1, 'a')
    mtc_as_dict = MTCobj.to_dict()
    assert json.dumps(mtc_as_dict) == '{"x": [1, "a"]}'
class ToDictMixinAttribute(ToDictMixin):
    # Empty ToDictMixin subclass used as a nested attribute value in tests.
    def __init__(self):
        pass
def test_ToDictMixinAttribute():
    # A nested ToDictMixin instance with no attributes serializes as {}.
    MTCobj = MixinTestClass()
    TDMAttrObj = ToDictMixinAttribute()
    MTCobj.x = TDMAttrObj
    mtc_as_dict = MTCobj.to_dict()
    assert json.dumps(mtc_as_dict) == '{"x": {}}'


def test_ToDictMixinAttribute_Nested():
    # Attributes of a nested mixin are serialized recursively.
    MTCobj = MixinTestClass()
    TDMAttrObj = ToDictMixinAttribute()
    TDMAttrObj.y = {'a': 3}
    MTCobj.x = TDMAttrObj
    mtc_as_dict = MTCobj.to_dict()
    assert json.dumps(mtc_as_dict) == '{"x": {"y": {"a": 3}}}'
class DictableClass(object):
    """Plain object (not a ToDictMixin) used to verify that arbitrary
    objects are serialized via their attributes."""

    def __init__(self):
        # Fixed sentinel value the tests assert on.
        self.test_attribute = 42
def test_TestClass_Basic():
    # A plain (non-mixin) object is serialized via its instance attributes.
    TDMAttrObj = ToDictMixinAttribute()
    TDMAttrObj.y = DictableClass()
    mtc_as_dict = TDMAttrObj.to_dict()
    assert json.dumps(mtc_as_dict) == '{"y": {"test_attribute": 42}}'
class MockResponse(object):
    """Stand-in for an HTTP response object: both attribute access and
    ``json()`` expose the dict the mock was constructed from."""

    def __init__(self, attr_dict):
        # Adopt the given mapping wholesale as this instance's attributes.
        self.__dict__ = attr_dict

    def json(self):
        # Mirror requests.Response.json(): return the payload as a dict.
        return self.__dict__
class FakeCommandResource(CommandExecutionMixin, Resource):
    # Concrete command-capable resource used to exercise exec_cmd() without
    # talking to a real BIG-IP.
    def __init__(self, container):
        super(FakeCommandResource, self).__init__(container)
        self._meta_data['allowed_commands'] = ['fakecommand', 'fakecommand2']
        self._meta_data['required_json_kind'] = 'tm:ltm:fakeendpoint:fakeres'
        self._meta_data['allowed_lazy_attributes'] = []
        mockuri = 'https://localhost/mgmt/tm/ltm/fakeendpoint/fakeres'
        self._meta_data['uri'] = mockuri
        # Any POST through the mocked iControl REST session answers with a
        # minimal, valid-looking resource payload of the required kind.
        self._meta_data['bigip']._meta_data[
            'icr_session'].post.return_value =\
            MockResponse({"generation": 0, "selfLink": mockuri,
                          "kind": "tm:ltm:fakeendpoint:fakeres"})
class TestCommandExecutionMixin(object):
    # CommandExecutionMixin is CRUD-less: create/delete/load must all raise.
    def test_create_raises(self):
        command_resource = CommandExecutionMixin()
        with pytest.raises(UnsupportedMethod):
            command_resource.create()

    def test_delete_raises(self):
        command_resource = CommandExecutionMixin()
        with pytest.raises(UnsupportedMethod):
            command_resource.delete()

    def test_load_raises(self):
        command_resource = CommandExecutionMixin()
        with pytest.raises(UnsupportedMethod):
            command_resource.load()

    def test_exec_cmd_instance(self):
        # Each exec_cmd() call must return a distinct command object.
        fake_res = FakeCommandResource(mock.MagicMock())
        cmd1 = fake_res.exec_cmd('fakecommand')
        cmd2 = fake_res.exec_cmd('fakecommand2')
        assert cmd1 is not cmd2
def fake_http_server(uri, **kwargs):
    """Return a requests Session whose 'mock' scheme serves GET <uri>.

    kwargs are forwarded to requests_mock.Adapter.register_uri
    (e.g. text=, headers=, status_code=, reason=).
    """
    session = requests.Session()
    adapter = requests_mock.Adapter()
    session.mount('mock', adapter)
    adapter.register_uri('GET', uri, **kwargs)
    return session
class FakeAsmFileMixin(AsmFileMixin):
    # AsmFileMixin variant whose icr_session points at the mock HTTP adapter
    # instead of a real BIG-IP, so _download_file can be tested offline.
    def __init__(self, uri, **kwargs):
        session = fake_http_server(uri, **kwargs)
        self._meta_data = {'icr_session': session}
        self.file_bound_uri = uri
class TestAsmFileMixin(object):
    """Tests for AsmFileMixin._download_file against a mocked HTTP server."""

    def test_download(self):
        # Prepare a baseline file whose size differs from the server payload,
        # so we can detect that the download actually replaced it.
        with open('fakefile.txt', 'wb') as f:
            f.write(struct.pack('B', 0))
        basefilesize = int(os.stat('fakefile.txt').st_size)
        try:
            server_fakefile = 'asasasas' * 40
            srvfakesize = len(server_fakefile)
            header = {'Content-Length': str(srvfakesize),
                      'Content-Type': 'application/text'}
            dwnld = FakeAsmFileMixin('mock://test.com/fakefile.txt',
                                     text=server_fakefile, headers=header,
                                     status_code=200)
            dwnld._download_file('fakefile.txt')
            endfilesize = int(os.stat('fakefile.txt').st_size)
            assert basefilesize != srvfakesize
            assert endfilesize == srvfakesize
            assert endfilesize == 320
        finally:
            # Clean up here so later tests do not depend on this test's
            # leftovers (previously test_404_response did the cleanup).
            os.remove('fakefile.txt')

    def test_404_response(self):
        header = {'Content-Type': 'application/text'}
        dwnld = FakeAsmFileMixin(
            'mock://test.com/fakefile.txt', headers=header,
            status_code=404, reason='Not Found')
        # pytest.raises — unlike the previous bare try/except — also fails
        # the test when no exception is raised at all.
        with pytest.raises(HTTPError) as err:
            dwnld._download_file('fakefile.txt')
        assert err.value.response.status_code == 404

    def test_zero_content_length_header(self):
        header = {'Content-Type': 'application/text',
                  'Content-Length': '0'}
        dwnld = FakeAsmFileMixin(
            'mock://test.com/fake_file.txt', headers=header,
            status_code=200)
        with pytest.raises(EmptyContent) as err:
            dwnld._download_file('fakefile.txt')
        msg = "Invalid Content-Length value returned: %s ,the value " \
              "should be greater than 0"
        # str() works on both Python 2 and 3; Exception.message is Py2-only.
        assert str(err.value) == msg

    def test_no_content_length_header(self):
        header = {'Content-Type': 'application/text'}
        dwnld = FakeAsmFileMixin(
            'mock://test.com/fakefile.txt', headers=header,
            status_code=200)
        with pytest.raises(MissingHttpHeader) as err:
            dwnld._download_file('fakefile.txt')
        msg = "The Content-Length header is not present."
        assert str(err.value) == msg
| F5Networks/f5-common-python | f5/bigip/test/unit/test_mixins.py | Python | apache-2.0 | 7,579 |
<?php
namespace App\Controllers;
use jaspion\Controllers\Controller;
/**
 * Landing controller: renders the start page and the shared error views.
 *
 * @secured=true
 */
class InicioController extends Controller {

    /** Renders the application start page (the "index" view). */
    public function inicioAction() {
        $this->render('index');
    }

    /** Renders the "page not found" (404) view. */
    public function erro404() {
        $this->render("erro404");
    }

    /**
     * Renders the internal-error (500) view.
     *
     * @param mixed $ex optional exception/error detail exposed to the view
     *                  as $erro
     */
    public function erro500($ex = null) {
        $this->view->erro = $ex;
        $this->render("erro500");
    }
}
| gilmario-kpslow/cardgame | App/Controllers/InicioController.php | PHP | apache-2.0 | 415 |
package com.github.fhuss.storm.elasticsearch.state;
import com.github.fhuss.storm.elasticsearch.handler.BulkResponseHandler;
import com.github.fhuss.storm.elasticsearch.Document;
import com.github.fhuss.storm.elasticsearch.handler.BulkResponseHandler.LoggerResponseHandler;
import com.github.fhuss.storm.elasticsearch.mapper.TridentTupleMapper;
import storm.trident.operation.TridentCollector;
import storm.trident.state.BaseStateUpdater;
import storm.trident.tuple.TridentTuple;
import java.util.List;
/**
* Simple {@link BaseStateUpdater} implementation for Elasticsearch.
*
* @author fhussonnois
*/
public class ESIndexUpdater<T> extends BaseStateUpdater<ESIndexState<T>> {
private final TridentTupleMapper<Document<T>> documentTupleMapper;
private final BulkResponseHandler bulkResponseHandler;
public ESIndexUpdater(TridentTupleMapper<Document<T>> documentTupleMapper) {
this(documentTupleMapper, new LoggerResponseHandler());
}
public ESIndexUpdater(TridentTupleMapper<Document<T>> docBuilder, BulkResponseHandler bulkResponseHandler) {
this.documentTupleMapper = docBuilder;
this.bulkResponseHandler = bulkResponseHandler;
}
public void updateState(ESIndexState<T> state, List<TridentTuple> inputs, TridentCollector collector) {
state.bulkUpdateIndices(inputs, documentTupleMapper, bulkResponseHandler);
}
}
| lambdacloud/storm-trident-elasticsearch | src/main/java/com/github/fhuss/storm/elasticsearch/state/ESIndexUpdater.java | Java | apache-2.0 | 1,390 |
# Some magic for Python3
try:
import SocketServer as socketserver
except ImportError:
import socketserver
import logging
import sys
import threading
class EchoUDPHandler(socketserver.BaseRequestHandler):
    """UDP handler that echoes each datagram back upper-cased."""

    def handle(self):
        # For UDP servers, self.request is a (data, socket) pair.
        data = self.request[0].strip()
        socket = self.request[1]
        # Pass %-style args lazily so formatting is skipped when INFO
        # logging is disabled (instead of eager "%" interpolation).
        logging.info("%s wrote:", self.client_address[0])
        logging.info(data)
        socket.sendto(data.upper(), self.client_address)
class ServeThread(threading.Thread):
    # NOTE(review): this thread is non-daemon, so the process will not exit
    # while serve_forever() runs — confirm that is intended.
    def run(self):
        # Bind the echo server on localhost:5556 and serve until killed.
        HOST, PORT = 'localhost', 5556
        server = socketserver.UDPServer((HOST, PORT), EchoUDPHandler)
        server.serve_forever()
def serve():
    """Start the UDP echo server on a background thread and return at once."""
    st = ServeThread()
    st.start()
if __name__ == '__main__':
    # Enable INFO logging so handler activity is visible, then start serving.
    logging.basicConfig(level=logging.INFO)
    serve()
| rc500/ardrone_archive_aarons_laptop | ardrone/udpechoserver.py | Python | apache-2.0 | 746 |
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import url
from django.views.decorators.cache import never_cache
from submissions.views import (SubmissionsOptions, SubmissionListAPIView, SubmissionConstructionAPIView,
SubmissionAlterationAPIView, SubmissionDecommissionAPIView,
SubmissionGetAPIView, SubmissionStaffEditAPIView,
PreSignedDocumentKey,)
from gwells.urls import api_path_prefix
urlpatterns = [
    # Submissions form options
    url(api_path_prefix() + r'/submissions/options$',
        never_cache(SubmissionsOptions.as_view()), name='submissions-options'),
    # Submissions list
    url(api_path_prefix() + r'/submissions$',
        never_cache(SubmissionListAPIView.as_view()), name='submissions-list'),
    # Submission
    url(api_path_prefix() + r'/submissions/(?P<filing_number>[0-9]+)$',
        never_cache(SubmissionGetAPIView.as_view()), name='submissions-get'),
    # Construction submission
    url(api_path_prefix() + r'/submissions/construction$',
        never_cache(SubmissionConstructionAPIView.as_view()), name='CON'),
    # Alteration submission
    url(api_path_prefix() + r'/submissions/alteration$',
        never_cache(SubmissionAlterationAPIView.as_view()), name='ALT'),
    # Decommission submission
    url(api_path_prefix() + r'/submissions/decommission$',
        never_cache(SubmissionDecommissionAPIView.as_view()), name='DEC'),
    # Edit submission
    # NOTE: ``as_view()`` is a class-only method in Django; it must be called
    # on the view class itself. The previous ``SubmissionStaffEditAPIView()``
    # instantiated the view first, which raises at import time.
    url(api_path_prefix() + r'/submissions/staff_edit$',
        never_cache(SubmissionStaffEditAPIView.as_view()), name='STAFF_EDIT'),
    # Document Uploading (submission records)
    url(api_path_prefix() + r'/submissions/(?P<submission_id>[0-9]+)/presigned_put_url$',
        never_cache(PreSignedDocumentKey.as_view()), name='submissions-pre-signed-url'),
]
| bcgov/gwells | app/backend/submissions/urls.py | Python | apache-2.0 | 2,402 |
package rbac
/*
func TestRoleRelation(t *testing.T) {
db := createDB(t, func(mock sqlmock.Sqlmock) error {
return nil
})
relation := newRoleRelation(db)
if relation == nil {
t.Fatalf("Failed: role relation is nil ")
}
relation.buildRoleLinks()
err := relation.AddRoleForUser("user 1", "member")
if err != nil {
t.Fatalf("Failed to add role for user: %s", err)
}
err = relation.AddRoleForUser("user 2", "member")
if err != nil {
t.Fatalf("Failed to add role for user: %s", err)
}
err = relation.AddRoleForUser("user 3", "admin")
if err != nil {
t.Fatalf("Failed to add role for user: %s", err)
}
err = relation.AddRoleForUser("user 1", "member")
if err != nil {
t.Fatalf("Failed to add role for user: %s", err)
}
}
*/
| gunsluo/go-example | role/rbac/role_relation_test.go | GO | apache-2.0 | 757 |
package javaBean;
import java.sql.Timestamp;
import property.enums.widget.enumWidgetPosition;
/**
 * A widget that has been developed and registered in the service, extending
 * the base {@link Widget} with developer, review, image-path and deployment
 * metadata. Instances are created through the nested {@link Builder}.
 */
public class DevelopedWidget extends Widget{

	private float totalReview;            // cumulative review score
	private int reviewCount;              // number of reviews received
	private final String subImagePath;    // path of the thumbnail image
	private final String mainImagePath;   // path of the main image
	private final String widgetRoot;      // deployment root of the widget
	private final String sourceRoot;      // root of the widget's sources
	private final int developerId;
	private final String developer;
	private enumWidgetPosition position;  // placement of the widget
	private String contents;              // description text
	private Timestamp updatedDate;
	private float version;

	/**
	 * Builder for {@link DevelopedWidget}. Widget id, name and kind are
	 * mandatory (constructor arguments); everything else is optional.
	 */
	public static class Builder{
		private float totalReview;
		private int reviewCount;
		private String subImagePath;
		private String mainImagePath;
		private String widgetRoot;
		private String sourceRoot;
		private float version;
		private int developerId;
		private String developer;
		private enumWidgetPosition position;
		private String contents;
		private Timestamp updatedDate;

		// necessary fields
		private final String wName;
		private final int wId;
		private final String kind;

		public Builder(int wId, String wName, String kind){
			this.wName = wName;
			this.wId = wId;
			this.kind = kind;
		}

		public Builder developer(String dev){
			developer = dev; return this;
		}
		public Builder developerId(int devId){
			developerId = devId; return this;
		}

		/**
		 * Sets the widget position from its string form. Matching is
		 * case-insensitive.
		 *
		 * @throws IllegalArgumentException if no enum constant matches
		 */
		public Builder position(String pos){
			boolean isEmpty = true;
			for(enumWidgetPosition p : enumWidgetPosition.values()){
				if(p.getString().equalsIgnoreCase(pos)){
					isEmpty = false;
					position = p;
				}
			}
			if(isEmpty)
				throw new IllegalArgumentException("파라메티의 position이 서비스에 존재 하지 않는 값입니다.");

			return this;
		}
		public Builder contents(String con){
			contents = con; return this;
		}
		public Builder widgetRoot(String root){
			widgetRoot = root; return this;
		}
		public Builder updatedDate(Timestamp date){
			updatedDate = date; return this;
		}
		public Builder version(float ver){
			version = ver; return this;
		}
		public Builder sourceRoot(String root){
			sourceRoot = root; return this;
		}
		public Builder totalReview(float p){
			totalReview = p; return this;
		}
		public Builder reviewCount(int c){
			reviewCount = c; return this;
		}
		public Builder subImagePath(String path){
			subImagePath = path; return this;
		}
		public Builder mainImagePath(String path){
			mainImagePath = path; return this;
		}

		/** Builds the widget from the collected values. */
		public DevelopedWidget build() throws Exception{
			return new DevelopedWidget(this);
		}
	}

	private DevelopedWidget(Builder b) throws Exception{
		super(b.wId, b.wName, b.kind);
		this.sourceRoot = b.sourceRoot;
		this.contents = b.contents;
		this.developer = b.developer;
		this.developerId = b.developerId;
		this.position = b.position;
		this.updatedDate = b.updatedDate;
		this.widgetRoot = b.widgetRoot;
		this.version = b.version;
		this.mainImagePath = b.mainImagePath;
		this.subImagePath = b.subImagePath;
		this.totalReview = b.totalReview;
		this.reviewCount = b.reviewCount;
	}

	public int getDeveloperId() {
		return developerId;
	}
	public String getDeveloper() {
		return developer;
	}
	public enumWidgetPosition getPosition() {
		return position;
	}
	public String getContents() {
		return contents;
	}
	public String getWidgetRoot() {
		return widgetRoot;
	}
	public Timestamp getUpdatedDate() {
		return updatedDate;
	}
	public float getVersion() {
		return version;
	}
	public void setVersion(float version) {
		this.version = version;
	}
	public float getTotalReview() {
		return totalReview;
	}
	public int getReviewCount() {
		return reviewCount;
	}
	public String getSubImagePath() {
		return subImagePath;
	}
	public String getMainImagePath() {
		return mainImagePath;
	}
	public String getSourceRoot() {
		return sourceRoot;
	}
	public void setTotalReview(float totalReview) {
		this.totalReview = totalReview;
	}
	public void setReviewCount(int reviewCount) {
		this.reviewCount = reviewCount;
	}

	/**
	 * Sets the widget position from its string form.
	 * <p>
	 * Matching is case-insensitive, consistent with
	 * {@link Builder#position(String)} (previously this setter compared
	 * case-sensitively while the builder did not; exact-case values that
	 * worked before are still accepted).
	 *
	 * @throws IllegalArgumentException if no enum constant matches
	 */
	public void setPosition(String position) {
		boolean isEmpty = true;
		for(enumWidgetPosition k : enumWidgetPosition.values()){
			if(k.getString().equalsIgnoreCase(position)){
				this.position = k;
				isEmpty = false;
			}
		}
		if(isEmpty)
			throw new IllegalArgumentException("해당하는 position이 없습니다.");
	}
	public void setContents(String contents) {
		this.contents = contents;
	}
	public void setUpdatedDate(Timestamp updatedDate) {
		this.updatedDate = updatedDate;
	}
}
| PuppyRush/WidgetStore | src/javaBean/DevelopedWidget.java | Java | apache-2.0 | 4,522 |
/**
* @license Copyright 2018 The Lighthouse Authors. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
'use strict';
const Audit = require('./audit.js');
const URL = require('../lib/url-shim.js');
const NetworkRecords = require('../computed/network-records.js');
class NetworkRequests extends Audit {
  /**
   * @return {LH.Audit.Meta}
   */
  static get meta() {
    return {
      id: 'network-requests',
      scoreDisplayMode: Audit.SCORING_MODES.INFORMATIVE,
      title: 'Network Requests',
      description: 'Lists the network requests that were made during page load.',
      requiredArtifacts: ['devtoolsLogs'],
    };
  }

  /**
   * @param {LH.Artifacts} artifacts
   * @param {LH.Audit.Context} context
   * @return {Promise<LH.Audit.Product>}
   */
  static async audit(artifacts, context) {
    const devtoolsLog = artifacts.devtoolsLogs[Audit.DEFAULT_PASS];
    const records = await NetworkRecords.request(devtoolsLog, context);

    // All reported times are relative to the earliest request start.
    let earliestStartTime = Infinity;
    for (const record of records) {
      earliestStartTime = Math.min(earliestStartTime, record.startTime);
    }

    /** @param {number} time */
    const timeToMs = (time) => {
      if (!Number.isFinite(time) || time < earliestStartTime) return undefined;
      return (time - earliestStartTime) * 1000;
    };

    const results = records.map((record) => {
      const stats = record.lrStatistics;
      return {
        url: URL.elideDataURI(record.url),
        startTime: timeToMs(record.startTime),
        endTime: timeToMs(record.endTime),
        finished: record.finished,
        transferSize: record.transferSize,
        resourceSize: record.resourceSize,
        statusCode: record.statusCode,
        mimeType: record.mimeType,
        resourceType: record.resourceType,
        lrEndTimeDeltaMs: stats && stats.endTimeDeltaMs, // Only exists on Lightrider runs
        lrTCPMs: stats && stats.TCPMs, // Only exists on Lightrider runs
        lrRequestMs: stats && stats.requestMs, // Only exists on Lightrider runs
        lrResponseMs: stats && stats.responseMs, // Only exists on Lightrider runs
      };
    });

    // NOTE(i18n): this audit is only for debug info in the LHR and does not appear in the report.
    /** @type {LH.Audit.Details.Table['headings']} */
    const headings = [
      {key: 'url', itemType: 'url', text: 'URL'},
      {key: 'startTime', itemType: 'ms', granularity: 1, text: 'Start Time'},
      {key: 'endTime', itemType: 'ms', granularity: 1, text: 'End Time'},
      {
        key: 'transferSize',
        itemType: 'bytes',
        displayUnit: 'kb',
        granularity: 1,
        text: 'Transfer Size',
      },
      {
        key: 'resourceSize',
        itemType: 'bytes',
        displayUnit: 'kb',
        granularity: 1,
        text: 'Resource Size',
      },
      {key: 'statusCode', itemType: 'text', text: 'Status Code'},
      {key: 'mimeType', itemType: 'text', text: 'MIME Type'},
      {key: 'resourceType', itemType: 'text', text: 'Resource Type'},
    ];

    return {
      score: 1,
      details: Audit.makeTableDetails(headings, results),
    };
  }
}
module.exports = NetworkRequests;
| umaar/lighthouse | lighthouse-core/audits/network-requests.js | JavaScript | apache-2.0 | 3,962 |
/*
* Copyright (C) 2010 The MobileSecurePay Project
* All right reserved.
* author: shiqun.shi@alipay.com
*/
package cn.com.bluemoon.jeesite.modules.pay.service.alipay;
/**
 * Self-contained Base64 encoder/decoder using the standard RFC 2045 alphabet
 * ('A'-'Z', 'a'-'z', '0'-'9', '+', '/') with '=' padding.
 * <p>
 * {@link #decode(String)} tolerates embedded whitespace but returns
 * {@code null} (instead of throwing) for any other malformed input.
 */
public final class Base64 {

	static private final int BASELENGTH = 128;
	static private final int LOOKUPLENGTH = 64;
	static private final int TWENTYFOURBITGROUP = 24;
	static private final int EIGHTBIT = 8;
	static private final int SIXTEENBIT = 16;
	static private final int FOURBYTE = 4;
	static private final int SIGN = -128;
	static private final char PAD = '=';
	static private final boolean fDebug = false;
	// Maps an ASCII character to its 6-bit value, or -1 if not in the alphabet.
	static final private byte[] base64Alphabet = new byte[BASELENGTH];
	// Maps a 6-bit value (0..63) to its Base64 character.
	static final private char[] lookUpBase64Alphabet = new char[LOOKUPLENGTH];

	static {
		// Initialize every slot to -1 ("not a Base64 character").
		for (int i = 0; i < BASELENGTH; ++i) {
			base64Alphabet[i] = -1;
		}
		// 'A'-'Z' -> 0..25, 'a'-'z' -> 26..51, '0'-'9' -> 52..61
		for (int i = 'Z'; i >= 'A'; i--) {
			base64Alphabet[i] = (byte) (i - 'A');
		}
		for (int i = 'z'; i >= 'a'; i--) {
			base64Alphabet[i] = (byte) (i - 'a' + 26);
		}
		for (int i = '9'; i >= '0'; i--) {
			base64Alphabet[i] = (byte) (i - '0' + 52);
		}
		base64Alphabet['+'] = 62;
		base64Alphabet['/'] = 63;
		// Build the reverse table: value -> character.
		for (int i = 0; i <= 25; i++) {
			lookUpBase64Alphabet[i] = (char) ('A' + i);
		}
		for (int i = 26, j = 0; i <= 51; i++, j++) {
			lookUpBase64Alphabet[i] = (char) ('a' + j);
		}
		for (int i = 52, j = 0; i <= 61; i++, j++) {
			lookUpBase64Alphabet[i] = (char) ('0' + j);
		}
		lookUpBase64Alphabet[62] = (char) '+';
		lookUpBase64Alphabet[63] = (char) '/';
	}

	/** Returns true for space, CR, LF and TAB. */
	private static boolean isWhiteSpace(char octect) {
		return (octect == 0x20 || octect == 0xd || octect == 0xa || octect == 0x9);
	}

	/** Returns true if the character is the '=' padding character. */
	private static boolean isPad(char octect) {
		return (octect == PAD);
	}

	/** Returns true if the character belongs to the Base64 alphabet. */
	private static boolean isData(char octect) {
		return (octect < BASELENGTH && base64Alphabet[octect] != -1);
	}

	/**
	 * Encodes hex octects into Base64
	 *
	 * @param binaryData
	 *            Array containing binaryData
	 * @return Encoded Base64 array, "" for empty input, or null for null input
	 */
	public static String encode(byte[] binaryData) {
		if (binaryData == null) {
			return null;
		}

		int lengthDataBits = binaryData.length * EIGHTBIT;
		if (lengthDataBits == 0) {
			return "";
		}

		// Each complete 24-bit group becomes 4 output characters; a trailing
		// 8- or 16-bit remainder becomes a padded quartet.
		int fewerThan24bits = lengthDataBits % TWENTYFOURBITGROUP;
		int numberTriplets = lengthDataBits / TWENTYFOURBITGROUP;
		int numberQuartet = fewerThan24bits != 0 ? numberTriplets + 1
				: numberTriplets;
		char encodedData[] = null;

		encodedData = new char[numberQuartet * 4];

		byte k = 0, l = 0, b1 = 0, b2 = 0, b3 = 0;

		int encodedIndex = 0;
		int dataIndex = 0;
		if (fDebug) {
			System.out.println("number of triplets = " + numberTriplets);
		}

		for (int i = 0; i < numberTriplets; i++) {
			b1 = binaryData[dataIndex++];
			b2 = binaryData[dataIndex++];
			b3 = binaryData[dataIndex++];

			if (fDebug) {
				System.out.println("b1= " + b1 + ", b2= " + b2 + ", b3= " + b3);
			}

			// k/l hold the bits of b1/b2 that spill into the next character;
			// the ternaries emulate an unsigned right shift on byte values.
			l = (byte) (b2 & 0x0f);
			k = (byte) (b1 & 0x03);

			byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2)
					: (byte) ((b1) >> 2 ^ 0xc0);
			byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4)
					: (byte) ((b2) >> 4 ^ 0xf0);
			byte val3 = ((b3 & SIGN) == 0) ? (byte) (b3 >> 6)
					: (byte) ((b3) >> 6 ^ 0xfc);

			if (fDebug) {
				System.out.println("val2 = " + val2);
				System.out.println("k4 = " + (k << 4));
				System.out.println("vak = " + (val2 | (k << 4)));
			}

			encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
			encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
			encodedData[encodedIndex++] = lookUpBase64Alphabet[(l << 2) | val3];
			encodedData[encodedIndex++] = lookUpBase64Alphabet[b3 & 0x3f];
		}

		// form integral number of 6-bit groups
		if (fewerThan24bits == EIGHTBIT) {
			// One leftover byte -> two data characters plus "==".
			b1 = binaryData[dataIndex];
			k = (byte) (b1 & 0x03);
			if (fDebug) {
				System.out.println("b1=" + b1);
				System.out.println("b1<<2 = " + (b1 >> 2));
			}
			byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2)
					: (byte) ((b1) >> 2 ^ 0xc0);
			encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
			encodedData[encodedIndex++] = lookUpBase64Alphabet[k << 4];
			encodedData[encodedIndex++] = PAD;
			encodedData[encodedIndex++] = PAD;
		} else if (fewerThan24bits == SIXTEENBIT) {
			// Two leftover bytes -> three data characters plus "=".
			b1 = binaryData[dataIndex];
			b2 = binaryData[dataIndex + 1];
			l = (byte) (b2 & 0x0f);
			k = (byte) (b1 & 0x03);

			byte val1 = ((b1 & SIGN) == 0) ? (byte) (b1 >> 2)
					: (byte) ((b1) >> 2 ^ 0xc0);
			byte val2 = ((b2 & SIGN) == 0) ? (byte) (b2 >> 4)
					: (byte) ((b2) >> 4 ^ 0xf0);

			encodedData[encodedIndex++] = lookUpBase64Alphabet[val1];
			encodedData[encodedIndex++] = lookUpBase64Alphabet[val2 | (k << 4)];
			encodedData[encodedIndex++] = lookUpBase64Alphabet[l << 2];
			encodedData[encodedIndex++] = PAD;
		}

		return new String(encodedData);
	}

	/**
	 * Decodes Base64 data into octects
	 *
	 * @param encoded
	 *            string containing Base64 data
	 * @return Array containing decoded data, or null for malformed input.
	 */
	public static byte[] decode(String encoded) {

		if (encoded == null) {
			return null;
		}

		char[] base64Data = encoded.toCharArray();
		// remove white spaces
		int len = removeWhiteSpace(base64Data);

		if (len % FOURBYTE != 0) {
			return null;// should be divisible by four
		}

		int numberQuadruple = (len / FOURBYTE);

		if (numberQuadruple == 0) {
			return new byte[0];
		}

		byte decodedData[] = null;
		byte b1 = 0, b2 = 0, b3 = 0, b4 = 0;
		char d1 = 0, d2 = 0, d3 = 0, d4 = 0;

		int i = 0;
		int encodedIndex = 0;
		int dataIndex = 0;
		decodedData = new byte[(numberQuadruple) * 3];

		// All quadruples except the last one must be pure data characters;
		// only the last may contain padding.
		for (; i < numberQuadruple - 1; i++) {

			if (!isData((d1 = base64Data[dataIndex++]))
					|| !isData((d2 = base64Data[dataIndex++]))
					|| !isData((d3 = base64Data[dataIndex++]))
					|| !isData((d4 = base64Data[dataIndex++]))) {
				return null;
			}// if found "no data" just return null

			b1 = base64Alphabet[d1];
			b2 = base64Alphabet[d2];
			b3 = base64Alphabet[d3];
			b4 = base64Alphabet[d4];

			decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
			decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
			decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);
		}

		if (!isData((d1 = base64Data[dataIndex++]))
				|| !isData((d2 = base64Data[dataIndex++]))) {
			return null;// if found "no data" just return null
		}

		b1 = base64Alphabet[d1];
		b2 = base64Alphabet[d2];

		d3 = base64Data[dataIndex++];
		d4 = base64Data[dataIndex++];
		if (!isData((d3)) || !isData((d4))) {// Check if they are PAD characters
			if (isPad(d3) && isPad(d4)) {
				// "xx==" -> one decoded byte; the result array is trimmed.
				if ((b2 & 0xf) != 0)// last 4 bits should be zero
				{
					return null;
				}
				byte[] tmp = new byte[i * 3 + 1];
				System.arraycopy(decodedData, 0, tmp, 0, i * 3);
				tmp[encodedIndex] = (byte) (b1 << 2 | b2 >> 4);
				return tmp;
			} else if (!isPad(d3) && isPad(d4)) {
				// "xxx=" -> two decoded bytes; the result array is trimmed.
				b3 = base64Alphabet[d3];
				if ((b3 & 0x3) != 0)// last 2 bits should be zero
				{
					return null;
				}
				byte[] tmp = new byte[i * 3 + 2];
				System.arraycopy(decodedData, 0, tmp, 0, i * 3);
				tmp[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
				tmp[encodedIndex] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
				return tmp;
			} else {
				// A pad in position 3 followed by data is invalid.
				return null;
			}
		} else { // No PAD e.g 3cQl
			b3 = base64Alphabet[d3];
			b4 = base64Alphabet[d4];
			decodedData[encodedIndex++] = (byte) (b1 << 2 | b2 >> 4);
			decodedData[encodedIndex++] = (byte) (((b2 & 0xf) << 4) | ((b3 >> 2) & 0xf));
			decodedData[encodedIndex++] = (byte) (b3 << 6 | b4);

		}
		return decodedData;
	}

	/**
	 * remove WhiteSpace from MIME containing encoded Base64 data.
	 *
	 * @param data
	 *            the byte array of base64 data (with WS)
	 * @return the new length
	 */
	private static int removeWhiteSpace(char[] data) {
		if (data == null) {
			return 0;
		}

		// count characters that's not whitespace
		// (compacts the array in place; only the first newSize chars matter)
		int newSize = 0;
		int len = data.length;
		for (int i = 0; i < len; i++) {
			if (!isWhiteSpace(data[i])) {
				data[newSize++] = data[i];
			}
		}
		return newSize;
	}
}
| zhangleidaniejian/bluemmSite | src/main/java/cn/com/bluemoon/jeesite/modules/pay/service/alipay/Base64.java | Java | apache-2.0 | 8,005 |
/*
* Ark-Sys Kft. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package hu.ibello.pages;
import hu.ibello.actions.BrowserActionBuilder;
import hu.ibello.actions.KeyHelper;
import hu.ibello.actions.Task;
import hu.ibello.actions.TaskRepeater;
import hu.ibello.actions.WebElementActionBuilder;
import hu.ibello.check.WebElementChecker;
import hu.ibello.check.WebElementGetter;
import hu.ibello.core.Browser;
import hu.ibello.core.Value;
import hu.ibello.data.TestDataBuilder;
import hu.ibello.elements.WebElement;
import hu.ibello.expect.ExpectationBuilder;
import hu.ibello.search.SearchTool;
/**
* Helper interface for page objects.
* <p>
* There is no need to use this interface directly.
* The <em>ibello</em> dependency injection system automatically instantiates and injects
* an instance into page objects.
* </p>
* @see PageObject
* @author Kornél Simon
*/
public interface PageObjectTool {

	/**
	 * <p>
	 * Returns a {@link SearchTool} instance which is used to search elements on the page.
	 * </p>
	 * <p>
	 * The returned instance offers a fluent interface for element search. Example:
	 * </p>
	 * <pre>
	 * WebElement image = ...;
	 * WebElement child = find().using(By.TAG_NAME, "span").leftFrom(image).first();
	 * </pre>
	 * @param windowId identifier of the browser window
	 * @return an object used for element search on the page
	 */
	SearchTool find(String windowId);

	/**
	 * Returns a configuration property as a {@link Value}. The returned value offers some public methods to
	 * transform the configuration property into different java types.
	 * This method always has a non-null result, even if the configuration value does not exist - in this case,
	 * the wrapped value will be <code>null</code>.
	 * @param name name of the configuration parameter
	 * @return value of the configuration parameter wrapped into a {@link Value} instance
	 */
	Value getConfigurationValue(String name);

	/**
	 * Returns an URL which is merged with the <code>ibello.url.base</code> configuration property.
	 * <p>
	 * If the given URL is an absolute one with protocol, then it will be returned (without any changes).
	 * </p>
	 * <p>
	 * If the URL is relative, then it will be concatenated to the configuration property, and the result will be returned.
	 * </p>
	 * @param url the absolute or relative URL
	 * @return the merged URL
	 */
	String getMergedURL(String url);

	/**
	 * Returns a {@link Browser} instance which can be used for different browser-specific actions,
	 * including element search and opening URL.
	 * @param windowId identifier of the browser window
	 * @return an interface which offers browser-specific actions
	 */
	Browser browser(String windowId);

	/**
	 * Returns an {@link WebElementActionBuilder} instance which can be used to perform an action on the web element.
	 * If the action fails then the test execution stops.
	 * @param element we want to perform an action with this elements
	 * @return an interface configured for doing actions with the element
	 */
	WebElementActionBuilder doWith(WebElement element);

	/**
	 * Returns an {@link WebElementActionBuilder} instance which can be used to perform an action on the web element.
	 * If the action fails then the test execution continues.
	 * @param element we want to perform an action with this elements
	 * @return an interface configured for doing actions with the element
	 */
	WebElementActionBuilder tryWith(WebElement element);

	/**
	 * Returns an {@link BrowserActionBuilder} instance which can be used to manager browser window related actions.
	 * If the action fails then test execution stops.
	 * @param browser the browser instance, can be obtained with the {@link PageObject#browser()} method
	 * @return an interface configured for doing actions with the browser
	 */
	BrowserActionBuilder doWith(Browser browser);

	/**
	 * Returns an {@link BrowserActionBuilder} instance which can be used to manager browser window related actions.
	 * If the action fails then test execution continues.
	 * @param browser the browser instance, can be obtained with the {@link PageObject#browser()} method
	 * @return an interface configured for doing actions with the browser
	 */
	BrowserActionBuilder tryWith(Browser browser);

	/**
	 * Returns a {@link TaskRepeater} instance which will run the given task multiple times.
	 * The methods of this instance are customizing the exit condition of this repeat process.
	 * @param task the task we want to repeat
	 * @return an interface configured for repeating the task
	 */
	TaskRepeater repeat(Task task);

	/**
	 * Returns a {@link WebElementChecker} instance which can be used to inspect a {@link WebElement}'s state and have
	 * some boolean result about it. The investigation holds until the element is accessible (or the time runs out).
	 * @param element the element we need to investigate
	 * @return an interface configured for running investigation on the element
	 */
	WebElementChecker checkThat(WebElement element);

	/**
	 * Returns a {@link WebElementGetter} instance which can be used to get some property of the given {@link WebElement}.
	 * The operation waits until the element is accessible (or the time runs out).
	 * @param element the element
	 * @return an interface configured for querying properties of the element
	 */
	WebElementGetter get(WebElement element);

	/**
	 * <p>
	 * Returns an {@link ExpectationBuilder} instance which can be used to build and execute an expectation.
	 * </p>
	 * <p>
	 * The returned instance offers a fluent interface for building and executing expectations. Example:
	 * </p>
	 * <pre>
	 * WebElement okButton = ...;
	 * WebElements buttons = ...;
	 * expectations().expect(okButton).toBe().visible();
	 * expectations().expect(buttons).toHave().size(5);
	 * expectations().expect(browser()).toHave().url("http://localhost/page");
	 * </pre>
	 * @param windowId identifier of the browser window
	 * @return an {@link ExpectationBuilder} instance which is configured to run expectations
	 */
	ExpectationBuilder expectations(String windowId);

	/**
	 * Returns a {@link KeyHelper} instance, which offers special keys and key modifiers.
	 * Those can be used in {@link WebElementActionBuilder#sendKeys(CharSequence...)} and
	 * {@link WebElementActionBuilder#sendKeys(hu.ibello.actions.KeyModifier, CharSequence...)} methods.
	 * @return a {@link KeyHelper} instance
	 */
	KeyHelper keys();

	/**
	 * Starts a new test data loading. The result is a {@link TestDataBuilder} instance which can be used
	 * to configure and perform the test loading operation.
	 * @return a test data loader instance
	 * @see TestDataBuilder
	 */
	TestDataBuilder testData();
}
| kokog78/ibello-api | src/main/java/hu/ibello/pages/PageObjectTool.java | Java | apache-2.0 | 7,212 |
/*============================================================
Problem: Simplify Path
==============================================================
Given an absolute path for a file (Unix-style), simplify it.
For example,
path = "/home/", => "/home"
path = "/a/./b/../../c/", => "/c"
Corner Cases:
Did you consider the case where path = "/../"?
In this case, you should return "/".
Another corner case is the path might contain multiple
slashes '/' together, such as "/home//foo/".
In this case, you should ignore redundant slashes and
return "/home/foo".
============================================================*/
class Solution {
public:
    /**
     * Canonicalizes an absolute Unix-style path:
     *  - collapses runs of '/' ("/home//foo" -> "/home/foo")
     *  - drops "." components
     *  - ".." pops the previous component; ".." at the root is ignored
     *    ("/../" -> "/")
     * Returns "/" when nothing remains; no trailing slash otherwise.
     */
    std::string simplifyPath(std::string path) {
        // Tokenize on '/'; empty tokens come from repeated or trailing slashes.
        std::vector<std::string> parts;
        std::istringstream stream(path);
        std::string token;
        while (std::getline(stream, token, '/')) {
            if (token.empty() || token == ".") {
                continue;  // redundant slash or "stay here"
            }
            if (token == "..") {
                if (!parts.empty()) {
                    parts.pop_back();  // go up one level; no-op at the root
                }
            } else {
                parts.push_back(token);
            }
        }
        if (parts.empty()) {
            return "/";
        }
        std::string result;
        for (const std::string& part : parts) {
            result += "/";
            result += part;
        }
        return result;
    }
};
package com.material.am.dialogplus;
/*
* Copyright 2014 Orhan Obut
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
* */
/**
 * Callback invoked when a {@link DialogPlus} dialog is cancelled.
 *
 * @author yonghoon.do
 */
@FunctionalInterface
public interface OnCancelListener {

    /**
     * Called when the given dialog is cancelled.
     *
     * @param dialog the dialog that was cancelled
     */
    void onCancel(DialogPlus dialog);
}
| 13120241790/MaterialDesginAM | ammaterialdesign/src/main/java/com/material/am/dialogplus/OnCancelListener.java | Java | apache-2.0 | 708 |
<?php /* Shared page header: navigation, <head> and opening wrapper markup. */ include("header.php"); ?>
    <!-- Page Content -->
    <div id="page-content-wrapper">
      <div class="container-fluid">
        <div class="row">
          <div class="col-lg-12">
            <h1>Server Statistics</h1>
            <?php
              /* Emits the server statistics markup into the page body. */
              include('read_stats.php');
            ?>
            <a href="#menu-toggle" class="btn btn-default" id="menu-toggle">Toggle Menu</a>
          </div>
        </div>
      </div>
    </div>
    <!-- /#page-content-wrapper -->
<?php /* Closes the markup opened by header.php. */ include("close_header.html"); ?>
| henfredemars/php-wall-project | stats.php | PHP | apache-2.0 | 562 |
/**
* IGroupHandler.java
* com.oppo.base.cache.group
*
* Function: TODO
*
* ver date author
* ──────────────────────────────────
* 1.0 2011-10-25 80036381
*
* Copyright (c) 2011 NearMe, All Rights Reserved.
*/
package com.oppo.base.cache.group;
import java.util.List;
import com.oppo.base.cache.StorageObject;
/**
 * ClassName: IGroupHandler
 * Function: key handling for a group of cache entries.
 *
 * @author 80036381
 * @version
 * @since Ver 1.1
 * @Date 2011-10-25 05:22:42 PM
 */
public interface IGroupHandler<T> {

	/**
	 * Merges several cache selectors into a single {@link StorageObject}
	 * so that a batch query can be issued against the storage.
	 *
	 * @param csList the selectors to combine
	 * @return the combined selector
	 */
	StorageObject combin(List<StorageObject> csList);

	/**
	 * Derives the corresponding cache key from a value fetched from storage.
	 *
	 * @param tObj the value retrieved from storage
	 * @return the related cache key
	 */
	String getRelateKey(T tObj);
}
| jabelai/Neverland | J2EE/BaseTools/src/main/java/com/oppo/base/cache/group/IGroupHandler.java | Java | apache-2.0 | 1,006 |
/* Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/framework/details/build_strategy.h"
#include "paddle/fluid/framework/details/multi_devices_graph_check_pass.h"
#include "paddle/fluid/framework/details/multi_devices_graph_print_pass.h"
#include "paddle/fluid/framework/details/sequential_execution_pass.h"
#include "paddle/fluid/framework/ir/graph.h"
#include "paddle/fluid/framework/ir/graph_viz_pass.h"
namespace paddle {
namespace framework {
namespace details {
// Assembles the default pass pipeline for ParallelExecutor from a
// BuildStrategy. Pass registration order is significant: op fusion must run
// before the graph is converted to the multi-device form, and the check pass
// runs last to validate the converted graph.
class ParallelExecutorPassBuilder : public ir::PassBuilder {
 public:
  explicit ParallelExecutorPassBuilder(const BuildStrategy &strategy)
      : ir::PassBuilder(), strategy_(strategy) {
    if (strategy_.enable_sequential_execution_) {
      AppendPass("sequential_execution_pass");
    }
    // Add a graph viz pass to record a graph.
    if (!strategy_.debug_graphviz_path_.empty()) {
      auto viz_pass = AppendPass("graph_viz_pass");
      const std::string graph_path = string::Sprintf(
          "%s%s", strategy_.debug_graphviz_path_.c_str(), "_original_graph");
      viz_pass->Set<std::string>("graph_viz_path", new std::string(graph_path));
    }
    // Add op fusion.
    if (strategy.fuse_elewise_add_act_ops_) {
      auto fuse_elewise_add_act_pass = AppendPass("fuse_elewise_add_act_pass");
      // Add a graph viz pass to record a graph.
      if (!strategy.debug_graphviz_path_.empty()) {
        auto viz_pass = AppendPass("graph_viz_pass");
        const std::string graph_path = string::Sprintf(
            "%s%s", strategy.debug_graphviz_path_.c_str(), "_fused_graph");
        viz_pass->Set<std::string>("graph_viz_path",
                                   new std::string(graph_path));
      }
    }
    // Convert graph to run on multi-devices.
    // Its runtime attributes (places/scopes/...) are injected later in
    // BuildStrategy::Apply; here only the strategy is attached (not owned).
    auto multi_devices_pass = AppendPass("multi_devices_pass");
    multi_devices_pass->SetNotOwned<const BuildStrategy>("strategy",
                                                         &strategy_);
    // Add a graph print pass to record a graph with device info.
    if (!strategy_.debug_graphviz_path_.empty()) {
      auto multi_devices_print_pass = AppendPass("multi_devices_print_pass");
      multi_devices_print_pass->SetNotOwned<const std::string>(
          "debug_graphviz_path", &strategy_.debug_graphviz_path_);
      multi_devices_print_pass->Set<details::GraphvizSSAGraphPrinter>(
          "graph_printer", new details::GraphvizSSAGraphPrinter);
    }
    // Verify that the graph is correct for multi-device executor.
    AppendPass("multi_devices_check_pass");
  }

 private:
  BuildStrategy strategy_;  // copied so the builder outlives the caller's strategy
};
std::shared_ptr<ir::PassBuilder> BuildStrategy::CreatePassesFromStrategy()
const {
pass_builder_.reset(new ParallelExecutorPassBuilder(*this));
return pass_builder_;
}
// Runs the configured pass pipeline over `main_program` and returns the
// resulting graph. The places/scopes/param arguments are injected into the
// relevant passes as non-owned attributes before each pass is applied.
std::unique_ptr<ir::Graph> BuildStrategy::Apply(
    const ProgramDesc &main_program, const std::vector<platform::Place> &places,
    const std::string &loss_var_name,
    const std::unordered_set<std::string> &param_names,
    const std::vector<Scope *> &local_scopes,
#ifdef PADDLE_WITH_CUDA
    const bool use_cuda, platform::NCCLContextMap *nccl_ctxs) const {
#else
    const bool use_cuda) const {
#endif
  // Create a default one if not initialized by user.
  if (!pass_builder_) {
    CreatePassesFromStrategy();
  }
  std::unique_ptr<ir::Graph> graph(new ir::Graph(main_program));
  for (std::shared_ptr<ir::Pass> &pass : pass_builder_->AllPasses()) {
    if (pass->Type() == "multi_devices_pass") {
      // Erase-then-set so repeated Apply calls see the current inputs.
      pass->Erase("places");
      pass->SetNotOwned<const std::vector<platform::Place>>("places", &places);
      pass->Erase("loss_var_name");
      pass->SetNotOwned<const std::string>("loss_var_name", &loss_var_name);
      pass->Erase("params");
      pass->SetNotOwned<const std::unordered_set<std::string>>("params",
                                                               &param_names);
      pass->Erase("local_scopes");
      pass->SetNotOwned<const std::vector<Scope *>>("local_scopes",
                                                    &local_scopes);
#ifdef PADDLE_WITH_CUDA
      // Only hand the NCCL contexts to the pass when running on CUDA.
      platform::NCCLContextMap *nctx = use_cuda ? nccl_ctxs : nullptr;
      pass->Erase("nccl_ctxs");
      pass->SetNotOwned<platform::NCCLContextMap>("nccl_ctxs", nctx);
#endif
    } else if (pass->Type() == "sequential_execution_pass") {
      // The pass owns this op-desc list copy (plain Set, not SetNotOwned).
      pass->Erase(kAllOpDescs);
      pass->Set<const std::vector<OpDesc *>>(
          kAllOpDescs,
          new std::vector<OpDesc *>(main_program.Block(0).AllOps()));
    }
    graph = pass->Apply(std::move(graph));
  }
  return graph;
}
} // namespace details
} // namespace framework
} // namespace paddle
// Register the passes referenced by ParallelExecutorPassBuilder with the
// global pass registry so they can be resolved by name at runtime.
USE_PASS(fuse_elewise_add_act_pass);
USE_PASS(graph_viz_pass);
USE_PASS(multi_batch_merge_pass);
USE_PASS(multi_devices_pass);
USE_PASS(multi_devices_check_pass);
USE_PASS(multi_devices_print_pass);
USE_PASS(sequential_execution_pass);
| reyoung/Paddle | paddle/fluid/framework/details/build_strategy.cc | C++ | apache-2.0 | 5,422 |
/**
 * Minimal test handler for the IOpipe plugin integration.
 *
 * Succeeds with an Error payload when the plugin API
 * (`context.iopipe.mark`) is missing, otherwise succeeds with 200.
 */
module.exports.handler = (event, context) => {
  const hasIopipeMark = context.iopipe && context.iopipe.mark;
  if (hasIopipeMark) {
    return context.succeed(200);
  }
  return context.succeed(new Error('No plugins'));
};
| iopipe/serverless-plugin-iopipe | testProjects/cosmi/handlers/simple.js | JavaScript | apache-2.0 | 187 |
package org.aktin.broker.rest;
import java.io.IOException;
import java.io.Reader;
import java.net.URISyntaxException;
import java.sql.SQLException;
import java.time.Instant;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.inject.Inject;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.NotAcceptableException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.OPTIONS;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import org.aktin.broker.db.BrokerBackend;
import org.aktin.broker.util.RequestTypeManager;
import org.aktin.broker.websocket.MyBrokerWebsocket;
import org.aktin.broker.websocket.RequestAdminWebsocket;
import org.aktin.broker.xml.RequestInfo;
import org.aktin.broker.xml.RequestList;
import org.aktin.broker.xml.RequestStatusInfo;
import org.aktin.broker.xml.RequestStatusList;
import org.aktin.broker.xml.RequestTargetNodes;
/**
 * RESTful HTTP endpoint to manage broker requests.
 *
 * Request management is not done by client nodes. For client node usage,
 * see {@link MyBrokerEndpoint}.
 *
 * @author R.W.Majeed
 *
 */
@Authenticated
@RequireAdmin
@Path("/broker/request")
public class RequestAdminEndpoint extends AbstractRequestEndpoint{
	private static final Logger log = Logger.getLogger(RequestAdminEndpoint.class.getName());

	@Inject
	private BrokerBackend db;

	@Inject
	private RequestTypeManager typeManager;

	/***
	 * Create a new request at the broker and return the request location in the HTTP-response
	 * header {@code Location}.
	 *
	 * This method can be used either to create an empty request if no {@code Content-type} header
	 * is given (e.g. to add multiple content representations later) or it can be used to create
	 * a request already containing a request definition.
	 * In the latter case the {@code Content-type} header must be present and the HTTP payload is
	 * used for the representation.
	 *
	 * @param content HTTP payload to use as first content representation
	 * @param headers HTTP request headers containing Content-type
	 * @param info URI info for the response location
	 * @return request object with status 201 and location header.
	 * @throws URISyntaxException URI syntax processing error
	 */
	@POST
	public Response createRequest(Reader content, @Context HttpHeaders headers, @Context UriInfo info) throws URISyntaxException{
		MediaType type = headers.getMediaType();
		try {
			int id;
			if( type != null ){
				// remove charset information, since we already have the string representation
				type = removeCharsetInfo(type);
				id = db.createRequest(type.toString(), content);
			}else{
				// no content type passed
				// TODO verify that also no content is given (data without content type)
				id = db.createRequest();
			}
			String ref = "/broker/request/"+Integer.toString(id);
			// may return the wrong scheme (eg http instead of https) behind reverse proxies
			//return Response.created(new URI(ref)).build();
			// allow override via system property
			UriBuilder ub = info.getBaseUriBuilder().path(ref);
			String forceScheme = System.getProperty("force.uri.scheme");
			if( forceScheme != null ){
				log.info("Forcing response location URI scheme "+forceScheme);
				ub.scheme(forceScheme);
			}
			// notify listeners
			RequestAdminWebsocket.broadcastRequestCreated(id);
			return Response.created(ub.build()).build();
		} catch (SQLException e) {
			log.log(Level.SEVERE, "Unable to create request", e);
			return Response.serverError().build();
		}
	}

	/**
	 * Add (additional) request definitions to an existing request. This method is similar
	 * to {@link #createRequest(Reader, HttpHeaders, UriInfo)} but expects the request
	 * to be already existing.
	 *
	 * @param requestId request id
	 * @param content request definition
	 * @param headers Content-type header
	 * @throws BadRequestException Unable to parse numeric request id or missing Content-type header
	 */
	@PUT
	@Path("{id}")
	public void addRequestDefinition(@PathParam("id") String requestId, Reader content, @Context HttpHeaders headers) throws BadRequestException{
		MediaType type = headers.getMediaType();
		if( type == null ){
			// Fix: previously a missing Content-type header caused an NPE
			// (HTTP 500) in removeCharsetInfo; report a clear client error instead.
			throw new BadRequestException("Content-type header required to store a request definition");
		}
		try {
			// remove charset information, since we already have the string representation
			type = removeCharsetInfo(type);
			// create or replace if already exists
			int reqId = Integer.parseInt(requestId);
			db.setRequestDefinition(reqId, type.toString(), content);
		} catch (SQLException e) {
			log.log(Level.SEVERE, "Unable to create request definition", e);
			throw new InternalServerErrorException();
		} catch( NumberFormatException e ){
			log.log(Level.SEVERE, "Unable to parse request id: "+requestId, e);
			throw new BadRequestException();
		}
	}

	/**
	 * List all request available at this broker
	 *
	 * @return HTTP 200 with XML representation of all requests
	 */
	@GET
	@Produces(MediaType.APPLICATION_XML)
	public RequestList listAllRequests() {
		try {
			return new RequestList(db.listAllRequests());
		} catch (SQLException e) {
			log.log(Level.SEVERE, "Unable to read requests", e);
			throw new InternalServerErrorException();
		}
	}

	/**
	 * List filter requests available at this broker via predicate e.g. XPath
	 *
	 * @param type XML media type of the request definitions to search (must end with +xml or /xml)
	 * @param predicate XPath predicate evaluated against each request definition
	 * @return HTTP 200 with XML representation of all matching requests
	 */
	@GET
	@Path("filtered")
	@Produces(MediaType.APPLICATION_XML)
	public RequestList listAllRequests(@QueryParam("type") String type, @QueryParam("predicate") String predicate) {
		if( type == null || !(type.endsWith("+xml") || type.endsWith("/xml")) ) {
			log.log(Level.WARNING,"Ignoring bad request for filtered type {0}",type);
			throw new BadRequestException("type param required ending with +xml or /xml");
		}
		try {
			return new RequestList(db.searchAllRequests(type,"XPath",predicate));
		}catch( IllegalArgumentException e ) {
			log.log(Level.WARNING,"Filter request failed for predicate "+predicate, e.getCause());
			// Fix: previously reused the unrelated "type param" message here,
			// hiding the actual cause (a predicate evaluation failure).
			throw new BadRequestException("unable to evaluate predicate: "+predicate);
		} catch (IOException e) {
			log.log(Level.SEVERE, "Unable to read requests", e);
			throw new InternalServerErrorException();
		}
	}

	/**
	 * Delete a single existing request. HTTP 200 on success
	 *
	 * @param id request id to delete
	 */
	@DELETE
	@Path("{id}")
	public void deleteRequest(@PathParam("id") String id){
		int i;
		try{
			i = Integer.parseInt(id);
		}catch( NumberFormatException e ){
			// cannot delete non-numeric request id
			final String message = "Unable to delete non-numeric request id: "+id;
			log.warning(message);
			throw new BadRequestException(message);
		}
		try {
			db.deleteRequest(i);
		} catch (SQLException e) {
			log.log(Level.SEVERE, "Unable to delete request "+id, e);
			throw new InternalServerErrorException();
		}
	}

	/**
	 * Get a single content representation for the given request which matches the provided Accept header.
	 *
	 * @param requestId request id request id
	 * @param headers headers headers containing acceptable media types
	 * @return request definition matching the Accept header
	 * @throws SQLException SQL error
	 * @throws IOException IO error
	 * @throws NotFoundException request id does not exist
	 * @throws NotAcceptableException requested media type not available
	 */
	@GET
	@Path("{id}")
	public Response getRequest(@PathParam("id") Integer requestId, @Context HttpHeaders headers) throws SQLException, IOException, NotFoundException, NotAcceptableException{
		List<MediaType> accept = headers.getAcceptableMediaTypes();
		return getRequest(requestId, accept);
	}

	/**
	 * Retrieve available HTTP methods for the given request ID
	 *
	 * @param requestId request id
	 * @return response with {@code Allow} header or 404 if request was not found
	 * @throws SQLException SQL error
	 * @throws IOException IO error
	 */
	@OPTIONS
	@Path("{id}")
	//@Produces(MediaType.APPLICATION_XML) will cause errors in this case. therefore the media type is set below
	public Response getRequestInfo(@PathParam("id") int requestId) throws SQLException, IOException{
		// TODO return RequestInfo
		RequestInfo info = db.getRequestInfo(requestId);
		ResponseBuilder response;
		if( info == null ){
			response = Response.status(Status.NOT_FOUND);
		}else{
			response = Response.ok(info, MediaType.APPLICATION_XML_TYPE);
		}
		return response.allow("GET","PUT","DELETE","OPTIONS").build();
	}

	/**
	 * List status information for the specified request for each node
	 * @param requestId request
	 * @return status information list
	 * @throws SQLException SQL error
	 * @throws IOException IO error
	 */
	@GET
	@Path("{id}/status")
	@Produces(MediaType.APPLICATION_XML)
	public RequestStatusList getRequestInfo(@PathParam("id") Integer requestId) throws SQLException, IOException{
		// TODO return RequestInfo
		List<RequestStatusInfo> list = db.listRequestNodeStatus(requestId);
		if( list == null ){
			throw new NotFoundException();
		}else{
			return new RequestStatusList(list);
		}
	}

	/**
	 * Get the targeted nodes for this request. A resource is only returned, if
	 * the request is limited to / targeted at specifig nodes.
	 * @param requestId request
	 * @return target node list
	 * @throws SQLException SQL error
	 * @throws IOException IO error
	 * @throws NotFoundException if the request is not targeted at specific nodes
	 */
	@GET
	@Path("{id}/nodes")
	@Produces(MediaType.APPLICATION_XML)
	public RequestTargetNodes getRequestTargetNodes(@PathParam("id") Integer requestId) throws SQLException, IOException, NotFoundException{
		int[] nodes = db.getRequestTargets(requestId);
		if( nodes == null ){
			throw new NotFoundException();
		}
		return new RequestTargetNodes(nodes);
	}

	/**
	 * Delete a restriction to certain target nodes. When the delete is successful, the
	 * request can be retrieved by all nodes.
	 * @param requestId request id
	 * @throws SQLException SQL error
	 * @throws IOException IO error
	 * @throws NotFoundException request not found or request not targeted at specific nodes
	 */
	@DELETE
	@Path("{id}/nodes")
	public void clearRequestTargetNodes(@PathParam("id") Integer requestId) throws SQLException, IOException, NotFoundException{
		// TODO check for valid requestId and throw NotFoundException otherwise
		int[] nodes = db.getRequestTargets(requestId);
		if( nodes == null ){
			throw new NotFoundException();
		}
		db.clearRequestTargets(requestId);
	}

	/**
	 * Apply a publication restriction to the request, making it available only to the given nodes.
	 * Only the specified nodes will be able to retrieve the request.
	 *
	 * To clear the restriction, submit an empty restriction list. Without the restriction,
	 * any node can download the request - also including nodes added in the future.
	 *
	 * @param requestId request id
	 * @param nodes selected list of nodes to which the request should be available
	 * @throws SQLException SQL error
	 * @throws IOException IO error
	 * @throws NotFoundException request not found
	 */
	@PUT
	@Path("{id}/nodes")
	@Consumes(MediaType.APPLICATION_XML)
	public void setRequestTargetNodes(@PathParam("id") Integer requestId, RequestTargetNodes nodes) throws SQLException, IOException, NotFoundException{
		if( nodes == null || nodes.getNodes() == null || nodes.getNodes().length == 0 ){
			String message = "node targeting requires at least one node";
			log.warning(message);
			throw new BadRequestException(message);
		}
		// TODO replacing / changing nodes deletes all status information for the previous node. Find a way/restrictions to handle this case
		// XXX
		db.setRequestTargets(requestId, nodes.getNodes());
	}

	/**
	 * Retrieve request node status message (e.g. error messages) assigned to a request by a node
	 *
	 * @param requestId request id
	 * @param nodeId node id
	 * @return error payload
	 * @throws SQLException SQL error
	 * @throws IOException IO error
	 */
	@GET
	@Path("{id}/status/{nodeId}")
	public Response getRequestNodeStatusMessage(@PathParam("id") Integer requestId, @PathParam("nodeId") Integer nodeId) throws SQLException, IOException{
		// TODO set header: timestamp, custom header with status code
		Reader r = db.getRequestNodeStatusMessage(requestId, nodeId);
		if( r == null ){
			throw new NotFoundException();
		}
		// TODO retrieve and return exact media type
		return Response.ok(r, MediaType.TEXT_PLAIN).build();
	}

	/**
	 * Publish the given request. After being published, the request can be retrieved
	 * by client nodes.
	 *
	 * @param requestId request id
	 * @throws SQLException SQL error
	 */
	@POST
	@Path("{id}/publish")
	public void publishRequest(@PathParam("id") Integer requestId) throws SQLException{
		// find query
		RequestInfo info = db.getRequestInfo(requestId);
		if( info == null ){
			// 404 if not found
			throw new NotFoundException();
		}else if( info.published != null ){
			; // already published, nothing to do
		}else{
			// TODO use timestamp from headers for future publishing
			// update published timestamp
			db.setRequestPublished(requestId, Instant.now());
			// broadcast to connected websocket clients
			int[] nodeIds = null;
			if( info.targeted ) {
				// notify only selected nodes
				nodeIds = db.getRequestTargets(requestId);
			}
			MyBrokerWebsocket.broadcastRequestPublished(requestId, nodeIds);
			RequestAdminWebsocket.broadcastRequestPublished(requestId);
		}
	}

	/**
	 * Mark a request as closed.
	 * The client nodes will stop processing requests in closed state.
	 *
	 * @param requestId request id
	 * @throws SQLException SQL error
	 */
	@POST
	@Path("{id}/close")
	public void closeRequest(@PathParam("id") Integer requestId) throws SQLException{
		// find query
		RequestInfo info = db.getRequestInfo(requestId);
		if( info == null ){
			// 404 if not found
			throw new NotFoundException();
		}else if( info.closed != null ){
			; // already closed, nothing to do
		}else{
			// update published timestamp
			db.setRequestClosed(requestId, Instant.now());
			// broadcast to connected websocket clients
			int[] nodeIds = null;
			if( info.targeted ) {
				// notify only selected nodes
				nodeIds = db.getRequestTargets(requestId);
			}
			MyBrokerWebsocket.broadcastRequestClosed(requestId, nodeIds);
			RequestAdminWebsocket.broadcastRequestClosed(requestId);
		}
	}

	@Override
	protected RequestTypeManager getTypeManager() {
		return typeManager;
	}

	@Override
	protected BrokerBackend getBroker() {
		return db;
	}
}
| aktin/broker | broker-server/src/main/java/org/aktin/broker/rest/RequestAdminEndpoint.java | Java | apache-2.0 | 14,961 |
/*
* Copyright 2006-2012 The Scriptella Project Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scriptella.driver.text;
import scriptella.spi.ConnectionParameters;
import scriptella.text.PropertyFormatInfo;
import scriptella.text.PropertyFormatter;
import java.net.URL;
/**
* Connection parameters for text drivers.
*
* @author Fyodor Kupolov
* @version 1.1
*/
public class TextConnectionParameters {
    private ConnectionParameters connectionParameters;
    // Character encoding of the text stream; null means driver default.
    private String encoding;
    // Flush after each write; forced on when writing to the console.
    private boolean flush;
    private URL url; //if null - use console
    // End-of-line marker used when writing; defaults to "\n".
    private String eol;
    // Number of leading lines to skip when reading.
    private int skipLines;
    // Whether to trim whitespace from each line when reading.
    private boolean trimLines;
    private PropertyFormatter propertyFormatter;

    /**
     * Creates parameters with built-in defaults: console I/O, no flushing,
     * "\n" EOL, no skipped lines, trimming enabled, empty property format.
     */
    protected TextConnectionParameters() {
        encoding = null;
        flush = false;
        url = null;
        eol = "\n";
        this.skipLines = 0;
        trimLines = true;
        this.propertyFormatter = new PropertyFormatter(PropertyFormatInfo.createEmpty());
    }

    /**
     * Initializes parameters from the declared connection properties.
     *
     * @param parameters driver connection parameters to read from
     */
    public TextConnectionParameters(ConnectionParameters parameters) {
        this.connectionParameters = parameters;
        //URL can be set null, in this case console is used for reading/writing
        url = parameters.getUrl() == null ? null : parameters.getResolvedUrl();
        encoding = parameters.getCharsetProperty(AbstractTextConnection.ENCODING);
        //When printing to console - flushing is enabled
        flush = url == null || parameters.getBooleanProperty(AbstractTextConnection.FLUSH, false);
        String eolStr = parameters.getStringProperty(TextConnection.EOL);
        eol = eolStr != null ? eolStr : "\n";//Default value
        skipLines = parameters.getIntegerProperty(AbstractTextConnection.SKIP_LINES, 0);
        trimLines = parameters.getBooleanProperty(AbstractTextConnection.TRIM, true);
        propertyFormatter = new PropertyFormatter(PropertyFormatInfo.parse(parameters, AbstractTextConnection.FORMAT_PREFIX));
    }

    public ConnectionParameters getConnectionParameters() {
        return connectionParameters;
    }

    public String getEncoding() {
        return encoding;
    }

    public boolean isFlush() {
        return flush;
    }

    /**
     * Returns resolved URL for this connection.
     * <p>If null, the console is used for reading/writing.
     *
     * @return resolved URL or null.
     */
    public URL getUrl() {
        return url;
    }

    public String getEol() {
        return eol;
    }

    public int getSkipLines() {
        return skipLines;
    }

    public PropertyFormatter getPropertyFormatter() {
        return propertyFormatter;
    }

    // Convenience: sets the null-string on the formatter's default format.
    public void setDefaultNullString(String nullString) {
        getPropertyFormatter().getFormatInfo().getDefaultFormat().setNullString(nullString);
    }

    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    public void setFlush(boolean flush) {
        this.flush = flush;
    }

    public void setEol(String eol) {
        this.eol = eol;
    }

    public void setSkipLines(int skipLines) {
        this.skipLines = skipLines;
    }

    public boolean isTrimLines() {
        return trimLines;
    }

    public void setTrimLines(boolean trimLines) {
        this.trimLines = trimLines;
    }
}
| scriptella/scriptella-etl | drivers/src/java/scriptella/driver/text/TextConnectionParameters.java | Java | apache-2.0 | 3,782 |
package de.endrullis.idea.postfixtemplates.templates;
/**
* Special types representing groups of Java types/classes.
*
* @author Stefan Endrullis <stefan@endrullis.de>
*/
public enum SpecialType {
    // broad categories / groupings of types
    ANY, VOID, NON_VOID, ARRAY, BOOLEAN, ITERABLE_OR_ARRAY, NOT_PRIMITIVE, NUMBER, CLASS,
    //FIELD, LOCAL_VARIABLE, VARIABLE, ASSIGNMENT,
    // primitive numeric/char types
    BYTE, SHORT, CHAR, INT, LONG, FLOAT, DOUBLE,
    // literal-expression variants of the primitives above
    BYTE_LITERAL, SHORT_LITERAL, CHAR_LITERAL, INT_LITERAL, LONG_LITERAL, FLOAT_LITERAL, DOUBLE_LITERAL,
    NUMBER_LITERAL, STRING_LITERAL,
}
| xylo/intellij-postfix-templates | src/de/endrullis/idea/postfixtemplates/templates/SpecialType.java | Java | apache-2.0 | 530 |
<?php
require_once('Autoload.php');
class SQLAuthTest extends PHPUnit\Framework\TestCase
{
    /**
     * End-to-end test of the SQL-backed authenticator against a local MySQL
     * instance: table setup, login, group membership and pending-user checks.
     * NOTE(review): requires a reachable MySQL server with root/no password.
     */
    public function testSQLAuthenticator()
    {
        $GLOBALS['FLIPSIDE_SETTINGS_LOC'] = './tests/helpers';
        if(!isset(FlipsideSettings::$dataset['auth']))
        {
            $params = array('dsn'=>'mysql:host=localhost;dbname=auth', 'host'=>'localhost', 'user'=>'root', 'pass'=>'');
            FlipsideSettings::$dataset['auth'] = array('type'=>'SQLDataSet', 'params'=>$params);
        }
        $dataSet = \Flipside\DataSetFactory::getDataSetByName('auth');
        // Recreate the auth tables from scratch so the test is repeatable.
        $dataSet->raw_query('DROP TABLE user;');
        $dataSet->raw_query('DROP TABLE tblgroup;');
        $dataSet->raw_query('CREATE TABLE user (uid VARCHAR(255), pass VARCHAR(255));');
        $dataSet->raw_query('CREATE TABLE tblgroup (gid VARCHAR(255), uid VARCHAR(255), description VARCHAR(255));');
        $params = array('current'=>true, 'pending'=>false, 'supplement'=>false, 'current_data_set'=>'auth');
        $auth = new \Flipside\Auth\SQLAuthenticator($params);
        $this->assertFalse($auth->login('test', 'test'));
        // Stored hash is bcrypt for the password "test".
        $dataSet->raw_query('INSERT INTO user VALUES (\'test\', \'$2y$10$bBzajdH12NSC9MOmMldfxOlozTKSS7Dyl3apWhyO53/KobKtHkoES\');');
        $res = $auth->login('test', 'test');
        $this->assertNotFalse($res);
        $this->assertFalse($auth->login('test', 'test1'));
        $this->assertTrue($auth->isLoggedIn($res));
        $this->assertFalse($auth->isLoggedIn(false));
        $user = $auth->getUser($res);
        $this->assertInstanceOf('Flipside\Auth\SQLUser', $user);
        $this->assertEquals('test', $user->uid);
        $user = $auth->getUserByName('test');
        $this->assertInstanceOf('Flipside\Auth\SQLUser', $user);
        $this->assertEquals('test', $user->uid);
        $user = $auth->getUserByName('test1');
        $this->assertNull($user);
        $group = $auth->getGroupByName('test');
        $this->assertNull($group);
        $dataSet->raw_query('INSERT INTO tblgroup VALUES (\'test\', \'test\', \'Test Group\');');
        $group = $auth->getGroupByName('test');
        $this->assertNotFalse($group);
        $this->assertInstanceOf('Flipside\Auth\SQLGroup', $group);
        $user = $auth->getUserByName('test');
        $this->assertTrue($user->isInGroupNamed('test'));
        $this->assertFalse($user->isInGroupNamed('test1'));
        $user->mail = 'test@test.com';
        // Activating a pending user is expected to fail on this authenticator
        // (current-only configuration, no pending backend).
        $pendingUser = new \Flipside\Auth\PendingUser();
        $pendingUser->uid = 'test1';
        $pendingUser->mail = 'test@test.com';
        $pendingUser->sn = 'User';
        $pendingUser->givenName = 'Test';
        $pendingUser->host = 'test.com';
        $user = $auth->activatePendingUser($pendingUser);
        $this->assertFalse($user);
        $this->assertEquals(0, $auth->getPendingUserCount());
    }

    /**
     * A non-current authenticator must reject logins outright.
     *
     * @depends testSQLAuthenticator
     */
    public function testFunctionsNonCurrent()
    {
        $params = array();
        $params['current'] = false;
        $params['pending'] = false;
        $params['supplement'] = false;
        $params['current_data_set'] = 'auth';
        $auth = new \Flipside\Auth\SQLAuthenticator($params);
        $this->assertFalse($auth->login('test', 'test'));
    }

    /**
     * Exercises pending-user creation and hash lookup against a separate
     * pending-auth database.
     *
     * @depends testSQLAuthenticator
     */
    public function testPending()
    {
        $GLOBALS['FLIPSIDE_SETTINGS_LOC'] = './tests/helpers';
        if(!isset(FlipsideSettings::$dataset['pending_auth']))
        {
            $params = array('dsn'=>'mysql:host=localhost;dbname=p_auth', 'host'=>'localhost', 'user'=>'root', 'pass'=>'');
            FlipsideSettings::$dataset['pending_auth'] = array('type'=>'SQLDataSet', 'params'=>$params);
        }
        $params = array();
        $params['current'] = false;
        $params['pending'] = true;
        $params['supplement'] = false;
        $params['pending_data_set'] = 'pending_auth';
        $auth = new \Flipside\Auth\SQLAuthenticator($params);
        $dataSet = \Flipside\DataSetFactory::getDataSetByName('pending_auth');
        $dataSet->raw_query('DROP TABLE users;');
        $dataSet->raw_query('CREATE TABLE users (hash VARCHAR(255), data VARCHAR(4096), time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, UNIQUE KEY `hash` (`hash`));');
        $pendingUser = new \Flipside\Auth\PendingUser();
        $pendingUser->uid = 'test1';
        $pendingUser->mail = 'test@test.com';
        $pendingUser->sn = 'User';
        $pendingUser->givenName = 'Test';
        $pendingUser->host = 'test.com';
        $user = $auth->createPendingUser($pendingUser);
        $this->assertNotFalse($user);
        $this->assertGreaterThan(0, $auth->getPendingUserCount());
        $tmpUser = $auth->getTempUserByHash('1234567890');
        $this->assertFalse($tmpUser);
        // Look the user up again via the hash actually stored in the table.
        $users = $dataSet['users']->read();
        $tmpUser = $auth->getTempUserByHash($users[0]['hash']);
        $this->assertNotFalse($tmpUser);
    }

    /**
     * Verifies the password_hash/password_verify polyfill on PHP < 5.5.
     *
     * @requires PHP 5.4
     */
    public function testHash()
    {
        if(version_compare(PHP_VERSION, '5.5.0', '<'))
        {
            require_once('Auth/SQLAuthenticator.php');
            $hash = @\Flipside\Auth\password_hash('test');
            $this->assertNotFalse($hash);
            $this->assertTrue(\Flipside\Auth\password_verify('test', $hash));
        }
        $this->assertNotFalse(true);
    }
}
/* vim: set tabstop=4 shiftwidth=4 expandtab: */
| BurningFlipside/CommonCode | tests/travis/SQLAuthTest.php | PHP | apache-2.0 | 5,486 |
<?php
App::uses('ChatAppModel', 'Chat.Model');
/**
* QuizAnswersQuizQuestionsUser Model
*
* @property QuizAnswer $QuizAnswer
* @property QuizQuestion $QuizQuestion
* @property User $User
*/
// CakePHP join model linking quiz answers, quiz questions and users.
class QuizAnswersQuizQuestionsUser extends ChatAppModel {

    //The Associations below have been created with all possible keys, those that are not needed can be removed

    /**
     * belongsTo associations
     *
     * @var array
     */
    public $belongsTo = array(
        'QuizAnswer' => array(
            'className' => 'Chat.QuizAnswer',
            'foreignKey' => 'quiz_answer_id',
            'conditions' => '',
            'fields' => '',
            'order' => ''
        ),
        'QuizQuestion' => array(
            'className' => 'Chat.QuizQuestion',
            'foreignKey' => 'quiz_question_id',
            'conditions' => '',
            'fields' => '',
            'order' => ''
        ),
        'User' => array(
            // User lives in the app, not the Chat plugin (no "Chat." prefix).
            'className' => 'User',
            'foreignKey' => 'user_id',
            'conditions' => '',
            'fields' => '',
            'order' => ''
        )
    );
}
| phlyper/cakesky | Plugin/Chat/Model/QuizAnswersQuizQuestionsUser.php | PHP | apache-2.0 | 921 |
import multiprocessing
import threading
import uuid
import random
import time
import os
import signal
import traceback
from memsql_loader.api import shared
from memsql_loader.db import connection_wrapper, pool
from memsql_loader.loader_db.tasks import Tasks
from memsql_loader.loader_db.jobs import Jobs
from memsql_loader.execution.errors import WorkerException, ConnectionException, RequeueTask
from memsql_loader.execution.loader import Loader
from memsql_loader.execution.downloader import Downloader
from memsql_loader.util import db_utils, log
from memsql_loader.util.fifo import FIFO
from memsql_loader.util.apsw_sql_step_queue.errors import APSWSQLStepQueueException, TaskDoesNotExist
# Seconds without downloader progress before the worker force-exits (1 hour).
HUNG_DOWNLOADER_TIMEOUT = 3600
class ExitingException(Exception):
    """Internal control-flow exception used to unwind the worker loop on shutdown."""
    pass
class Worker(multiprocessing.Process):
    def __init__(self, worker_sleep, parent_pid, worker_lock):
        # Short random hex id used to correlate log lines across processes.
        self.worker_id = uuid.uuid1().hex[:8]
        self.worker_sleep = worker_sleep
        self.worker_lock = worker_lock
        # Shared C int flag: 1 while the worker is (or may be) processing, 0 when idle.
        self.worker_working = multiprocessing.Value('i', 1)
        self.parent_pid = parent_pid
        # Set by the parent (via signal_exit) to request graceful shutdown.
        self._exit_evt = multiprocessing.Event()
        self.logger = log.get_logger('worker[%s]' % self.worker_id)
        super(Worker, self).__init__(name=('worker-%s' % self.worker_id))
    def kill_query_if_exists(self, conn_args, conn_id):
        """Kill a leftover LOAD DATA query from a previous attempt, identified
        by its server-side connection id, by terminating its connection."""
        with pool.get_connection(database='information_schema', **conn_args) as conn:
            id_row = conn.query("SELECT id FROM processlist WHERE info LIKE '%%LOAD DATA%%' AND id=%s", conn_id)
            if len(id_row) > 0:
                # Since this is a LOAD DATA LOCAL query, we need to kill the
                # connection, not the query, since LOAD DATA LOCAL queries
                # don't end until the file is fully read, even if they're
                # killed.
                db_utils.try_kill_connection(conn, conn_id)
    def kill_delete_query_if_exists(self, conn_args, conn_id):
        """Kill an in-flight DELETE query running on the given connection id,
        if one exists (killing the query is sufficient here, unlike LOAD DATA)."""
        with pool.get_connection(database='information_schema', **conn_args) as conn:
            id_row = conn.query("SELECT id FROM processlist WHERE info LIKE '%%DELETE%%' AND id=%s", conn_id)
            if len(id_row) > 0:
                db_utils.try_kill_query(conn, conn_id)
    def signal_exit(self):
        """Request a graceful shutdown; safe to call from any process."""
        self._exit_evt.set()
def is_working(self):
return self.worker_working.value == 1
    def run(self):
        """Entry point of the worker process.

        Loops claiming tasks from the step queue until shutdown is requested,
        processing each inside its own non-pooled connection/transaction and
        translating failures into requeues or recorded task errors.
        """
        self.jobs = Jobs()
        self.tasks = Tasks()
        task = None
        # Ignore SIGINT/SIGQUIT in this child; shutdown is coordinated by the
        # parent through the exit event rather than signals.
        ignore = lambda *args, **kwargs: None
        signal.signal(signal.SIGINT, ignore)
        signal.signal(signal.SIGQUIT, ignore)
        try:
            while not self.exiting():
                # small random sleep de-synchronizes workers polling the queue
                time.sleep(random.random() * 0.5)
                task = self.tasks.start()
                if task is None:
                    self.worker_working.value = 0
                else:
                    self.worker_working.value = 1
                    job_id = task.job_id
                    job = self.jobs.get(job_id)
                    # Kill any LOAD DATA left running by a previous attempt of
                    # this task before retrying.
                    old_conn_id = task.data.get('conn_id', None)
                    if old_conn_id is not None:
                        self.kill_query_if_exists(job.spec.connection, old_conn_id)
                    self.logger.info('Task %d: starting' % task.task_id)
                    try:
                        # can't use a pooled connection due to transactions staying open in the
                        # pool on failure
                        with pool.get_connection(database=job.spec.target.database, pooled=False, **job.spec.connection) as db_connection:
                            db_connection.execute("BEGIN")
                            self._process_task(task, db_connection)
                        self.logger.info('Task %d: finished with success', task.task_id)
                    except (RequeueTask, ConnectionException):
                        self.logger.info('Task %d: download failed, requeueing', task.task_id)
                        self.logger.debug("Traceback: %s" % (traceback.format_exc()))
                        task.requeue()
                    except TaskDoesNotExist as e:
                        self.logger.info('Task %d: finished with error, the task was either cancelled or deleted', task.task_id)
                        self.logger.debug("Traceback: %s" % (traceback.format_exc()))
                    except WorkerException as e:
                        task.error(str(e))
                        self.logger.info('Task %d: finished with error', task.task_id)
                    except Exception as e:
                        self.logger.debug("Traceback: %s" % (traceback.format_exc()))
                        raise
            # Normal loop exit funnels through the same cleanup path below.
            raise ExitingException()
        except ExitingException:
            self.logger.debug('Worker exiting')
            # Best-effort: hand the in-flight task back to the queue.
            if task is not None and not task.valid():
                try:
                    task.requeue()
                except APSWSQLStepQueueException:
                    pass
    def _process_task(self, task, db_connection):
        """Process a single claimed task end to end.

        Cleans up rows from earlier loads of the same file if needed, then
        runs a Downloader and a Loader connected through a FIFO, monitoring
        both until completion and committing the transaction on success.
        Raises ExitingException on shutdown, RequeueTask/WorkerException on
        failure.
        """
        job_id = task.job_id
        job = self.jobs.get(job_id)
        if job is None:
            raise WorkerException('Failed to find job with ID %s' % job_id)
        # If this is a gzip file, we add .gz to the named pipe's name so that
        # MemSQL knows to decompress it unless we're piping this into a script,
        # in which case we do the decompression here in-process.
        if job.spec.options.script is not None:
            gzip = False
        else:
            gzip = task.data['key_name'].endswith('.gz')
        fifo = FIFO(gzip=gzip)
        # reduces the chance of synchronization between workers by
        # initially sleeping in the order they were started and then
        # randomly sleeping after that point
        time.sleep(self.worker_sleep)
        self.worker_sleep = 0.5 * random.random()
        if self.exiting() or not task.valid():
            raise ExitingException()
        if job.has_file_id():
            if self._should_delete(job, task):
                self.logger.info('Waiting for DELETE lock before cleaning up rows from an earlier load')
                try:
                    # Keep pinging while contending for the shared lock so the
                    # queue doesn't consider this task dead.
                    while not self.worker_lock.acquire(block=True, timeout=0.5):
                        if self.exiting() or not task.valid():
                            raise ExitingException()
                        task.ping()
                    self.logger.info('Attempting cleanup of rows from an earlier load')
                    num_deleted = self._delete_existing_rows(db_connection, job, task)
                    self.logger.info('Deleted %s rows during cleanup' % num_deleted)
                finally:
                    try:
                        self.worker_lock.release()
                    except ValueError:
                        # This is raised if we didn't acquire the lock (e.g. if
                        # there was a KeyboardInterrupt before we acquired the
                        # lock above. In this case, we don't need to
                        # release the lock.
                        pass
        if self.exiting() or not task.valid():
            raise ExitingException()
        # Wire the downloader's output into the loader through the FIFO.
        downloader = Downloader()
        downloader.load(job, task, fifo)
        loader = Loader()
        loader.load(job, task, fifo, db_connection)
        loader.start()
        downloader.start()
        try:
            # Monitor both threads, persisting progress and watching for hangs.
            while not self.exiting():
                time.sleep(0.5)
                with task.protect():
                    self._update_task(task, downloader)
                    task.save()
                if downloader.is_alive() and time.time() > downloader.metrics.last_change + HUNG_DOWNLOADER_TIMEOUT:
                    # downloader has frozen, and the progress handler froze as well
                    self.logger.error("Detected hung downloader. Trying to exit.")
                    self.signal_exit()
                loader_alive = loader.is_alive()
                downloader_alive = downloader.is_alive()
                if not loader_alive or not downloader_alive:
                    if loader.error or downloader.error:
                        # We want to make sure that in the case of simultaneous
                        # exceptions, we see both before deciding what to do
                        time.sleep(3)
                    # Only exit if at least 1 error or both are not alive
                    elif not loader_alive and not downloader_alive:
                        break
                    else:
                        continue
                    # At least one error occurred: decide which one to raise.
                    loader_error = loader.error
                    loader_tb = loader.traceback
                    downloader_error = downloader.error
                    downloader_tb = downloader.traceback
                    any_requeue_task = isinstance(loader_error, RequeueTask) or isinstance(downloader_error, RequeueTask)
                    loader_worker_exception = isinstance(loader_error, WorkerException)
                    downloader_worker_exception = isinstance(downloader_error, WorkerException)
                    # If we have any RequeueTasks, then requeue
                    if any_requeue_task:
                        raise RequeueTask()
                    # Raise the earlier exception
                    elif loader_worker_exception and downloader_worker_exception:
                        if loader_error.time < downloader_error.time:
                            raise loader_error, None, loader_tb
                        else:
                            raise downloader_error, None, downloader_tb
                    # If they're both exceptions but one of them isn't a WorkerException
                    elif (downloader_error and loader_error) and (loader_worker_exception or downloader_worker_exception):
                        if not loader_worker_exception:
                            raise loader_error, None, loader_tb
                        else:
                            raise downloader_error, None, downloader_tb
                    # We don't have any WorkerExceptions, raise a random one
                    # Also handles the case where only one exception is raised
                    elif downloader_error or loader_error:
                        raise downloader_error or loader_error, None, downloader_tb or loader_tb
                    else:
                        assert False, 'Program should only reach this conditional block if at least one error exists'
        finally:
            if downloader.is_alive():
                downloader.terminate()
            self.logger.info('Waiting for threads to exit...')
            while downloader.is_alive() or loader.is_alive():
                loader.join(5)
                downloader.join(5)
                if task.valid():
                    task.ping()
        if self.exiting():
            raise ExitingException()
        with task.protect():
            db_connection.execute("COMMIT")
            self._update_task(task, downloader)
            task.finish('success')
def _should_delete(self, job, task):
    """Return True if any competing job (one targeting the same
    host/port/database/table) already has a SUCCESS task recorded for
    this task's file_id."""
    connection = job.spec.connection
    target = job.spec.target
    competing_jobs = self.jobs.query_target(
        connection.host, connection.port, target.database, target.table)
    quoted_ids = ("'%s'" % j.id for j in competing_jobs)
    predicate_sql = "file_id = :file_id and job_id in (%s)" % ','.join(quoted_ids)
    matching = self.tasks.get_tasks_in_state(
        [shared.TaskState.SUCCESS],
        extra_predicate=(predicate_sql, {'file_id': task.file_id}))
    return len(matching) > 0
def _delete_existing_rows(self, conn, job, task):
    """Delete rows previously loaded from this task's file.

    The DELETE runs in a background thread so this thread can keep
    pinging the task (keeping it alive in the queue) and watch for
    worker shutdown / task cancellation. Returns the number of rows
    deleted.

    Raises RequeueTask on connection or MySQL errors, or re-raises
    whatever other exception the delete thread hit.
    """
    file_id = task.file_id
    # Identifiers interpolated into the DELETE statement below; the
    # file_id value itself is passed as a query parameter (%%s).
    sql = {
        'database_name': job.spec.target.database,
        'table_name': job.spec.target.table,
        'file_id_column': job.spec.options.file_id_column
    }
    # Mutable holder used to pass results/errors out of the thread.
    thread_ctx = {
        'num_deleted': 0,
        'exception': None
    }
    def _run_delete_query():
        # Runs on the background thread; communicates via thread_ctx only.
        try:
            thread_ctx['num_deleted'] = conn.query('''
                DELETE FROM `%(database_name)s`.`%(table_name)s`
                WHERE `%(file_id_column)s` = %%s
            ''' % sql, file_id)
        except connection_wrapper.ConnectionWrapperException as e:
            self.logger.error(
                'Connection error when cleaning up rows: %s', str(e))
            thread_ctx['exception'] = RequeueTask()
        except pool.MySQLError as e:
            errno, msg = e.args
            msg = 'Error when cleaning up rows (%d): %s' % (errno, msg)
            self.logger.error(msg)
            thread_ctx['exception'] = RequeueTask()
        except Exception as e:
            thread_ctx['exception'] = e
    t = threading.Thread(target=_run_delete_query)
    t.start()
    while not self.exiting() and task.valid():
        try:
            # Ping the task to let the SQL queue know that it's still active.
            task.ping()
        except TaskDoesNotExist:
            # The task might have gotten cancelled between when we checked
            # whether it's valid and when we ping() it. If ping() fails and
            # it has been cancelled in between, then we should proceed with
            # killing the delete query if it exists
            continue
        if not t.is_alive():
            break
        time.sleep(0.5)
    else:
        # while/else: we got here without break, i.e. we are exiting or the
        # task became invalid while the delete thread was still running.
        # delete thread didn't finish on its own
        self.kill_delete_query_if_exists(job.spec.connection, conn.thread_id())
    t.join()
    exc = thread_ctx['exception']
    if exc is not None:
        raise exc
    return thread_ctx['num_deleted']
def _update_task(self, task, downloader):
stats = downloader.metrics.get_stats()
task.bytes_downloaded = stats['bytes_downloaded']
task.download_rate = stats['download_rate']
task.data['time_left'] = stats['time_left']
def exiting(self):
    """Return True when this worker should shut down: either the parent
    process has gone away or the exit event has been set."""
    parent_alive = True
    try:
        # Signal 0 checks for process existence without delivering a signal.
        os.kill(self.parent_pid, 0)
    except OSError:
        # parent process does not exist, exit immediately
        parent_alive = False
    if not parent_alive:
        return True
    return self._exit_evt.is_set()
| memsql/memsql-loader | memsql_loader/execution/worker.py | Python | apache-2.0 | 14,261 |
# Copyright 2013-2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import traceback
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
import logging
log = logging.getLogger(__name__)
import os
from threading import Event
import six
from subprocess import call
from itertools import groupby
from cassandra.cluster import Cluster
try:
from ccmlib.cluster import Cluster as CCMCluster
from ccmlib.cluster_factory import ClusterFactory as CCMClusterFactory
from ccmlib import common
except ImportError as e:
CCMClusterFactory = None
# Names for the ccm clusters used by the different test topologies.
CLUSTER_NAME = 'test_cluster'
SINGLE_NODE_CLUSTER_NAME = 'single_node'
MULTIDC_CLUSTER_NAME = 'multidc_test_cluster'

# Handle to the currently-active ccm cluster (None until use_cluster() runs).
CCM_CLUSTER = None

# Directory under this package where ccm stores its cluster state.
path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'ccm')
if not os.path.exists(path):
    os.mkdir(path)

# Cached results of get_server_versions(); populated lazily.
cass_version = None
cql_version = None
def get_server_versions():
    """
    Probe system.local table to determine Cassandra and CQL version.
    Returns a tuple of (cassandra_version, cql_version).

    The result is cached in module globals, so the cluster is only
    queried once per test run.
    """
    global cass_version, cql_version
    if cass_version is not None:
        return (cass_version, cql_version)
    cluster = Cluster(protocol_version=1)
    session = cluster.connect()
    local_row = session.execute('SELECT cql_version, release_version FROM system.local')[0]
    cass_version = _tuple_version(local_row.release_version)
    cql_version = _tuple_version(local_row.cql_version)
    cluster.shutdown()
    return (cass_version, cql_version)
def _tuple_version(version_string):
if '-' in version_string:
version_string = version_string[:version_string.index('-')]
return tuple([int(p) for p in version_string.split('.')])
# When set, tests run against an already-running ("external") Cassandra
# instead of a ccm-managed cluster.
USE_CASS_EXTERNAL = bool(os.getenv('USE_CASS_EXTERNAL', False))
default_cassandra_version = '2.1.3'

if USE_CASS_EXTERNAL:
    if CCMClusterFactory:
        # see if the external instance is running in ccm
        path = common.get_default_path()
        name = common.current_cluster_name(path)
        CCM_CLUSTER = CCMClusterFactory.load(common.get_default_path(), name)
        CCM_CLUSTER.start(wait_for_binary_proto=True, wait_other_notice=True)
    # Not sure what's going on, but the server version query
    # hangs in python3. This appears to be related to running inside of
    # nosetests, and only for this query that would run while loading the
    # module.
    # This is a hack to make it run with default cassandra version for PY3.
    # Not happy with it, but need to move on for now.
    if not six.PY3:
        cass_ver, _ = get_server_versions()
        default_cassandra_version = '.'.join('%d' % i for i in cass_ver)
    else:
        if not os.getenv('CASSANDRA_VERSION'):
            log.warning("Using default C* version %s because external server cannot be queried" % default_cassandra_version)

# CASSANDRA_DIR (a local build) takes precedence over CASSANDRA_VERSION.
CASSANDRA_DIR = os.getenv('CASSANDRA_DIR', None)
CASSANDRA_VERSION = os.getenv('CASSANDRA_VERSION', default_cassandra_version)

# Keyword arguments forwarded to ccm when creating/loading clusters.
CCM_KWARGS = {}
if CASSANDRA_DIR:
    log.info("Using Cassandra dir: %s", CASSANDRA_DIR)
    CCM_KWARGS['install_dir'] = CASSANDRA_DIR
else:
    log.info('Using Cassandra version: %s', CASSANDRA_VERSION)
    CCM_KWARGS['version'] = CASSANDRA_VERSION

# NOTE(review): these are lexicographic *string* comparisons ('10.0' < '2.1'),
# which happen to work for the 1.x/2.x versions this file targets -- verify
# before relying on them for double-digit major versions.
if CASSANDRA_VERSION > '2.1':
    default_protocol_version = 3
elif CASSANDRA_VERSION > '2.0':
    default_protocol_version = 2
else:
    default_protocol_version = 1

PROTOCOL_VERSION = int(os.getenv('PROTOCOL_VERSION', default_protocol_version))
def get_cluster():
    # Accessor for the module-global ccm cluster (None until use_cluster()).
    return CCM_CLUSTER

def get_node(node_id):
    # Look up a ccm node by numeric id, e.g. get_node(1) -> nodes['node1'].
    return CCM_CLUSTER.nodes['node%s' % node_id]

def use_multidc(dc_list):
    # Start the multi-DC cluster; dc_list gives the node count per datacenter.
    use_cluster(MULTIDC_CLUSTER_NAME, dc_list, start=True)

def use_singledc(start=True):
    # Standard 3-node single-datacenter test cluster.
    use_cluster(CLUSTER_NAME, [3], start=start)

def use_single_node(start=True):
    # Minimal 1-node cluster.
    use_cluster(SINGLE_NODE_CLUSTER_NAME, [1], start=start)
def remove_cluster():
    """Remove the active ccm cluster and clear the module global.

    No-op when running against an external cluster or when no cluster
    is active.
    """
    if USE_CASS_EXTERNAL:
        return
    global CCM_CLUSTER
    if not CCM_CLUSTER:
        return
    log.debug("removing cluster %s", CCM_CLUSTER.name)
    CCM_CLUSTER.remove()
    CCM_CLUSTER = None
def is_current_cluster(cluster_name, node_counts):
    """Return True if the active ccm cluster has the given name and its
    per-datacenter node counts match node_counts (a list of ints)."""
    global CCM_CLUSTER
    if not CCM_CLUSTER or CCM_CLUSTER.name != cluster_name:
        return False
    per_dc_counts = [
        len(list(nodes))
        for _dc, nodes in groupby(CCM_CLUSTER.nodelist(), lambda n: n.data_center)
    ]
    return per_dc_counts == node_counts
def use_cluster(cluster_name, nodes, ipformat=None, start=True):
    """Make `cluster_name` (with per-DC node counts `nodes`) the active
    ccm cluster, creating and optionally starting it.

    No-op when an external cluster is in use or the requested cluster is
    already the active one; otherwise any previously-active cluster is
    stopped first.
    """
    global CCM_CLUSTER
    if USE_CASS_EXTERNAL:
        if CCM_CLUSTER:
            log.debug("Using external ccm cluster %s", CCM_CLUSTER.name)
        else:
            log.debug("Using unnamed external cluster")
        return
    if is_current_cluster(cluster_name, nodes):
        log.debug("Using existing cluster %s", cluster_name)
        return
    if CCM_CLUSTER:
        log.debug("Stopping cluster %s", CCM_CLUSTER.name)
        CCM_CLUSTER.stop()
    try:
        try:
            # Prefer reusing an on-disk ccm cluster of the same name.
            cluster = CCMClusterFactory.load(path, cluster_name)
            log.debug("Found existing ccm %s cluster; clearing", cluster_name)
            cluster.clear()
            cluster.set_install_dir(**CCM_KWARGS)
        except Exception:
            # Fall back to creating it from scratch.
            log.debug("Creating new ccm %s cluster with %s", cluster_name, CCM_KWARGS)
            cluster = CCMCluster(path, cluster_name, **CCM_KWARGS)
        cluster.set_configuration_options({'start_native_transport': True})
        common.switch_cluster(path, cluster_name)
        cluster.populate(nodes, ipformat=ipformat)
        if start:
            log.debug("Starting ccm %s cluster", cluster_name)
            cluster.start(wait_for_binary_proto=True, wait_other_notice=True)
            setup_test_keyspace(ipformat=ipformat)
        CCM_CLUSTER = cluster
    except Exception:
        # On any failure: tear down, make sure no stray ccm processes
        # survive, then re-raise so the test run fails loudly.
        log.exception("Failed to start ccm cluster. Removing cluster.")
        remove_cluster()
        call(["pkill", "-9", "-f", ".ccm"])
        raise
def teardown_package():
    """Package-level teardown (picked up by the test runner): remove the
    active cluster plus any leftover named test clusters on disk."""
    if USE_CASS_EXTERNAL:
        return
    # when multiple modules are run explicitly, this runs between them
    # need to make sure CCM_CLUSTER is properly cleared for that case
    remove_cluster()
    for cluster_name in [CLUSTER_NAME, MULTIDC_CLUSTER_NAME]:
        try:
            cluster = CCMClusterFactory.load(path, cluster_name)
            try:
                cluster.remove()
                log.info('Removed cluster: %s' % cluster_name)
            except Exception:
                log.exception('Failed to remove cluster: %s' % cluster_name)
        except Exception:
            # Cluster was never created on disk; nothing to clean up.
            log.warn('Did not find cluster: %s' % cluster_name)
def setup_test_keyspace(ipformat=None):
    """Drop and recreate the standard test keyspaces (test1rf, test2rf,
    test3rf) and the test3rf.test table on the running cluster."""
    # wait for nodes to startup
    time.sleep(10)
    if not ipformat:
        cluster = Cluster(protocol_version=PROTOCOL_VERSION)
    else:
        # Non-default ipformat: connect via the IPv6 loopback address.
        cluster = Cluster(contact_points=["::1"], protocol_version=PROTOCOL_VERSION)
    session = cluster.connect()
    try:
        # Drop any pre-existing test keyspaces so each run starts clean.
        results = session.execute("SELECT keyspace_name FROM system.schema_keyspaces")
        existing_keyspaces = [row[0] for row in results]
        for ksname in ('test1rf', 'test2rf', 'test3rf'):
            if ksname in existing_keyspaces:
                session.execute("DROP KEYSPACE %s" % ksname)
        ddl = '''
            CREATE KEYSPACE test3rf
            WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '3'}'''
        session.execute(ddl)
        ddl = '''
            CREATE KEYSPACE test2rf
            WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '2'}'''
        session.execute(ddl)
        ddl = '''
            CREATE KEYSPACE test1rf
            WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}'''
        session.execute(ddl)
        ddl = '''
            CREATE TABLE test3rf.test (
                k int PRIMARY KEY,
                v int )'''
        session.execute(ddl)
    except Exception:
        traceback.print_exc()
        raise
    finally:
        cluster.shutdown()
class UpDownWaiter(object):
    """Registers itself with a host's monitor and lets callers block
    until the host is reported up or down."""

    def __init__(self, host):
        self.down_event = Event()
        self.up_event = Event()
        host.monitor.register(self)

    def on_up(self, host):
        # Callback invoked by the monitor when the host comes up.
        self.up_event.set()

    def on_down(self, host):
        # Callback invoked by the monitor when the host goes down.
        self.down_event.set()

    def wait_for_up(self):
        self.up_event.wait()

    def wait_for_down(self):
        self.down_event.wait()
| sontek/python-driver | tests/integration/__init__.py | Python | apache-2.0 | 8,900 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/securityhub/model/AwsEc2SecurityGroupDetails.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace SecurityHub
{
namespace Model
{
// NOTE: AWS SDK codegen-style model implementation; keep edits in sync with
// the generator if the model is regenerated.

// Default constructor: all "HasBeenSet" flags start false so Jsonize()
// emits nothing until fields are populated.
AwsEc2SecurityGroupDetails::AwsEc2SecurityGroupDetails() :
    m_groupNameHasBeenSet(false),
    m_groupIdHasBeenSet(false),
    m_ownerIdHasBeenSet(false),
    m_vpcIdHasBeenSet(false),
    m_ipPermissionsHasBeenSet(false),
    m_ipPermissionsEgressHasBeenSet(false)
{
}

// JSON constructor: delegates to operator= for the actual deserialization.
AwsEc2SecurityGroupDetails::AwsEc2SecurityGroupDetails(JsonView jsonValue) :
    m_groupNameHasBeenSet(false),
    m_groupIdHasBeenSet(false),
    m_ownerIdHasBeenSet(false),
    m_vpcIdHasBeenSet(false),
    m_vpcIdHasBeenSet(false),
    m_ipPermissionsHasBeenSet(false),
    m_ipPermissionsEgressHasBeenSet(false)
{
    *this = jsonValue;
}

// Deserialize from JSON: each field is copied only if present, and the
// corresponding HasBeenSet flag is raised so round-tripping preserves
// which fields were provided.
AwsEc2SecurityGroupDetails& AwsEc2SecurityGroupDetails::operator =(JsonView jsonValue)
{
    if(jsonValue.ValueExists("GroupName"))
    {
        m_groupName = jsonValue.GetString("GroupName");
        m_groupNameHasBeenSet = true;
    }
    if(jsonValue.ValueExists("GroupId"))
    {
        m_groupId = jsonValue.GetString("GroupId");
        m_groupIdHasBeenSet = true;
    }
    if(jsonValue.ValueExists("OwnerId"))
    {
        m_ownerId = jsonValue.GetString("OwnerId");
        m_ownerIdHasBeenSet = true;
    }
    if(jsonValue.ValueExists("VpcId"))
    {
        m_vpcId = jsonValue.GetString("VpcId");
        m_vpcIdHasBeenSet = true;
    }
    if(jsonValue.ValueExists("IpPermissions"))
    {
        Array<JsonView> ipPermissionsJsonList = jsonValue.GetArray("IpPermissions");
        for(unsigned ipPermissionsIndex = 0; ipPermissionsIndex < ipPermissionsJsonList.GetLength(); ++ipPermissionsIndex)
        {
            m_ipPermissions.push_back(ipPermissionsJsonList[ipPermissionsIndex].AsObject());
        }
        m_ipPermissionsHasBeenSet = true;
    }
    if(jsonValue.ValueExists("IpPermissionsEgress"))
    {
        Array<JsonView> ipPermissionsEgressJsonList = jsonValue.GetArray("IpPermissionsEgress");
        for(unsigned ipPermissionsEgressIndex = 0; ipPermissionsEgressIndex < ipPermissionsEgressJsonList.GetLength(); ++ipPermissionsEgressIndex)
        {
            m_ipPermissionsEgress.push_back(ipPermissionsEgressJsonList[ipPermissionsEgressIndex].AsObject());
        }
        m_ipPermissionsEgressHasBeenSet = true;
    }
    return *this;
}

// Serialize to JSON: only fields whose HasBeenSet flag is true are emitted.
JsonValue AwsEc2SecurityGroupDetails::Jsonize() const
{
    JsonValue payload;
    if(m_groupNameHasBeenSet)
    {
        payload.WithString("GroupName", m_groupName);
    }
    if(m_groupIdHasBeenSet)
    {
        payload.WithString("GroupId", m_groupId);
    }
    if(m_ownerIdHasBeenSet)
    {
        payload.WithString("OwnerId", m_ownerId);
    }
    if(m_vpcIdHasBeenSet)
    {
        payload.WithString("VpcId", m_vpcId);
    }
    if(m_ipPermissionsHasBeenSet)
    {
        Array<JsonValue> ipPermissionsJsonList(m_ipPermissions.size());
        for(unsigned ipPermissionsIndex = 0; ipPermissionsIndex < ipPermissionsJsonList.GetLength(); ++ipPermissionsIndex)
        {
            ipPermissionsJsonList[ipPermissionsIndex].AsObject(m_ipPermissions[ipPermissionsIndex].Jsonize());
        }
        payload.WithArray("IpPermissions", std::move(ipPermissionsJsonList));
    }
    if(m_ipPermissionsEgressHasBeenSet)
    {
        Array<JsonValue> ipPermissionsEgressJsonList(m_ipPermissionsEgress.size());
        for(unsigned ipPermissionsEgressIndex = 0; ipPermissionsEgressIndex < ipPermissionsEgressJsonList.GetLength(); ++ipPermissionsEgressIndex)
        {
            ipPermissionsEgressJsonList[ipPermissionsEgressIndex].AsObject(m_ipPermissionsEgress[ipPermissionsEgressIndex].Jsonize());
        }
        payload.WithArray("IpPermissionsEgress", std::move(ipPermissionsEgressJsonList));
    }
    return payload;
}
} // namespace Model
} // namespace SecurityHub
} // namespace Aws
| cedral/aws-sdk-cpp | aws-cpp-sdk-securityhub/source/model/AwsEc2SecurityGroupDetails.cpp | C++ | apache-2.0 | 4,284 |
// Copyright (c) 2017-2021 Tigera, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package infrastructure
import (
"fmt"
"os"
"path"
. "github.com/onsi/gomega"
log "github.com/sirupsen/logrus"
"github.com/projectcalico/felix/fv/containers"
"github.com/projectcalico/felix/fv/tcpdump"
"github.com/projectcalico/felix/fv/utils"
)
// FIXME: isolate individual Felix instances in their own cgroups. Unfortunately, this doesn't work on systems that are using cgroupv1
// see https://elixir.bootlin.com/linux/v5.3.11/source/include/linux/cgroup-defs.h#L788 for explanation.
const CreateCgroupV2 = false

// Felix wraps a Docker container running calico-felix together with the
// addressing details the test infrastructure expects it to pick up.
type Felix struct {
	*containers.Container

	// ExpectedIPIPTunnelAddr contains the IP that the infrastructure expects to
	// get assigned to the IPIP tunnel. Filled in by AddNode().
	ExpectedIPIPTunnelAddr string
	// ExpectedVXLANTunnelAddr contains the IP that the infrastructure expects to
	// get assigned to the VXLAN tunnel. Filled in by AddNode().
	ExpectedVXLANTunnelAddr string
	// ExpectedWireguardTunnelAddr contains the IP that the infrastructure expects to
	// get assigned to the Wireguard tunnel. Filled in by AddNode().
	ExpectedWireguardTunnelAddr string

	// IP of the Typha that this Felix is using (if any).
	TyphaIP string

	// If sets, acts like an external IP of a node. Filled in by AddNode().
	// XXX setup routes
	ExternalIP string

	// startupDelayed is true while the container is waiting for
	// TriggerDelayedStart() to create its trigger file.
	startupDelayed bool
}
// GetFelixPID returns the PID of the single calico-felix process in the
// container; panics if startup is still delayed (no process exists yet).
func (f *Felix) GetFelixPID() int {
	if f.startupDelayed {
		log.Panic("GetFelixPID() called but startup is delayed")
	}
	return f.GetSinglePID("calico-felix")
}

// GetFelixPIDs returns all calico-felix PIDs in the container; panics if
// startup is still delayed.
func (f *Felix) GetFelixPIDs() []int {
	if f.startupDelayed {
		log.Panic("GetFelixPIDs() called but startup is delayed")
	}
	return f.GetPIDs("calico-felix")
}

// TriggerDelayedStart releases a Felix that was started with
// options.DelayFelixStart by creating the trigger file the container
// waits for; panics if startup was not delayed.
func (f *Felix) TriggerDelayedStart() {
	if !f.startupDelayed {
		log.Panic("TriggerDelayedStart() called but startup wasn't delayed")
	}
	f.Exec("touch", "/start-trigger")
	f.startupDelayed = false
}
// RunFelix starts a privileged Felix container for the given datastore
// infrastructure, assembling its environment variables and volumes from the
// topology options, and applies the host networking setup (IPv6 sysctls,
// default-DROP FORWARD policy) that the FV tests rely on.
func RunFelix(infra DatastoreInfra, id int, options TopologyOptions) *Felix {
	log.Info("Starting felix")
	ipv6Enabled := fmt.Sprint(options.EnableIPv6)

	args := infra.GetDockerArgs()
	args = append(args, "--privileged")

	// Collect the environment variables for starting this particular container. Note: we
	// are called concurrently with other instances of RunFelix so it's important to only
	// read from options.*.
	envVars := map[string]string{
		// Enable core dumps.
		"GOTRACEBACK": "crash",
		"GORACE":      "history_size=2",

		// Tell the wrapper to set the core file name pattern so we can find the dump.
		"SET_CORE_PATTERN": "true",

		"FELIX_LOGSEVERITYSCREEN":        options.FelixLogSeverity,
		"FELIX_PROMETHEUSMETRICSENABLED": "true",
		"FELIX_BPFLOGLEVEL":              "debug",
		"FELIX_USAGEREPORTINGENABLED":    "false",
		"FELIX_IPV6SUPPORT":              ipv6Enabled,
		// Disable log dropping, because it can cause flakes in tests that look for particular logs.
		"FELIX_DEBUGDISABLELOGDROPPING": "true",
	}

	// Collect the volumes for this container.
	volumes := map[string]string{
		"/lib/modules": "/lib/modules",
		"/tmp":         "/tmp",
	}

	containerName := containers.UniqueName(fmt.Sprintf("felix-%d", id))

	if os.Getenv("FELIX_FV_ENABLE_BPF") == "true" {
		if !options.TestManagesBPF {
			log.Info("FELIX_FV_ENABLE_BPF=true, enabling BPF with env var")
			envVars["FELIX_BPFENABLED"] = "true"
		} else {
			log.Info("FELIX_FV_ENABLE_BPF=true but test manages BPF state itself, not using env var")
		}

		// Disable map repinning by default since BPF map names are global and we don't want our simulated instances to
		// share maps.
		envVars["FELIX_DebugBPFMapRepinEnabled"] = "false"

		if CreateCgroupV2 {
			envVars["FELIX_DEBUGBPFCGROUPV2"] = containerName
		}
	}

	if options.DelayFelixStart {
		envVars["DELAY_FELIX_START"] = "true"
	}

	// Options-supplied env vars override the defaults above.
	for k, v := range options.ExtraEnvVars {
		envVars[k] = v
	}

	for k, v := range envVars {
		args = append(args, "-e", fmt.Sprintf("%s=%s", k, v))
	}

	// Add in the volumes.
	for k, v := range options.ExtraVolumes {
		volumes[k] = v
	}

	for k, v := range volumes {
		args = append(args, "-v", fmt.Sprintf("%s:%s", k, v))
	}

	args = append(args,
		utils.Config.FelixImage,
	)

	felixOpts := containers.RunOpts{
		AutoRemove: true,
	}
	if options.FelixStopGraceful {
		// Leave StopSignal defaulting to SIGTERM, and allow 10 seconds for Felix
		// to handle that gracefully.
		felixOpts.StopTimeoutSecs = 10
	} else {
		// Use SIGKILL to stop Felix immediately.
		felixOpts.StopSignal = "SIGKILL"
	}
	c := containers.RunWithFixedName(containerName, felixOpts, args...)

	if options.EnableIPv6 {
		c.Exec("sysctl", "-w", "net.ipv6.conf.all.disable_ipv6=0")
		c.Exec("sysctl", "-w", "net.ipv6.conf.default.disable_ipv6=0")
		c.Exec("sysctl", "-w", "net.ipv6.conf.lo.disable_ipv6=0")
		c.Exec("sysctl", "-w", "net.ipv6.conf.all.forwarding=1")
	} else {
		c.Exec("sysctl", "-w", "net.ipv6.conf.all.disable_ipv6=1")
		c.Exec("sysctl", "-w", "net.ipv6.conf.default.disable_ipv6=1")
		c.Exec("sysctl", "-w", "net.ipv6.conf.lo.disable_ipv6=1")
		c.Exec("sysctl", "-w", "net.ipv6.conf.all.forwarding=0")
	}

	// Configure our model host to drop forwarded traffic by default. Modern
	// Kubernetes/Docker hosts now have this setting, and the consequence is that
	// whenever Calico policy intends to allow a packet, it must explicitly ACCEPT
	// that packet, not just allow it to pass through cali-FORWARD and assume it will
	// be accepted by the rest of the chain. Establishing that setting in this FV
	// allows us to test that.
	c.Exec("iptables",
		"-w", "10", // Retry this for 10 seconds, e.g. if something else is holding the lock
		"-W", "100000", // How often to probe the lock in microsecs.
		"-P", "FORWARD", "DROP")

	return &Felix{
		Container:      c,
		startupDelayed: options.DelayFelixStart,
	}
}
// Stop stops the Felix container, first removing the per-instance cgroup
// directory if CreateCgroupV2 is enabled.
func (f *Felix) Stop() {
	if CreateCgroupV2 {
		_ = f.ExecMayFail("rmdir", path.Join("/run/calico/cgroup/", f.Name))
	}
	f.Container.Stop()
}

// Restart sends SIGHUP to the running calico-felix process and waits (up to
// 10s) for its PID to change.
func (f *Felix) Restart() {
	oldPID := f.GetFelixPID()
	f.Exec("kill", "-HUP", fmt.Sprint(oldPID))
	Eventually(f.GetFelixPID, "10s", "100ms").ShouldNot(Equal(oldPID))
}

// AttachTCPDump returns tcpdump attached to the container
func (f *Felix) AttachTCPDump(iface string) *tcpdump.TCPDump {
	return tcpdump.Attach(f.Container.Name, "", iface)
}

// ProgramIptablesDNAT appends a DNAT rule to the given chain of the nat
// table, translating destination serviceIP to targetIP.
func (f *Felix) ProgramIptablesDNAT(serviceIP, targetIP, chain string) {
	f.Exec(
		"iptables",
		"-w", "10", // Retry this for 10 seconds, e.g. if something else is holding the lock
		"-W", "100000", // How often to probe the lock in microsecs.
		"-t", "nat", "-A", chain,
		"--destination", serviceIP,
		"-j", "DNAT", "--to-destination", targetIP,
	)
}
| Metaswitch/calico | fv/infrastructure/felix.go | GO | apache-2.0 | 7,199 |
require "rubygems"
require "minitest/unit"
require "rdoc/markup/inline"
require "rdoc/markup/to_html_crossref"
# Unit tests for RDoc::Markup::AttributeManager: inline markup parsing
# (*bold*, _em_, +tt+), HTML-like tags, custom word pairs, specials, and
# escaping/protection.
class TestRDocMarkupAttributeManager < MiniTest::Unit::TestCase

  def setup
    # Reset the SPECIAL registry so each test starts with no specials.
    @orig_special = RDoc::Markup::AttributeManager::SPECIAL
    RDoc::Markup::AttributeManager::SPECIAL.replace Hash.new

    @am = RDoc::Markup::AttributeManager.new

    # Pre-built attribute-change markers used in expected flow output.
    @bold_on  = @am.changed_attribute_by_name([], [:BOLD])
    @bold_off = @am.changed_attribute_by_name([:BOLD], [])

    @tt_on  = @am.changed_attribute_by_name([], [:TT])
    @tt_off = @am.changed_attribute_by_name([:TT], [])

    @em_on  = @am.changed_attribute_by_name([], [:EM])
    @em_off = @am.changed_attribute_by_name([:EM], [])

    @bold_em_on  = @am.changed_attribute_by_name([], [:BOLD] | [:EM])
    @bold_em_off = @am.changed_attribute_by_name([:BOLD] | [:EM], [])

    @em_then_bold = @am.changed_attribute_by_name([:EM], [:EM] | [:BOLD])

    @em_to_bold = @am.changed_attribute_by_name([:EM], [:BOLD])

    # Custom {..} word pair used by test_adding.
    @am.add_word_pair("{", "}", :WOMBAT)
    @wombat_on  = @am.changed_attribute_by_name([], [:WOMBAT])
    @wombat_off = @am.changed_attribute_by_name([:WOMBAT], [])
  end

  def teardown
    # Restore the SPECIAL registry mutated in setup/tests.
    RDoc::Markup::AttributeManager::SPECIAL.replace @orig_special
  end

  # Build the expected [on, special, off] triple for a cross-reference.
  def crossref(text)
    crossref_bitmap = RDoc::Markup::Attribute.bitmap_for(:_SPECIAL_) |
                      RDoc::Markup::Attribute.bitmap_for(:CROSSREF)

    [ @am.changed_attribute_by_name([], [:CROSSREF, :_SPECIAL_]),
      RDoc::Markup::Special.new(crossref_bitmap, text),
      @am.changed_attribute_by_name([:CROSSREF, :_SPECIAL_], [])
    ]
  end

  def test_adding
    assert_equal(["cat ", @wombat_on, "and", @wombat_off, " dog" ],
                 @am.flow("cat {and} dog"))
    #assert_equal(["cat {and} dog" ], @am.flow("cat \\{and} dog"))
  end

  def test_add_word_pair
    @am.add_word_pair '%', '&', 'percent and'

    assert RDoc::Markup::AttributeManager::WORD_PAIR_MAP.include?(/(%)(\S+)(&)/)
    assert RDoc::Markup::AttributeManager::PROTECTABLE.include?('%')
    assert !RDoc::Markup::AttributeManager::PROTECTABLE.include?('&')
  end

  def test_add_word_pair_angle
    # Flags starting with '<' would collide with HTML-like markup.
    e = assert_raises ArgumentError do
      @am.add_word_pair '<', '>', 'angles'
    end

    assert_equal "Word flags may not start with '<'", e.message
  end

  def test_add_word_pair_matching
    @am.add_word_pair '^', '^', 'caret'

    assert RDoc::Markup::AttributeManager::MATCHING_WORD_PAIRS.include?('^')
    assert RDoc::Markup::AttributeManager::PROTECTABLE.include?('^')
  end

  def test_basic
    assert_equal(["cat"], @am.flow("cat"))

    assert_equal(["cat ", @bold_on, "and", @bold_off, " dog"],
                 @am.flow("cat *and* dog"))

    assert_equal(["cat ", @bold_on, "AND", @bold_off, " dog"],
                 @am.flow("cat *AND* dog"))

    assert_equal(["cat ", @em_on, "And", @em_off, " dog"],
                 @am.flow("cat _And_ dog"))

    # Markers only apply to whole single words.
    assert_equal(["cat *and dog*"], @am.flow("cat *and dog*"))

    assert_equal(["*cat and* dog"], @am.flow("*cat and* dog"))

    assert_equal(["cat *and ", @bold_on, "dog", @bold_off],
                 @am.flow("cat *and *dog*"))

    assert_equal(["cat ", @em_on, "and", @em_off, " dog"],
                 @am.flow("cat _and_ dog"))

    # Underscores inside a word are not markup.
    assert_equal(["cat_and_dog"],
                 @am.flow("cat_and_dog"))

    assert_equal(["cat ", @tt_on, "and", @tt_off, " dog"],
                 @am.flow("cat +and+ dog"))

    assert_equal(["cat ", @bold_on, "a_b_c", @bold_off, " dog"],
                 @am.flow("cat *a_b_c* dog"))

    assert_equal(["cat __ dog"],
                 @am.flow("cat __ dog"))

    assert_equal(["cat ", @em_on, "_", @em_off, " dog"],
                 @am.flow("cat ___ dog"))
  end

  def test_bold
    assert_equal [@bold_on, 'bold', @bold_off],
                 @am.flow("*bold*")

    assert_equal [@bold_on, 'Bold:', @bold_off],
                 @am.flow("*Bold:*")

    assert_equal [@bold_on, '\\bold', @bold_off],
                 @am.flow("*\\bold*")
  end

  def test_combined
    assert_equal(["cat ", @em_on, "and", @em_off, " ", @bold_on, "dog", @bold_off],
                 @am.flow("cat _and_ *dog*"))

    assert_equal(["cat ", @em_on, "a__nd", @em_off, " ", @bold_on, "dog", @bold_off],
                 @am.flow("cat _a__nd_ *dog*"))
  end

  def test_convert_attrs
    # convert_attrs replaces the +..+ delimiters with NUL markers in place.
    str = '+foo+'
    attrs = RDoc::Markup::AttrSpan.new str.length

    @am.convert_attrs str, attrs

    assert_equal "\000foo\000", str

    str = '+:foo:+'
    attrs = RDoc::Markup::AttrSpan.new str.length

    @am.convert_attrs str, attrs

    assert_equal "\000:foo:\000", str

    str = '+x-y+'
    attrs = RDoc::Markup::AttrSpan.new str.length

    @am.convert_attrs str, attrs

    assert_equal "\000x-y\000", str
  end

  def test_html_like_em_bold
    assert_equal ["cat ", @em_on, "and ", @em_to_bold, "dog", @bold_off],
                 @am.flow("cat <i>and </i><b>dog</b>")
  end

  def test_html_like_em_bold_SGML
    assert_equal ["cat ", @em_on, "and ", @em_to_bold, "dog", @bold_off],
                 @am.flow("cat <i>and <b></i>dog</b>")
  end

  def test_html_like_em_bold_nested_1
    assert_equal(["cat ", @bold_em_on, "and", @bold_em_off, " dog"],
                 @am.flow("cat <i><b>and</b></i> dog"))
  end

  def test_html_like_em_bold_nested_2
    assert_equal ["cat ", @em_on, "and ", @em_then_bold, "dog", @bold_em_off],
                 @am.flow("cat <i>and <b>dog</b></i>")
  end

  def test_html_like_em_bold_nested_mixed_case
    # Tag matching is case-insensitive.
    assert_equal ["cat ", @em_on, "and ", @em_then_bold, "dog", @bold_em_off],
                 @am.flow("cat <i>and <B>dog</B></I>")
  end

  def test_html_like_em_bold_mixed_case
    assert_equal ["cat ", @em_on, "and", @em_off, " ", @bold_on, "dog", @bold_off],
                 @am.flow("cat <i>and</i> <B>dog</b>")
  end

  def test_html_like_teletype
    assert_equal ["cat ", @tt_on, "dog", @tt_off],
                 @am.flow("cat <tt>dog</Tt>")
  end

  def test_html_like_teletype_em_bold_SGML
    assert_equal [@tt_on, "cat", @tt_off, " ", @em_on, "and ", @em_to_bold, "dog", @bold_off],
                 @am.flow("<tt>cat</tt> <i>and <b></i>dog</b>")
  end

  def test_protect
    # Backslash-escaped markers must pass through unconverted.
    assert_equal(['cat \\ dog'], @am.flow('cat \\ dog'))

    assert_equal(["cat <tt>dog</Tt>"], @am.flow("cat \\<tt>dog</Tt>"))

    assert_equal(["cat ", @em_on, "and", @em_off, " <B>dog</b>"],
                 @am.flow("cat <i>and</i> \\<B>dog</b>"))

    assert_equal(["*word* or <b>text</b>"], @am.flow("\\*word* or \\<b>text</b>"))

    assert_equal(["_cat_", @em_on, "dog", @em_off],
                 @am.flow("\\_cat_<i>dog</i>"))
  end

  def test_special
    @am.add_special(RDoc::Markup::ToHtmlCrossref::CROSSREF_REGEXP, :CROSSREF)

    #
    # The apostrophes in "cats'" and "dogs'" suppress the flagging of these
    # words as potential cross-references, which is necessary for the unit
    # tests. Unfortunately, the markup engine right now does not actually
    # check whether a cross-reference is valid before flagging it.
    #
    assert_equal(["cats'"], @am.flow("cats'"))

    assert_equal(["cats' ", crossref("#fred"), " dogs'"].flatten,
                 @am.flow("cats' #fred dogs'"))

    assert_equal([crossref("#fred"), " dogs'"].flatten,
                 @am.flow("#fred dogs'"))

    assert_equal(["cats' ", crossref("#fred")].flatten, @am.flow("cats' #fred"))
  end

end

MiniTest::Unit.autorun
| google-code/android-scripting | jruby/src/test/externals/ruby1.9/rdoc/test_rdoc_markup_attribute_manager.rb | Ruby | apache-2.0 | 7,436 |
/*
Copyright 2016 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sshutil
import (
"fmt"
"testing"
"github.com/docker/machine/libmachine/drivers"
"k8s.io/minikube/pkg/minikube/tests"
)
// MockDriverBadPort embeds tests.MockDriver but always fails GetSSHPort,
// letting tests exercise the port-error path.
type MockDriverBadPort struct{ tests.MockDriver }

// GetSSHPort returns an error unconditionally.
func (MockDriverBadPort) GetSSHPort() (int, error) {
	return 22, fmt.Errorf("bad port err")
}
// TestNewSSHClient verifies the happy path: a client built against a local
// mock SSH server can open a session, run a command, and the server records
// both the connection and the command.
func TestNewSSHClient(t *testing.T) {
	s, err := tests.NewSSHServer(t)
	if err != nil {
		t.Fatalf("NewSSHServer: %v", err)
	}
	port, err := s.Start()
	if err != nil {
		t.Fatalf("Error starting ssh server: %v", err)
	}
	defer s.Stop()

	d := &tests.MockDriver{
		Port: port,
		BaseDriver: drivers.BaseDriver{
			IPAddress:  "127.0.0.1",
			SSHKeyPath: "",
		},
		T: t,
	}
	c, err := NewSSHClient(d)
	if err != nil {
		t.Fatalf("Unexpected error: %v", err)
	}
	defer c.Close()

	sess, err := c.NewSession()
	if err != nil {
		t.Fatal("Error creating new session for ssh client")
	}
	defer sess.Close()

	cmd := "foo"
	if err := sess.Run(cmd); err != nil {
		t.Fatalf("Error running %q: %v", cmd, err)
	}
	// The mock server tracks connections and executed commands.
	if !s.Connected {
		t.Fatalf("Server not connected")
	}
	if _, ok := s.Commands[cmd]; !ok {
		t.Fatalf("Expected command: %s", cmd)
	}
}
// TestNewSSHClientError verifies that NewSSHClient fails for drivers with a
// bad SSH port, an unreadable/invalid SSH key path, or an unreachable SSH
// endpoint.
func TestNewSSHClientError(t *testing.T) {
	t.Run("Bad Port", func(t *testing.T) {
		d := MockDriverBadPort{}
		_, err := NewSSHClient(&d)
		if err == nil {
			// Fixed message typo ("dor" -> "for"), matching the other subtests.
			t.Fatalf("Expected to fail for driver: %v", d)
		}
	})
	t.Run("Bad ssh key path", func(t *testing.T) {
		s, err := tests.NewSSHServer(t)
		if err != nil {
			t.Fatalf("NewSSHServer: %v", err)
		}
		port, err := s.Start()
		if err != nil {
			t.Fatalf("Error starting ssh server: %v", err)
		}
		defer s.Stop()

		// /etc/hosts exists but is not a valid private key.
		d := &tests.MockDriver{
			Port: port,
			BaseDriver: drivers.BaseDriver{
				IPAddress:  "127.0.0.1",
				SSHKeyPath: "/etc/hosts",
			},
			T: t,
		}
		_, err = NewSSHClient(d)
		if err == nil {
			t.Fatalf("Expected to fail for driver: %v", d)
		}
	})
	t.Run("Dial err", func(t *testing.T) {
		// No server is listening for this driver, so dialing must fail.
		d := &tests.MockDriver{
			Port: 22,
			BaseDriver: drivers.BaseDriver{
				IPAddress:  "127.0.0.1",
				SSHKeyPath: "",
			},
			T: t,
		}
		_, err := NewSSHClient(d)
		if err == nil {
			t.Fatalf("Expected to fail for driver: %v", d)
		}
	})
}
// TestNewSSHHost verifies that newSSHHost copies IP, user and key path from
// the driver into the host struct.
func TestNewSSHHost(t *testing.T) {
	sshKeyPath := "mypath"
	ip := "localhost"
	user := "myuser"
	d := tests.MockDriver{
		BaseDriver: drivers.BaseDriver{
			IPAddress:  ip,
			SSHUser:    user,
			SSHKeyPath: sshKeyPath,
		},
	}

	h, err := newSSHHost(&d)
	if err != nil {
		t.Fatalf("Unexpected error creating host: %v", err)
	}

	if h.SSHKeyPath != sshKeyPath {
		t.Fatalf("%s != %s", h.SSHKeyPath, sshKeyPath)
	}
	if h.Username != user {
		t.Fatalf("%s != %s", h.Username, user)
	}
	if h.IP != ip {
		t.Fatalf("%s != %s", h.IP, ip)
	}
}
// TestNewSSHHostError verifies that newSSHHost propagates driver failures
// (host lookup errors and port errors).
func TestNewSSHHostError(t *testing.T) {
	t.Run("Host error", func(t *testing.T) {
		d := tests.MockDriver{HostError: true}
		_, err := newSSHHost(&d)
		if err == nil {
			t.Fatal("Expected error for creating newSSHHost with host error, but got nil")
		}
	})
	t.Run("Bad port", func(t *testing.T) {
		d := MockDriverBadPort{}
		_, err := newSSHHost(&d)
		if err == nil {
			t.Fatal("Expected error for creating newSSHHost with bad port, but got nil")
		}
	})
}
| dlorenc/minikube | pkg/minikube/sshutil/sshutil_test.go | GO | apache-2.0 | 3,735 |
// Code generated by go-swagger; DO NOT EDIT.
package admin
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/ory/hydra/internal/httpclient/models"
)
// DeleteOAuth2ClientReader is a Reader for the DeleteOAuth2Client structure.
type DeleteOAuth2ClientReader struct {
	formats strfmt.Registry
}

// ReadResponse reads a server response into the received o.
func (o *DeleteOAuth2ClientReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
	switch response.Code() {
	case 204:
		// Expected success: the client was deleted and the body is empty.
		result := NewDeleteOAuth2ClientNoContent()
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		return result, nil
	default:
		// Anything else is decoded into the generic jsonError payload.
		result := NewDeleteOAuth2ClientDefault(response.Code())
		if err := result.readResponse(response, consumer, o.formats); err != nil {
			return nil, err
		}
		if response.Code()/100 == 2 {
			// Unexpected-but-successful 2xx: hand back the decoded result without error.
			return result, nil
		}
		return nil, result
	}
}
// NewDeleteOAuth2ClientNoContent creates a DeleteOAuth2ClientNoContent with default headers values
func NewDeleteOAuth2ClientNoContent() *DeleteOAuth2ClientNoContent {
	return &DeleteOAuth2ClientNoContent{}
}

/*DeleteOAuth2ClientNoContent handles this case with default header values.

Empty responses are sent when, for example, resources are deleted. The HTTP
status code for this operation's empty response is 204.
*/
type DeleteOAuth2ClientNoContent struct {
}

func (o *DeleteOAuth2ClientNoContent) Error() string {
	return fmt.Sprintf("[DELETE /clients/{id}][%d] deleteOAuth2ClientNoContent ", 204)
}

// readResponse is a no-op: a 204 response carries no body to decode.
func (o *DeleteOAuth2ClientNoContent) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
	return nil
}
// NewDeleteOAuth2ClientDefault creates a DeleteOAuth2ClientDefault with default headers values
func NewDeleteOAuth2ClientDefault(code int) *DeleteOAuth2ClientDefault {
	return &DeleteOAuth2ClientDefault{
		_statusCode: code,
	}
}

/*DeleteOAuth2ClientDefault handles this case with default header values.

jsonError
*/
type DeleteOAuth2ClientDefault struct {
	_statusCode int

	Payload *models.JSONError
}

// Code gets the status code for the delete o auth2 client default response
func (o *DeleteOAuth2ClientDefault) Code() int {
	return o._statusCode
}

func (o *DeleteOAuth2ClientDefault) Error() string {
	return fmt.Sprintf("[DELETE /clients/{id}][%d] deleteOAuth2Client default %+v", o._statusCode, o.Payload)
}

func (o *DeleteOAuth2ClientDefault) GetPayload() *models.JSONError {
	return o.Payload
}

// readResponse decodes the jsonError payload from the response body.
// io.EOF is tolerated so that an empty error body does not fail decoding.
func (o *DeleteOAuth2ClientDefault) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
	o.Payload = new(models.JSONError)

	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}

	return nil
}
| ory-am/go-iam | internal/httpclient/client/admin/delete_o_auth2_client_responses.go | GO | apache-2.0 | 3,026 |
/*
* Copyright 2010 netling project <http://netling.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file may incorporate work covered by the following copyright and
* permission notice:
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netling.ftp;
import java.io.IOException;
/***
 * FTPConnectionClosedException is used to indicate the premature or
 * unexpected closing of an FTP connection resulting from a
 * {@link org.netling.ftp.FTPReply#SERVICE_NOT_AVAILABLE FTPReply.SERVICE_NOT_AVAILABLE }
 * response (FTP reply code 421) to a
 * failed FTP command. This exception is derived from IOException and
 * therefore may be caught either as an IOException or specifically as an
 * FTPConnectionClosedException.
 *
 * @see FTP
 * @see FTPClient
 ***/
public class FTPConnectionClosedException extends IOException {

    // IOException is Serializable, so a fixed serialVersionUID avoids the
    // compiler-generated one changing across builds.
    private static final long serialVersionUID = 1L;

    /*** Constructs a FTPConnectionClosedException with no message ***/
    public FTPConnectionClosedException()
    {
        super();
    }

    /***
     * Constructs a FTPConnectionClosedException with a specified message.
     *
     * @param message The message explaining the reason for the exception.
     ***/
    public FTPConnectionClosedException(String message)
    {
        super(message);
    }
}
| rwinston/netling | src/main/java/org/netling/ftp/FTPConnectionClosedException.java | Java | apache-2.0 | 2,615 |
<?php
// DIR: absolute filesystem paths used by the OpenCart storefront.
define('DIR_APPLICATION', '/var/www/html/store/catalog/');
define('DIR_SYSTEM', '/var/www/html/store/system/');
define('DIR_DATABASE', '/var/www/html/store/system/database/');
define('DIR_LANGUAGE', '/var/www/html/store/catalog/language/');
define('DIR_TEMPLATE', '/var/www/html/store/catalog/view/theme/');
define('DIR_CONFIG', '/var/www/html/store/system/config/');
define('DIR_IMAGE', '/var/www/html/store/image/');
define('DIR_CACHE', '/var/www/html/store/system/cache/');
define('DIR_DOWNLOAD', '/var/www/html/store/download/');
define('DIR_LOGS', '/var/www/html/store/system/logs/');
// DB: database connection settings.
// NOTE(review): plaintext credentials are hard-coded here — confirm this file
// is environment-specific and excluded from version control / rotated.
define('DB_DRIVER', 'mysql');
define('DB_HOSTNAME', 'localhost');
define('DB_USERNAME', 'realityload');
define('DB_PASSWORD', 're@lityl0@d');
define('DB_DATABASE', 'opencart');
// NOTE(review): OpenCart prefixes are usually underscore-terminated (e.g.
// 'oc_'); verify 'opencart' matches the actual table names in this database.
define('DB_PREFIX', 'opencart');
?> | yangchf/testBed | appSrc/store/config.php | PHP | apache-2.0 | 822 |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.channel.epoll;
import io.netty.bootstrap.AbstractBootstrap;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandler;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandler;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import io.netty.util.NetUtil;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.ResourceLeakDetector;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Ignore;
import org.junit.Test;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
public class EpollReuseAddrTest {
    // Components of the running kernel version, parsed once below so each
    // test can be skipped on kernels without SO_REUSEPORT support (< 3.9).
    private static final int MAJOR;
    private static final int MINOR;
    private static final int BUGFIX;

    static {
        String kernelVersion = Native.KERNEL_VERSION;
        int index = kernelVersion.indexOf('-');
        if (index > -1) {
            // Strip any distro suffix, e.g. "3.10.0-957" -> "3.10.0".
            kernelVersion = kernelVersion.substring(0, index);
        }
        String[] versionParts = kernelVersion.split("\\.");
        if (versionParts.length <= 3) {
            // NOTE(review): a one-component version string (e.g. "4") would
            // make the MINOR lookup below throw; presumably the kernel always
            // reports at least "major.minor" - confirm before hardening.
            MAJOR = Integer.parseInt(versionParts[0]);
            MINOR = Integer.parseInt(versionParts[1]);
            if (versionParts.length == 3) {
                BUGFIX = Integer.parseInt(versionParts[2]);
            } else {
                BUGFIX = 0;
            }
        } else {
            throw new IllegalStateException("Can not parse kernel version " + kernelVersion);
        }
    }

    @Test
    public void testMultipleBindSocketChannelWithoutReusePortFails() {
        Assume.assumeTrue(versionEqOrGt(3, 9, 0));
        testMultipleBindDatagramChannelWithoutReusePortFails0(createServerBootstrap());
    }

    @Test
    public void testMultipleBindDatagramChannelWithoutReusePortFails() {
        Assume.assumeTrue(versionEqOrGt(3, 9, 0));
        testMultipleBindDatagramChannelWithoutReusePortFails0(createBootstrap());
    }

    // Without SO_REUSEPORT, binding the same local address twice must fail
    // with an IOException (EADDRINUSE) as the cause.
    private static void testMultipleBindDatagramChannelWithoutReusePortFails0(AbstractBootstrap<?, ?, ?> bootstrap) {
        bootstrap.handler(new LoggingHandler(LogLevel.ERROR));
        ChannelFuture future = bootstrap.bind().syncUninterruptibly();
        try {
            bootstrap.bind(future.channel().localAddress()).syncUninterruptibly();
            Assert.fail();
        } catch (Exception e) {
            Assert.assertTrue(e.getCause() instanceof IOException);
        }
        future.channel().close().syncUninterruptibly();
    }

    @Test(timeout = 10000)
    public void testMultipleBindSocketChannel() throws Exception {
        Assume.assumeTrue(versionEqOrGt(3, 9, 0));
        ServerBootstrap bootstrap = createServerBootstrap();
        bootstrap.option(EpollChannelOption.SO_REUSEPORT, true);
        final AtomicBoolean accepted1 = new AtomicBoolean();
        bootstrap.childHandler(new ServerSocketTestHandler(accepted1));
        ChannelFuture future = bootstrap.bind().syncUninterruptibly();
        InetSocketAddress address1 = (InetSocketAddress) future.channel().localAddress();

        final AtomicBoolean accepted2 = new AtomicBoolean();
        bootstrap.childHandler(new ServerSocketTestHandler(accepted2));
        ChannelFuture future2 = bootstrap.bind(address1).syncUninterruptibly();
        InetSocketAddress address2 = (InetSocketAddress) future2.channel().localAddress();

        Assert.assertEquals(address1, address2);
        // Keep connecting until both listeners have accepted at least once;
        // the kernel load-balances incoming connections between them.
        while (!accepted1.get() || !accepted2.get()) {
            Socket socket = new Socket(address1.getAddress(), address1.getPort());
            socket.setReuseAddress(true);
            socket.close();
        }
        future.channel().close().syncUninterruptibly();
        future2.channel().close().syncUninterruptibly();
    }

    @Test(timeout = 10000)
    @Ignore // TODO: Unignore after making it pass on centos6-1 and debian7-1
    public void testMultipleBindDatagramChannel() throws Exception {
        ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.ADVANCED);
        Assume.assumeTrue(versionEqOrGt(3, 9, 0));
        Bootstrap bootstrap = createBootstrap();
        bootstrap.option(EpollChannelOption.SO_REUSEPORT, true);
        final AtomicBoolean received1 = new AtomicBoolean();
        bootstrap.handler(new DatagramSocketTestHandler(received1));
        ChannelFuture future = bootstrap.bind().syncUninterruptibly();
        final InetSocketAddress address1 = (InetSocketAddress) future.channel().localAddress();

        final AtomicBoolean received2 = new AtomicBoolean();
        bootstrap.handler(new DatagramSocketTestHandler(received2));
        ChannelFuture future2 = bootstrap.bind(address1).syncUninterruptibly();
        final InetSocketAddress address2 = (InetSocketAddress) future2.channel().localAddress();

        Assert.assertEquals(address1, address2);
        // Use an explicit charset so the payload bytes do not depend on the
        // platform default encoding.
        final byte[] bytes = "data".getBytes(StandardCharsets.UTF_8);

        // fire up 16 Threads and send DatagramPackets to make sure we stress it enough to see DatagramPackets received
        // on both sockets.
        int count = 16;
        final CountDownLatch latch = new CountDownLatch(count);
        Runnable r = () -> {
            try {
                DatagramSocket socket = new DatagramSocket();
                while (!received1.get() || !received2.get()) {
                    socket.send(new DatagramPacket(
                            bytes, 0, bytes.length, address1.getAddress(), address1.getPort()));
                }
                socket.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            latch.countDown();
        };
        ExecutorService executor = Executors.newFixedThreadPool(count);
        for (int i = 0 ; i < count; i++) {
            executor.execute(r);
        }
        latch.await();
        executor.shutdown();
        future.channel().close().syncUninterruptibly();
        future2.channel().close().syncUninterruptibly();
        Assert.assertTrue(received1.get());
        Assert.assertTrue(received2.get());
    }

    // Builds a TCP server bootstrap bound to an ephemeral loopback port.
    private static ServerBootstrap createServerBootstrap() {
        ServerBootstrap bootstrap = new ServerBootstrap();
        bootstrap.group(EpollSocketTestPermutation.EPOLL_BOSS_GROUP, EpollSocketTestPermutation.EPOLL_WORKER_GROUP);
        bootstrap.channel(EpollServerSocketChannel.class);
        bootstrap.childHandler(new DummyHandler());
        InetSocketAddress address = new InetSocketAddress(NetUtil.LOCALHOST, 0);
        bootstrap.localAddress(address);
        return bootstrap;
    }

    // Builds a UDP bootstrap bound to an ephemeral loopback port.
    private static Bootstrap createBootstrap() {
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.group(EpollSocketTestPermutation.EPOLL_WORKER_GROUP);
        bootstrap.channel(EpollDatagramChannel.class);
        InetSocketAddress address = new InetSocketAddress(NetUtil.LOCALHOST, 0);
        bootstrap.localAddress(address);
        return bootstrap;
    }

    // Returns true if the running kernel version is >= major.minor.bugfix.
    private static boolean versionEqOrGt(int major, int minor, int bugfix) {
        if (MAJOR > major) {
            return true;
        }
        if (MAJOR == major) {
            if (MINOR > minor) {
                return true;
            } else if (MINOR == minor) {
                if (BUGFIX >= bugfix) {
                    return true;
                }
            }
        }
        return false;
    }

    // Records that a connection was accepted, then closes it immediately.
    @ChannelHandler.Sharable
    private static class ServerSocketTestHandler implements ChannelInboundHandler {
        private final AtomicBoolean accepted;

        ServerSocketTestHandler(AtomicBoolean accepted) {
            this.accepted = accepted;
        }

        @Override
        public void channelActive(ChannelHandlerContext ctx) throws Exception {
            accepted.set(true);
            ctx.close();
        }
    }

    // Records that a datagram arrived and releases its buffer.
    @ChannelHandler.Sharable
    private static class DatagramSocketTestHandler implements ChannelInboundHandler {
        private final AtomicBoolean received;

        DatagramSocketTestHandler(AtomicBoolean received) {
            this.received = received;
        }

        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
            ReferenceCountUtil.release(msg);
            received.set(true);
        }
    }

    @ChannelHandler.Sharable
    private static final class DummyHandler extends ChannelHandlerAdapter { }
}
| gerdriesselmann/netty | transport-native-epoll/src/test/java/io/netty/channel/epoll/EpollReuseAddrTest.java | Java | apache-2.0 | 9,362 |
/*
* Copyright (c) 2010, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sbql4j8.javax.lang.model.type;
import java.util.List;
/**
 * Represents a union type.
 *
 * As of the {@link sbql4j8.javax.lang.model.SourceVersion#RELEASE_7
 * RELEASE_7} source version, union types can appear as the type
 * of a multi-catch exception parameter.
 *
 * @since 1.7
 */
public interface UnionType extends TypeMirror {

    /**
     * Return the alternatives comprising this union type.
     *
     * @return the alternatives comprising this union type.
     */
    List<? extends TypeMirror> getAlternatives();
}
| emil-wcislo/sbql4j8 | sbql4j8/src/main/openjdk/sbql4j8/javax/lang/model/type/UnionType.java | Java | apache-2.0 | 1,752 |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.graph;
/**
 * A graph where the parents (incoming nodes) of a particular node may be
 * looked up.
 */
public interface ParentTraversableGraph<T> extends TraversableGraph<T> {

  /**
   * @param sink node whose predecessors are requested.
   * @return {@link Iterable} of nodes with an edge into {@code sink}; the
   *     caller is not allowed to mutate it.
   */
  Iterable<T> getIncomingNodesFor(T sink);
}
| justinmuller/buck | src/com/facebook/buck/graph/ParentTraversableGraph.java | Java | apache-2.0 | 918 |
'''
Implements the targetcli target related UI.
This file is part of targetcli.
Copyright (c) 2011-2013 by Datera, Inc
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
'''
try:
import ethtool
except ImportError:
ethtool = None
import os
import six
import stat
from configshell_fb import ExecutionError
from rtslib_fb import RTSLibBrokenLink, RTSLibError
from rtslib_fb import MappedLUN, NetworkPortal, NodeACL
from rtslib_fb import LUN, Target, TPG, StorageObjectFactory
from .ui_backstore import complete_path
from .ui_node import UINode, UIRTSLibNode
# CHAP credential attribute names shared by the auth config groups below.
auth_params = ('userid', 'password', 'mutual_userid', 'mutual_password')
# Discovery auth additionally exposes an on/off switch.
discovery_params = auth_params + ("enable",)
class UIFabricModule(UIRTSLibNode):
    '''
    A fabric module UI.
    '''
    def __init__(self, fabric_module, parent):
        super(UIFabricModule, self).__init__(fabric_module.name,
                                             fabric_module, parent,
                                             late_params=True)
        self.refresh()
        if self.rtsnode.has_feature('discovery_auth'):
            for param in discovery_params:
                self.define_config_group_param('discovery_auth',
                                               param, 'string')
        # NOTE(review): refresh() is called twice (before and after the
        # discovery_auth params are defined) — presumably intentional; confirm.
        self.refresh()

    # Support late params
    #
    # By default the base class will call list_parameters and list_attributes
    # in init. This stops us from being able to lazy-load fabric modules.
    # We declare we support "late_params" to stop this, and then
    # this code overrides the base class methods that involve enumerating
    # this stuff, so we don't need to call list_parameters/attrs (which
    # would cause the module to load) until the ui is actually asking for
    # them from us.
    # Currently fabricmodules don't have these anyways, this is all a CYA thing.
    def list_config_groups(self):
        groups = super(UIFabricModule, self).list_config_groups()
        if len(self.rtsnode.list_parameters()):
            groups.append('parameter')
        if len(self.rtsnode.list_attributes()):
            groups.append('attribute')
        return groups

    # Support late params (see above)
    def list_group_params(self, group, writable=None):
        if group not in ("parameter", "attribute"):
            return super(UIFabricModule, self).list_group_params(group,
                                                                 writable)

        params_func = getattr(self.rtsnode, "list_%ss" % group)
        params = params_func()
        params_ro = params_func(writable=False)
        ret_list = []
        for param in params:
            p_writable = param not in params_ro
            if writable is not None and p_writable != writable:
                continue
            ret_list.append(param)
        ret_list.sort()
        return ret_list

    # Support late params (see above)
    def get_group_param(self, group, param):
        if group not in ("parameter", "attribute"):
            return super(UIFabricModule, self).get_group_param(group, param)

        if param not in self.list_group_params(group):
            raise ValueError("No such parameter %s in configuration group %s"
                             % (param, group))
        description = "The %s %s." % (param, group)
        writable = param in self.list_group_params(group, writable=True)
        return dict(name=param, group=group, type="string",
                    description=description, writable=writable)

    def ui_getgroup_discovery_auth(self, auth_attr):
        '''
        This is the backend method for getting discovery_auth attributes.
        @param auth_attr: The auth attribute to get the value of.
        @type auth_attr: str
        @return: The auth attribute's value
        @rtype: str
        '''
        if auth_attr == 'enable':
            return self.rtsnode.discovery_enable_auth
        else:
            return getattr(self.rtsnode, "discovery_" + auth_attr)

    def ui_setgroup_discovery_auth(self, auth_attr, value):
        '''
        This is the backend method for setting discovery auth attributes.
        @param auth_attr: The auth attribute to set the value of.
        @type auth_attr: str
        @param value: The auth's value
        @type value: str
        '''
        self.assert_root()

        if value is None:
            value = ''

        if auth_attr == 'enable':
            self.rtsnode.discovery_enable_auth = value
        else:
            setattr(self.rtsnode, "discovery_" + auth_attr, value)

    def refresh(self):
        # Rebuild the child node tree from the fabric's current targets.
        self._children = set([])
        for target in self.rtsnode.targets:
            self.shell.log.debug("Found target %s under fabric module %s."
                                 % (target.wwn, target.fabric_module))
            if target.has_feature('tpgts'):
                UIMultiTPGTarget(target, self)
            else:
                UITarget(target, self)

    def summary(self):
        status = None
        msg = []

        fm = self.rtsnode
        if fm.has_feature('discovery_auth') and fm.discovery_enable_auth:
            # Auth is enabled: it is only healthy if both credentials are set.
            if not (fm.discovery_password and fm.discovery_userid):
                status = False
            else:
                status = True

            if fm.discovery_authenticate_target:
                msg.append("mutual disc auth")
            else:
                msg.append("1-way disc auth")

        msg.append("Targets: %d" % len(self._children))

        return (", ".join(msg), status)

    def ui_command_create(self, wwn=None):
        '''
        Creates a new target. The I{wwn} format depends on the transport(s)
        supported by the fabric module. If the I{wwn} is omitted, then a
        target will be created using either a randomly generated WWN of the
        proper type, or the first unused WWN in the list of possible WWNs if
        one is available. If WWNs are constrained to a list (i.e. for hardware
        targets addresses) and all WWNs are in use, the target creation will
        fail. Use the B{info} command to get more information about WWN type
        and possible values.

        SEE ALSO
        ========
        B{info}
        '''
        self.assert_root()

        target = Target(self.rtsnode, wwn, mode='create')
        wwn = target.wwn
        # PEP 8: identity comparison with None, not equality.
        if self.rtsnode.wwns is not None and wwn not in self.rtsnode.wwns:
            self.shell.log.warning("Hardware missing for this WWN")

        if target.has_feature('tpgts'):
            ui_target = UIMultiTPGTarget(target, self)
            self.shell.log.info("Created target %s." % wwn)
            return ui_target.ui_command_create()
        else:
            ui_target = UITarget(target, self)
            self.shell.log.info("Created target %s." % wwn)
            return self.new_node(ui_target)

    def ui_complete_create(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command create.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'wwn' and self.rtsnode.wwns is not None:
            existing_wwns = [child.wwn for child in self.rtsnode.targets]
            completions = [wwn for wwn in self.rtsnode.wwns
                           if wwn.startswith(text)
                           if wwn not in existing_wwns]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    def ui_command_delete(self, wwn):
        '''
        Recursively deletes the target with the specified I{wwn}, and all
        objects hanging under it.

        SEE ALSO
        ========
        B{create}
        '''
        self.assert_root()
        target = Target(self.rtsnode, wwn, mode='lookup')
        target.delete()
        self.shell.log.info("Deleted Target %s." % wwn)
        self.refresh()

    def ui_complete_delete(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command delete.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'wwn':
            wwns = [child.name for child in self.children]
            completions = [wwn for wwn in wwns if wwn.startswith(text)]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    def ui_command_info(self):
        '''
        Displays information about the fabric module, notably the supported
        transports(s) and accepted B{wwn} format(s), as well as supported
        features.
        '''
        fabric = self.rtsnode
        self.shell.log.info("Fabric module name: %s" % self.name)
        self.shell.log.info("ConfigFS path: %s" % self.rtsnode.path)
        self.shell.log.info("Allowed WWN types: %s" % ", ".join(fabric.wwn_types))
        if fabric.wwns is not None:
            self.shell.log.info("Allowed WWNs list: %s" % ', '.join(fabric.wwns))
        self.shell.log.info("Fabric module features: %s" % ', '.join(fabric.features))
        self.shell.log.info("Corresponding kernel module: %s"
                            % fabric.kernel_module)

    def ui_command_version(self):
        '''
        Displays the target fabric module version.
        '''
        version = "Target fabric module %s: %s" \
                  % (self.rtsnode.name, self.rtsnode.version)
        self.shell.con.display(version.strip())
class UIMultiTPGTarget(UIRTSLibNode):
    '''
    A generic target UI that has multiple TPGs.
    '''
    def __init__(self, target, parent):
        super(UIMultiTPGTarget, self).__init__(target.wwn, target, parent)
        self.refresh()

    def refresh(self):
        # Rebuild the child node tree from the target's current TPGs.
        self._children = set([])
        for tpg in self.rtsnode.tpgs:
            UITPG(tpg, self)

    def summary(self):
        # A WWN that does not normalize for this fabric is flagged as invalid.
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit are
        # not swallowed here.
        try:
            self.rtsnode.fabric_module.to_normalized_wwn(self.rtsnode.wwn)
        except Exception:
            return ("INVALID WWN", False)

        return ("TPGs: %d" % len(self._children), None)

    def ui_command_create(self, tag=None):
        '''
        Creates a new Target Portal Group within the target. The
        I{tag} must be a positive integer value, optionally prefaced
        by 'tpg'. If omitted, the next available Target Portal Group
        Tag (TPGT) will be used.

        SEE ALSO
        ========
        B{delete}
        '''
        self.assert_root()

        if tag:
            if tag.startswith("tpg"):
                tag = tag[3:]
            try:
                tag = int(tag)
            except ValueError:
                raise ExecutionError("Tag argument must be a number.")

        tpg = TPG(self.rtsnode, tag, mode='create')
        if self.shell.prefs['auto_enable_tpgt']:
            tpg.enable = True

        if tpg.has_feature("auth"):
            tpg.set_attribute("authentication", 0)

        self.shell.log.info("Created TPG %s." % tpg.tag)

        if tpg.has_feature("nps") and self.shell.prefs['auto_add_default_portal']:
            try:
                NetworkPortal(tpg, "0.0.0.0")
                self.shell.log.info("Global pref auto_add_default_portal=true")
                self.shell.log.info("Created default portal listening on all IPs"
                                    " (0.0.0.0), port 3260.")
            except RTSLibError:
                # TPGs within a target cannot share an ip:port pair, so this
                # is a best-effort convenience, not an error.
                self.shell.log.info("Default portal not created, TPGs within a " +
                                    "target cannot share ip:port.")

        ui_tpg = UITPG(tpg, self)
        return self.new_node(ui_tpg)

    def ui_command_delete(self, tag):
        '''
        Deletes the Target Portal Group with TPGT I{tag} from the target. The
        I{tag} must be a positive integer matching an existing TPGT.

        SEE ALSO
        ========
        B{create}
        '''
        self.assert_root()
        if tag.startswith("tpg"):
            tag = tag[3:]
        try:
            tag = int(tag)
        except ValueError:
            raise ExecutionError("Tag argument must be a number.")

        tpg = TPG(self.rtsnode, tag, mode='lookup')
        tpg.delete()
        self.shell.log.info("Deleted TPGT %s." % tag)
        self.refresh()

    def ui_complete_delete(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command delete.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'tag':
            tags = [child.name[3:] for child in self.children]
            completions = [tag for tag in tags if tag.startswith(text)]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions
class UITPG(UIRTSLibNode):
ui_desc_attributes = {
'authentication': ('number', 'If set to 1, enforce authentication for this TPG.'),
'cache_dynamic_acls': ('number', 'If set to 1 in demo mode, cache dynamically generated ACLs.'),
'default_cmdsn_depth': ('number', 'Default CmdSN (Command Sequence Number) depth.'),
'default_erl': ('number', 'Default Error Recovery Level.'),
'demo_mode_discovery': ('number', 'If set to 1 in demo mode, enable discovery.'),
'demo_mode_write_protect': ('number', 'If set to 1 in demo mode, prevent writes to LUNs.'),
'fabric_prot_type': ('number', 'Fabric DIF protection type.'),
'generate_node_acls': ('number', 'If set to 1, allow all initiators to login (i.e. demo mode).'),
'login_timeout': ('number', 'Login timeout value in seconds.'),
'netif_timeout': ('number', 'NIC failure timeout in seconds.'),
'prod_mode_write_protect': ('number', 'If set to 1, prevent writes to LUNs.'),
't10_pi': ('number', 'If set to 1, enable T10 Protection Information.'),
'tpg_enabled_sendtargets': ('number', 'If set to 1, the SendTargets discovery response advertises the TPG only if the TPG is enabled.'),
}
ui_desc_parameters = {
'AuthMethod': ('string', 'Authentication method used by the TPG.'),
'DataDigest': ('string', 'If set to CRC32C, the integrity of the PDU data part is verified.'),
'DataPDUInOrder': ('yesno', 'If set to Yes, the data PDUs within sequences must be in order.'),
'DataSequenceInOrder': ('yesno', 'If set to Yes, the data sequences must be in order.'),
'DefaultTime2Retain': ('number', 'Maximum time, in seconds, after an initial wait, before which an active task reassignment is still possible after an unexpected connection termination or a connection reset.'),
'DefaultTime2Wait': ('number', 'Minimum time, in seconds, to wait before attempting an explicit/implicit logout or an active task reassignment after an unexpected connection termination or a connection reset.'),
'ErrorRecoveryLevel': ('number', 'Recovery levels represent a combination of recovery capabilities.'),
'FirstBurstLength': ('number', 'Maximum amount in bytes of unsolicited data an initiator may send.'),
'HeaderDigest': ('string', 'If set to CRC32C, the integrity of the PDU header part is verified.'),
'IFMarker': ('yesno', 'Deprecated according to RFC 7143.'),
'IFMarkInt': ('string', 'Deprecated according to RFC 7143.'),
'ImmediateData': ('string', 'Immediate data support.'),
'InitialR2T': ('yesno', 'If set to No, the default use of R2T (Ready To Transfer) is disabled.'),
'MaxBurstLength': ('number', 'Maximum SCSI data payload in bytes in a Data-In or a solicited Data-Out iSCSI sequence.'),
'MaxConnections': ('number', 'Maximum number of connections acceptable.'),
'MaxOutstandingR2T': ('number', 'Maximum number of outstanding R2Ts per task.'),
'MaxRecvDataSegmentLength': ('number', 'Maximum data segment length in bytes the target can receive in an iSCSI PDU.'),
'MaxXmitDataSegmentLength': ('number', 'Outgoing MaxRecvDataSegmentLength sent over the wire during iSCSI login response.'),
'OFMarker': ('yesno', 'Deprecated according to RFC 7143.'),
'OFMarkInt': ('string', 'Deprecated according to RFC 7143.'),
'TargetAlias': ('string', 'Human-readable target name or description.'),
}
'''
A generic TPG UI.
'''
def __init__(self, tpg, parent):
name = "tpg%d" % tpg.tag
super(UITPG, self).__init__(name, tpg, parent)
self.refresh()
UILUNs(tpg, self)
if tpg.has_feature('acls'):
UINodeACLs(self.rtsnode, self)
if tpg.has_feature('nps'):
UIPortals(self.rtsnode, self)
if self.rtsnode.has_feature('auth') \
and os.path.exists(self.rtsnode.path + "/auth"):
for param in auth_params:
self.define_config_group_param('auth', param, 'string')
def summary(self):
    '''
    Build the one-line status shown next to the TPG in the tree.

    @return: (message, status) where status is True (good), False (bad)
        or None (neutral).
    '''
    tpg = self.rtsnode
    status = None
    msg = []
    if tpg.has_feature('nexus'):
        msg.append(str(self.rtsnode.nexus))
    # A disabled TPG short-circuits all further reporting.
    if not tpg.enable:
        return ("disabled", False)
    if tpg.has_feature("acls"):
        if "generate_node_acls" in tpg.list_attributes() and \
                int(tpg.get_attribute("generate_node_acls")):
            msg.append("gen-acls")
        else:
            msg.append("no-gen-acls")
        # 'auth' feature requires 'acls'
        if tpg.has_feature("auth"):
            if not int(tpg.get_attribute("authentication")):
                msg.append("no-auth")
                if int(tpg.get_attribute("generate_node_acls")):
                    # if auth=0, g_n_a=1 is recommended
                    status = True
            else:
                if not int(tpg.get_attribute("generate_node_acls")):
                    # Authentication is configured per-ACL in this case.
                    msg.append("auth per-acl")
                else:
                    # TPG-wide authentication: report whether credentials
                    # are complete and whether it is mutual or one-way.
                    msg.append("tpg-auth")
                    status = True
                    if not (tpg.chap_password and tpg.chap_userid):
                        status = False
                    if tpg.authenticate_target:
                        msg.append("mutual auth")
                    else:
                        msg.append("1-way auth")
    return (", ".join(msg), status)
def ui_getgroup_auth(self, auth_attr):
    '''
    Backend getter for the auth config group.

    @param auth_attr: Name of the CHAP attribute to read (e.g. 'userid').
    @type auth_attr: str
    @return: The current value of that attribute on the TPG.
    @rtype: str
    '''
    attr_name = "chap_" + auth_attr
    return getattr(self.rtsnode, attr_name)
def ui_setgroup_auth(self, auth_attr, value):
    '''
    Backend setter for the auth config group.

    @param auth_attr: Name of the CHAP attribute to write (e.g. 'userid').
    @type auth_attr: str
    @param value: New value; None is stored as the empty string.
    @type value: str
    '''
    self.assert_root()
    # A None value means "clear the attribute".
    normalized = '' if value is None else value
    setattr(self.rtsnode, "chap_" + auth_attr, normalized)
def ui_command_enable(self):
    '''
    Enables the TPG.

    SEE ALSO
    ========
    B{disable status}
    '''
    self.assert_root()
    # Guard clause: nothing to do if the TPG is already up.
    if self.rtsnode.enable:
        self.shell.log.info("The TPGT is already enabled.")
        return
    try:
        self.rtsnode.enable = True
        self.shell.log.info("The TPGT has been enabled.")
    except RTSLibError:
        raise ExecutionError("The TPGT could not be enabled.")
def ui_command_disable(self):
    '''
    Disables the TPG.

    SEE ALSO
    ========
    B{enable status}
    '''
    self.assert_root()
    # Guard clause: nothing to do if the TPG is already down.
    if not self.rtsnode.enable:
        self.shell.log.info("The TPGT is already disabled.")
        return
    self.rtsnode.enable = False
    self.shell.log.info("The TPGT has been disabled.")
class UITarget(UITPG):
    '''
    A generic target UI merged with its only TPG.
    '''
    def __init__(self, target, parent):
        '''
        Wrap I{target} and expose its single TPG (tag 1) directly.

        @param target: The rtslib Target object to wrap.
        @param parent: The parent UI node in the configuration tree.
        '''
        super(UITarget, self).__init__(TPG(target, 1), parent)
        self._name = target.wwn
        self.target = target
        # SBP targets are not auto-enabled; every other single-TPG fabric
        # is made usable immediately.
        if self.parent.name != "sbp":
            self.rtsnode.enable = True

    def summary(self):
        '''
        Flag targets whose WWN does not normalize for this fabric;
        otherwise defer to the merged TPG's summary.
        '''
        try:
            self.target.fabric_module.to_normalized_wwn(self.target.wwn)
        except Exception:
            # Was a bare 'except:', which would also swallow SystemExit and
            # KeyboardInterrupt; only genuine errors should mark the WWN
            # as invalid.
            return ("INVALID WWN", False)
        return super(UITarget, self).summary()
class UINodeACLs(UINode):
    '''
    A generic UI for node ACLs.
    '''
    def __init__(self, tpg, parent):
        super(UINodeACLs, self).__init__("acls", parent)
        self.tpg = tpg
        self.refresh()

    def refresh(self):
        '''Rebuild the child UI nodes, one per distinct WWN or tag.'''
        self._children = set([])
        for name in self.all_names():
            UINodeACL(name, self)

    def summary(self):
        # Status is None: an ACL count is informational, never good/bad.
        return ("ACLs: %d" % len(self._children), None)

    def ui_command_create(self, wwn, add_mapped_luns=None):
        '''
        Creates a Node ACL for the initiator node with the specified I{wwn}.
        The node's I{wwn} must match the expected WWN Type of the target's
        fabric module.

        If I{add_mapped_luns} is omitted, the global parameter
        B{auto_add_mapped_luns} will be used, else B{true} or B{false} are
        accepted. If B{true}, then after creating the ACL, mapped LUNs will be
        automatically created for all existing LUNs.

        SEE ALSO
        ========
        B{delete}
        '''
        self.assert_root()

        add_mapped_luns = self.ui_eval_param(add_mapped_luns, 'bool',
                                             self.shell.prefs['auto_add_mapped_luns'])

        node_acl = NodeACL(self.tpg, wwn, mode="create")
        ui_node_acl = UINodeACL(node_acl.node_wwn, self)
        self.shell.log.info("Created Node ACL for %s" % node_acl.node_wwn)

        if add_mapped_luns:
            # Mirror every existing TPG LUN 1:1 into the new ACL.
            for lun in self.tpg.luns:
                MappedLUN(node_acl, lun.lun, lun.lun, write_protect=False)
                self.shell.log.info("Created mapped LUN %d." % lun.lun)
            self.refresh()

        return self.new_node(ui_node_acl)

    def ui_command_delete(self, wwn):
        '''
        Deletes the Node ACL with the specified I{wwn}.

        SEE ALSO
        ========
        B{create}
        '''
        self.assert_root()
        node_acl = NodeACL(self.tpg, wwn, mode='lookup')
        node_acl.delete()
        self.shell.log.info("Deleted Node ACL %s." % wwn)
        self.refresh()

    def ui_complete_delete(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command delete.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'wwn':
            wwns = [acl.node_wwn for acl in self.tpg.node_acls]
            completions = [wwn for wwn in wwns if wwn.startswith(text)]
        else:
            completions = []

        # A unique match gets a trailing space so the shell moves on.
        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    def find_tagged(self, name):
        '''Yield every NodeACL whose WWN or tag equals I{name}.'''
        for na in self.tpg.node_acls:
            if na.node_wwn == name:
                yield na
            elif na.tag == name:
                yield na

    def all_names(self):
        '''Return the set of display names: a tag if set, else the WWN.'''
        names = set([])
        for na in self.tpg.node_acls:
            if na.tag:
                names.add(na.tag)
            else:
                names.add(na.node_wwn)
        return names

    def ui_command_tag(self, wwn_or_tag, new_tag):
        '''
        Tag a NodeACL.

        Usage: tag <wwn_or_tag> <new_tag>

        Tags help manage initiator WWNs. A tag can apply to one or
        more WWNs. This can give a more meaningful name to a single
        initiator's configuration, or allow multiple initiators with
        identical settings to be configured en masse.

        The WWNs described by <wwn_or_tag> will be given the new
        tag. If new_tag already exists, its new members will adopt the
        current tag's configuration.

        Within a tag, the 'info' command shows the WWNs the tag applies to.

        Use 'untag' to remove tags.

        NOTE: tags are only supported in kernel 3.8 and above.
        '''
        if wwn_or_tag == new_tag:
            return

        # Since all WWNs have a '.' in them, let's avoid confusion.
        if '.' in new_tag:
            raise ExecutionError("'.' not permitted in tag names.")

        src = list(self.find_tagged(wwn_or_tag))
        if not src:
            raise ExecutionError("wwn_or_tag %s not found." % wwn_or_tag)

        old_tag_members = list(self.find_tagged(new_tag))

        # handle overlap
        src_wwns = [na.node_wwn for na in src]
        old_tag_members = [old for old in old_tag_members if old.node_wwn not in src_wwns]

        for na in src:
            na.tag = new_tag

            # if joining a tag, take its config
            if old_tag_members:
                model = old_tag_members[0]

                # Replace mapped LUNs with the model ACL's mappings.
                for mlun in na.mapped_luns:
                    mlun.delete()

                for mlun in model.mapped_luns:
                    MappedLUN(na, mlun.mapped_lun, mlun.tpg_lun, mlun.write_protect)

                # Copy CHAP credentials, attributes, and parameters too.
                if self.parent.rtsnode.has_feature("auth"):
                    for param in auth_params:
                        setattr(na, "chap_" + param, getattr(model, "chap_" + param))

                for item in model.list_attributes(writable=True):
                    na.set_attribute(item, model.get_attribute(item))
                for item in model.list_parameters(writable=True):
                    na.set_parameter(item, model.get_parameter(item))

        self.refresh()

    def ui_command_untag(self, wwn_or_tag):
        '''
        Untag a NodeACL.

        Usage: untag <tag>

        Remove the tag given to one or more initiator WWNs. They will
        return to being displayed by WWN in the configuration tree, and
        will maintain settings from when they were tagged.
        '''
        for na in list(self.find_tagged(wwn_or_tag)):
            na.tag = None

        self.refresh()

    def ui_complete_tag(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command tag
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'wwn_or_tag':
            completions = [n for n in self.all_names() if n.startswith(text)]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    # 'untag' completes against the same name set as 'tag'.
    ui_complete_untag = ui_complete_tag
class UINodeACL(UIRTSLibNode):
    '''
    A generic UI for a node ACL.

    Handles grouping multiple NodeACLs in UI via tags.
    All gets are performed against first NodeACL.
    All sets are performed on all NodeACLs.
    This is to make management of multiple ACLs easier.
    '''
    ui_desc_attributes = {
        'dataout_timeout': ('number', 'Data-Out timeout in seconds before invoking recovery.'),
        'dataout_timeout_retries': ('number', 'Number of Data-Out timeout recovery attempts before failing a path.'),
        'default_erl': ('number', 'Default Error Recovery Level.'),
        'nopin_response_timeout': ('number', 'Nop-In response timeout in seconds.'),
        'nopin_timeout': ('number', 'Nop-In timeout in seconds.'),
        'random_datain_pdu_offsets': ('number', 'If set to 1, request random Data-In PDU offsets.'),
        'random_datain_seq_offsets': ('number', 'If set to 1, request random Data-In sequence offsets.'),
        'random_r2t_offsets': ('number', 'If set to 1, request random R2T (Ready To Transfer) offsets.'),
    }

    # ACL-level parameters share the TPG's parameter descriptions.
    ui_desc_parameters = UITPG.ui_desc_parameters

    def __init__(self, name, parent):
        # Don't want to duplicate work in UIRTSLibNode, so call it but
        # del self.rtsnode to make sure we always use self.rtsnodes.
        self.rtsnodes = list(parent.find_tagged(name))
        super(UINodeACL, self).__init__(name, self.rtsnodes[0], parent)
        del self.rtsnode

        if self.parent.parent.rtsnode.has_feature('auth'):
            for parameter in auth_params:
                self.define_config_group_param('auth', parameter, 'string')

        self.refresh()

    def ui_getgroup_auth(self, auth_attr):
        '''
        This is the backend method for getting auths attributes.
        @param auth_attr: The auth attribute to get the value of.
        @type auth_attr: str
        @return: The auth attribute's value
        @rtype: str
        '''
        # All should return same, so just return from the first one
        return getattr(self.rtsnodes[0], "chap_" + auth_attr)

    def ui_setgroup_auth(self, auth_attr, value):
        '''
        This is the backend method for setting auths attributes.
        @param auth_attr: The auth attribute to set the value of.
        @type auth_attr: str
        @param value: The auth's value
        @type value: str
        '''
        self.assert_root()

        if value is None:
            value = ''

        # Sets apply to every ACL in the tag group.
        for na in self.rtsnodes:
            setattr(na, "chap_" + auth_attr, value)

    def refresh(self):
        '''Rebuild child UI nodes from the first ACL's mapped LUNs.'''
        self._children = set([])
        for mlun in self.rtsnodes[0].mapped_luns:
            UIMappedLUN(mlun, self)

    def summary(self):
        '''Describe the (possibly grouped) ACL and its auth status.'''
        msg = []
        if self.name != self.rtsnodes[0].node_wwn:
            # This node is a tag; show what it covers.
            if len(self.rtsnodes) > 1:
                msg.append("(group of %d)" % len(self.rtsnodes))
            else:
                msg.append("(%s)" % self.rtsnodes[0].node_wwn)

        status = None
        na = self.rtsnodes[0]
        tpg = self.parent.parent.rtsnode
        if tpg.has_feature("auth") and \
                int(tpg.get_attribute("authentication")):
            if int(tpg.get_attribute("generate_node_acls")):
                msg.append("auth via tpg")
            else:
                status = True
                if not (na.chap_password and na.chap_userid):
                    status = False

                if na.authenticate_target:
                    msg.append("mutual auth")
                else:
                    msg.append("1-way auth")

        msg.append("Mapped LUNs: %d" % len(self._children))

        return (", ".join(msg), status)

    def ui_command_create(self, mapped_lun, tpg_lun_or_backstore, write_protect=None):
        '''
        Creates a mapping to one of the TPG LUNs for the initiator referenced
        by the ACL. The provided I{tpg_lun_or_backstore} will appear to that
        initiator as LUN I{mapped_lun}. If the I{write_protect} flag is set to
        B{1}, the initiator will not have write access to the Mapped LUN.

        A storage object may also be given for the I{tpg_lun_or_backstore} parameter,
        in which case the TPG LUN will be created for that backstore before
        mapping the LUN to the initiator. If a TPG LUN for the backstore already
        exists, the Mapped LUN will map to that TPG LUN.

        Finally, a path to an existing block device or file can be given. If so,
        a storage object of the appropriate type is created with default parameters,
        followed by the TPG LUN and the Mapped LUN.

        SEE ALSO
        ========
        B{delete}
        '''
        self.assert_root()

        try:
            mapped_lun = int(mapped_lun)
        except ValueError:
            raise ExecutionError("mapped_lun must be an integer")

        # Resolve tpg_lun_or_backstore in order of preference:
        # a "lunN"/numeric TPG LUN, then a UI node path, then a device path.
        try:
            if tpg_lun_or_backstore.startswith("lun"):
                tpg_lun_or_backstore = tpg_lun_or_backstore[3:]
            tpg_lun = int(tpg_lun_or_backstore)
        except ValueError:
            try:
                so = self.get_node(tpg_lun_or_backstore).rtsnode
            except ValueError:
                try:
                    so = StorageObjectFactory(tpg_lun_or_backstore)
                    self.shell.log.info("Created storage object %s." % so.name)
                except RTSLibError:
                    raise ExecutionError("LUN, storage object, or path not valid")
                self.get_node("/backstores").refresh()

            ui_tpg = self.parent.parent

            # Reuse an existing TPG LUN for this storage object, or make one.
            for lun in ui_tpg.rtsnode.luns:
                if so == lun.storage_object:
                    tpg_lun = lun.lun
                    break
            else:
                lun_object = LUN(ui_tpg.rtsnode, storage_object=so)
                self.shell.log.info("Created LUN %s." % lun_object.lun)
                ui_lun = UILUN(lun_object, ui_tpg.get_node("luns"))
                tpg_lun = ui_lun.rtsnode.lun

        if tpg_lun in (ml.tpg_lun.lun for ml in self.rtsnodes[0].mapped_luns):
            self.shell.log.warning(
                "Warning: TPG LUN %d already mapped to this NodeACL" % tpg_lun)

        # Apply the mapping to every ACL in the tag group.
        for na in self.rtsnodes:
            mlun = MappedLUN(na, mapped_lun, tpg_lun, write_protect)

        ui_mlun = UIMappedLUN(mlun, self)
        self.shell.log.info("Created Mapped LUN %s." % mlun.mapped_lun)
        return self.new_node(ui_mlun)

    def ui_complete_create(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command create.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'tpg_lun_or_backstore':
            # Offer storage objects, TPG LUN names, and filesystem paths.
            completions = []
            for backstore in self.get_node('/backstores').children:
                for storage_object in backstore.children:
                    completions.append(storage_object.path)
            completions.extend(lun.name for lun in self.parent.parent.get_node("luns").children)
            completions.extend(complete_path(text, lambda x: stat.S_ISREG(x) or stat.S_ISBLK(x)))
            completions = [c for c in completions if c.startswith(text)]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    def ui_command_delete(self, mapped_lun):
        '''
        Deletes the specified I{mapped_lun}.

        SEE ALSO
        ========
        B{create}
        '''
        self.assert_root()
        # Remove the mapping from every ACL in the tag group.
        for na in self.rtsnodes:
            mlun = MappedLUN(na, mapped_lun)
            mlun.delete()
        self.shell.log.info("Deleted Mapped LUN %s." % mapped_lun)
        self.refresh()

    def ui_complete_delete(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command delete.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'mapped_lun':
            mluns = [str(mlun.mapped_lun) for mlun in self.rtsnodes[0].mapped_luns]
            completions = [mlun for mlun in mluns if mlun.startswith(text)]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    # Override these four methods to handle multiple NodeACLs
    def ui_getgroup_attribute(self, attribute):
        # Gets read from the first ACL; the group is kept in sync on set.
        return self.rtsnodes[0].get_attribute(attribute)

    def ui_setgroup_attribute(self, attribute, value):
        self.assert_root()

        for na in self.rtsnodes:
            na.set_attribute(attribute, value)

    def ui_getgroup_parameter(self, parameter):
        return self.rtsnodes[0].get_parameter(parameter)

    def ui_setgroup_parameter(self, parameter, value):
        self.assert_root()

        for na in self.rtsnodes:
            na.set_parameter(parameter, value)

    def ui_command_info(self):
        '''
        Since we don't have a self.rtsnode we can't use the base implementation
        of this method. We also want to not print node_wwn, but list *all*
        wwns for this entry.
        '''
        info = self.rtsnodes[0].dump()
        for item in ('attributes', 'parameters', "node_wwn"):
            if item in info:
                del info[item]
        for name, value in sorted(six.iteritems(info)):
            if not isinstance (value, (dict, list)):
                self.shell.log.info("%s: %s" % (name, value))
        self.shell.log.info("wwns:")
        for na in self.parent.find_tagged(self.name):
            self.shell.log.info(na.node_wwn)
class UIMappedLUN(UIRTSLibNode):
    '''
    A generic UI for MappedLUN objects.
    '''
    def __init__(self, mapped_lun, parent):
        node_name = "mapped_lun%d" % mapped_lun.mapped_lun
        super(UIMappedLUN, self).__init__(node_name, mapped_lun, parent)
        self.refresh()

    def summary(self):
        '''
        Describe the mapping and report its health; a mapping whose TPG
        LUN no longer exists is flagged as broken.
        '''
        mlun = self.rtsnode
        try:
            tpg_lun = mlun.tpg_lun
        except RTSLibBrokenLink:
            return ("BROKEN LUN LINK", False)

        access_mode = 'ro' if mlun.write_protect else 'rw'
        so = tpg_lun.storage_object
        description = "lun%d %s/%s (%s)" \
            % (tpg_lun.lun, so.plugin, so.name, access_mode)
        return (description, True)
class UILUNs(UINode):
    '''
    A generic UI for TPG LUNs.
    '''
    def __init__(self, tpg, parent):
        super(UILUNs, self).__init__("luns", parent)
        self.tpg = tpg
        self.refresh()

    def refresh(self):
        '''Rebuild the child UI nodes, one per TPG LUN.'''
        self._children = set([])
        for lun in self.tpg.luns:
            UILUN(lun, self)

    def summary(self):
        # Status is None: a LUN count is informational, never good/bad.
        return ("LUNs: %d" % len(self._children), None)

    def ui_command_create(self, storage_object, lun=None,
                          add_mapped_luns=None):
        '''
        Creates a new LUN in the Target Portal Group, attached to a storage
        object. If the I{lun} parameter is omitted, the first available LUN in
        the TPG will be used. If present, it must be a number greater than 0.
        Alternatively, the syntax I{lunX} where I{X} is a positive number is
        also accepted.

        The I{storage_object} may be the path of an existing storage object,
        i.e. B{/backstore/pscsi0/mydisk} to reference the B{mydisk} storage
        object of the virtual HBA B{pscsi0}. It also may be the path to an
        existing block device or image file, in which case a storage object
        will be created for it first, with default parameters.

        If I{add_mapped_luns} is omitted, the global parameter
        B{auto_add_mapped_luns} will be used, else B{true} or B{false} are
        accepted. If B{true}, then after creating the LUN, mapped LUNs will be
        automatically created for all existing node ACLs, mapping the new LUN.

        SEE ALSO
        ========
        B{delete}
        '''
        self.assert_root()

        add_mapped_luns = \
                self.ui_eval_param(add_mapped_luns, 'bool',
                                   self.shell.prefs['auto_add_mapped_luns'])

        # Resolve the storage object: a UI node path first, else treat the
        # argument as a device/file path and create a storage object for it.
        try:
            so = self.get_node(storage_object).rtsnode
        except ValueError:
            try:
                so = StorageObjectFactory(storage_object)
                self.shell.log.info("Created storage object %s." % so.name)
            except RTSLibError:
                raise ExecutionError("storage object or path not valid")
            self.get_node("/backstores").refresh()

        if so in (l.storage_object for l in self.parent.rtsnode.luns):
            raise ExecutionError("lun for storage object %s/%s already exists" \
                                     % (so.plugin, so.name))

        # Accept both "N" and "lunN" syntax.
        if lun and lun.lower().startswith('lun'):
            lun = lun[3:]
        lun_object = LUN(self.tpg, lun, so)
        self.shell.log.info("Created LUN %s." % lun_object.lun)
        ui_lun = UILUN(lun_object, self)

        if add_mapped_luns:
            for acl in self.tpg.node_acls:
                if lun:
                    mapped_lun = lun
                else:
                    mapped_lun = 0
                existing_mluns = [mlun.mapped_lun for mlun in acl.mapped_luns]
                if mapped_lun in existing_mluns:
                    # Preferred number is taken; fall back to the first free
                    # mapped LUN number for this ACL, if any.
                    mapped_lun = None
                    for possible_mlun in six.moves.range(MappedLUN.MAX_LUN):
                        if possible_mlun not in existing_mluns:
                            mapped_lun = possible_mlun
                            break

                # Fixed: was 'mapped_lun == None'; identity comparison with
                # None should use 'is' (PEP 8).
                if mapped_lun is None:
                    self.shell.log.warning(
                        "Cannot map new lun %s into ACL %s"
                        % (lun_object.lun, acl.node_wwn))
                else:
                    mlun = MappedLUN(acl, mapped_lun, lun_object, write_protect=False)
                    self.shell.log.info("Created LUN %d->%d mapping in node ACL %s"
                                        % (mlun.tpg_lun.lun, mlun.mapped_lun, acl.node_wwn))
            self.parent.refresh()

        return self.new_node(ui_lun)

    def ui_complete_create(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command create.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'storage_object':
            # Offer both existing storage objects and filesystem paths.
            storage_objects = []
            for backstore in self.get_node('/backstores').children:
                for storage_object in backstore.children:
                    storage_objects.append(storage_object.path)
            completions = [so for so in storage_objects if so.startswith(text)]

            completions.extend(complete_path(text, lambda x: stat.S_ISREG(x) or stat.S_ISBLK(x)))
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    def ui_command_delete(self, lun):
        '''
        Deletes the supplied LUN from the Target Portal Group. The I{lun} must
        be a positive number matching an existing LUN.

        Alternatively, the syntax I{lunX} where I{X} is a positive number is
        also accepted.

        SEE ALSO
        ========
        B{create}
        '''
        self.assert_root()
        if lun.lower().startswith("lun"):
            lun = lun[3:]
        try:
            lun_object = LUN(self.tpg, lun)
        except Exception:
            # Was a bare 'except:', which would also swallow SystemExit and
            # KeyboardInterrupt; only lookup failures should be reported as
            # an invalid LUN.
            raise RTSLibError("Invalid LUN")
        lun_object.delete()
        self.shell.log.info("Deleted LUN %s." % lun)
        # Refresh the TPG as we need to also refresh acls MappedLUNs
        self.parent.refresh()

    def ui_complete_delete(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command delete.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        if current_param == 'lun':
            luns = [str(lun.lun) for lun in self.tpg.luns]
            completions = [lun for lun in luns if lun.startswith(text)]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions
class UILUN(UIRTSLibNode):
    '''
    A generic UI for LUN objects.
    '''
    def __init__(self, lun, parent):
        node_name = "lun%d" % lun.lun
        super(UILUN, self).__init__(node_name, lun, parent)
        self.refresh()

        self.define_config_group_param("alua", "alua_tg_pt_gp_name", 'string')

    def summary(self):
        '''
        Describe the LUN's backing storage (flagging a broken link) and
        always append its ALUA target port group name.
        '''
        lun = self.rtsnode
        try:
            so = lun.storage_object
        except RTSLibBrokenLink:
            description = "BROKEN STORAGE LINK"
            is_healthy = False
        else:
            is_healthy = True
            parts = ["%s/%s" % (so.plugin, so.name)]
            if so.udev_path:
                parts.append("(%s)" % so.udev_path)
            description = " ".join(parts)

        description += " (%s)" % lun.alua_tg_pt_gp_name

        return (description, is_healthy)
class UIPortals(UINode):
    '''
    A generic UI for TPG network portals.
    '''
    def __init__(self, tpg, parent):
        super(UIPortals, self).__init__("portals", parent)
        self.tpg = tpg
        self.refresh()

    def refresh(self):
        '''Rebuild the child UI nodes, one per network portal.'''
        self._children = set([])
        for portal in self.tpg.network_portals:
            UIPortal(portal, self)

    def summary(self):
        # Status is None: a portal count is informational, never good/bad.
        return ("Portals: %d" % len(self._children), None)

    def _canonicalize_ip(self, ip_address):
        """
        rtslib expects ipv4 addresses as a dotted-quad string, and IPv6
        addresses surrounded by brackets.
        """
        # Contains a '.'? Must be ipv4, right?
        if "." in ip_address:
            return ip_address
        return "[" + ip_address + "]"

    def ui_command_create(self, ip_address=None, ip_port=None):
        '''
        Creates a Network Portal with specified I{ip_address} and
        I{ip_port}. If I{ip_port} is omitted, the default port for
        the target fabric will be used. If I{ip_address} is omitted,
        INADDR_ANY (0.0.0.0) will be used.

        Choosing IN6ADDR_ANY (::0) will listen on all IPv6 interfaces
        as well as IPv4, assuming IPV6_V6ONLY sockopt has not been
        set.

        Note: Portals on Link-local IPv6 addresses are currently not
        supported.

        SEE ALSO
        ========
        B{delete}
        '''
        self.assert_root()

        # FIXME: Add a specfile parameter to determine default port
        ip_port = self.ui_eval_param(ip_port, 'number', 3260)
        ip_address = self.ui_eval_param(ip_address, 'string', "0.0.0.0")

        if ip_port == 3260:
            self.shell.log.info("Using default IP port %d" % ip_port)
        if ip_address == "0.0.0.0":
            self.shell.log.info("Binding to INADDR_ANY (0.0.0.0)")

        portal = NetworkPortal(self.tpg, self._canonicalize_ip(ip_address),
                               ip_port, mode='create')
        self.shell.log.info("Created network portal %s:%d."
                            % (ip_address, ip_port))
        ui_portal = UIPortal(portal, self)
        return self.new_node(ui_portal)

    def ui_complete_create(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command create.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        def list_eth_ips():
            # ethtool is optional; with it absent we simply offer nothing.
            if not ethtool:
                return []

            devcfgs = ethtool.get_interfaces_info(ethtool.get_devices())
            addrs = set()
            for d in devcfgs:
                if d.ipv4_address:
                    addrs.add(d.ipv4_address)
                    addrs.add("0.0.0.0")
                for ip6 in d.get_ipv6_addresses():
                    addrs.add(ip6.address)
                    addrs.add("::0") # only list ::0 if ipv6 present

            return sorted(addrs)

        if current_param == 'ip_address':
            completions = [addr for addr in list_eth_ips()
                           if addr.startswith(text)]
        else:
            completions = []

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions

    def ui_command_delete(self, ip_address, ip_port):
        '''
        Deletes the Network Portal with specified I{ip_address} and I{ip_port}.

        SEE ALSO
        ========
        B{create}
        '''
        self.assert_root()
        portal = NetworkPortal(self.tpg, self._canonicalize_ip(ip_address),
                               ip_port, mode='lookup')
        portal.delete()
        self.shell.log.info("Deleted network portal %s:%s"
                            % (ip_address, ip_port))
        self.refresh()

    def ui_complete_delete(self, parameters, text, current_param):
        '''
        Parameter auto-completion method for user command delete.
        @param parameters: Parameters on the command line.
        @type parameters: dict
        @param text: Current text of parameter being typed by the user.
        @type text: str
        @param current_param: Name of parameter to complete.
        @type current_param: str
        @return: Possible completions
        @rtype: list of str
        '''
        completions = []

        # TODO: Check if a dict comprehension is acceptable here with supported
        # XXX: python versions.
        # Build ip -> [ports] so each parameter can narrow the other.
        portals = {}
        all_ports = set([])
        for portal in self.tpg.network_portals:
            all_ports.add(str(portal.port))
            portal_ip = portal.ip_address.strip('[]')
            # Fixed: was 'if not portal_ip in portals' — idiomatic Python
            # uses the 'not in' operator.
            if portal_ip not in portals:
                portals[portal_ip] = []
            portals[portal_ip].append(str(portal.port))

        if current_param == 'ip_address':
            completions = [addr for addr in portals if addr.startswith(text)]
            if 'ip_port' in parameters:
                port = parameters['ip_port']
                completions = [addr for addr in completions
                               if port in portals[addr]]
        elif current_param == 'ip_port':
            if 'ip_address' in parameters:
                addr = parameters['ip_address']
                if addr in portals:
                    completions = [port for port in portals[addr]
                                   if port.startswith(text)]
            else:
                completions = [port for port in all_ports
                               if port.startswith(text)]

        if len(completions) == 1:
            return [completions[0] + ' ']
        else:
            return completions
class UIPortal(UIRTSLibNode):
    '''
    A generic UI for a network portal.
    '''
    def __init__(self, portal, parent):
        node_name = "%s:%s" % (portal.ip_address, portal.port)
        super(UIPortal, self).__init__(node_name, portal, parent)
        self.refresh()

    def summary(self):
        '''Report the transport offload mode of this portal, if any.'''
        portal = self.rtsnode
        if portal.iser:
            return ('iser', True)
        if portal.offload:
            return ('offload', True)
        return ('', True)

    def ui_command_enable_iser(self, boolean):
        '''
        Enables or disables iSER for this NetworkPortal.

        If iSER is not supported by the kernel, this command will do nothing.
        '''
        enabled = self.ui_eval_param(boolean, 'bool', False)
        self.rtsnode.iser = enabled
        self.shell.log.info("iSER enable now: %s" % self.rtsnode.iser)

    def ui_command_enable_offload(self, boolean):
        '''
        Enables or disables offload for this NetworkPortal.

        If offload is not supported by the kernel, this command will do nothing.
        '''
        enabled = self.ui_eval_param(boolean, 'bool', False)
        self.rtsnode.offload = enabled
        self.shell.log.info("offload enable now: %s" % self.rtsnode.offload)
| agrover/targetcli-fb | targetcli/ui_target.py | Python | apache-2.0 | 54,009 |
/*
* Copyright 2017 Patrick Ahlbrecht
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.onyxbits.raccoon.gplay;
import java.util.ArrayList;
import java.util.List;
import de.onyxbits.raccoon.ptools.Device;
/**
* Create a mock device based on an existing device, connected via ADB.
*
* @author patrick
*
*/
public class RealDevice extends MockDevice {

	private Device device;
	private String versionName;
	private int versionCode;

	/**
	 * Create a new mock. Note: we don't attempt to retrieve version information
	 * of the Finsky app from the device. The premise of Raccoon is that the user
	 * either does not have Play Services installed or doesn't want to use them
	 * (and hence does not keep them up to date).
	 * 
	 * @param device
	 *          a device, connected via ADB
	 * @param versionName
	 *          version name of the Finsky app
	 * @param versionCode
	 *          version code of the Finsky app
	 */
	public RealDevice(Device device, String versionName, int versionCode) {
		this.device = device;
		this.versionName = versionName;
		this.versionCode = versionCode;
	}

	/**
	 * Read a device property and parse it as a decimal integer.
	 * 
	 * @param key
	 *          property name to query on the device
	 * @param fallback
	 *          value to use when the property is missing or not numeric
	 * @return the parsed property value, or {@code fallback} on a
	 *         {@link NumberFormatException}
	 */
	private int getIntProperty(String key, int fallback) {
		try {
			return Integer.parseInt(device.getProperty(key, UNKNOWN));
		}
		catch (NumberFormatException e) {
			return fallback;
		}
	}

	@Override
	public int getFinskyVersionCode() {
		return versionCode;
	}

	@Override
	public String getFinskyVersion() {
		return versionName;
	}

	@Override
	public int getSdkVersion() {
		return getIntProperty("ro.build.version.sdk", 14);
	}

	@Override
	public String getDevice() {
		return device.getProperty("ro.product.device", UNKNOWN);
	}

	@Override
	public String getHardware() {
		return device.getProperty("ro.hardware", UNKNOWN);
	}

	@Override
	public String getBuildProduct() {
		return device.getProperty("ro.build.product", UNKNOWN);
	}

	@Override
	public String getBuildId() {
		return device.getProperty("ro.build.id", UNKNOWN);
	}

	@Override
	public String getBuildType() {
		return device.getProperty("ro.build.type", UNKNOWN);
	}

	@Override
	public List<String> getNativePlatforms() {
		List<String> abis = new ArrayList<String>();
		try {
			// Stuff works differently pre-lollipop: two dedicated properties
			// instead of a single comma separated list.
			if (Integer.parseInt(device.getProperty("ro.build.version.sdk", null)) < 21) {
				String tmp = device.getProperty("ro.product.cpu.abi", null);
				if (tmp != null) {
					abis.add(tmp);
				}
				tmp = device.getProperty("ro.product.cpu.abi2", null);
				if (tmp != null) {
					abis.add(tmp);
				}
			}
			else {
				String[] tmp = device.getProperty("ro.product.cpu.abilist", "").split(
						" *, *");
				for (String s : tmp) {
					abis.add(s);
				}
			}
		}
		catch (Exception e) {
			// Deliberately best effort: a device we cannot query simply
			// reports no native platforms.
		}
		return abis;
	}

	@Override
	public List<String> getSharedLibraries() {
		return device.getSharedLibraries();
	}

	@Override
	public List<String> getSystemFeatures() {
		return device.getSystemFeatures();
	}

	@Override
	public int getScreenDensity() {
		return getIntProperty("ro.sf.lcd_density", 240);
	}

	@Override
	public String getManufacturer() {
		return device.getProperty("ro.product.manufacturer", UNKNOWN);
	}

	@Override
	public String getBootloader() {
		return device.getProperty("ro.bootloader", UNKNOWN);
	}

	@Override
	public String getFingerprint() {
		return device.getProperty("ro.build.fingerprint", UNKNOWN);
	}

	@Override
	public int getScreenWidth() {
		// NOTE(review): screen geometry is not queried from the device;
		// presumably the Play API tolerates a zero value — confirm.
		return 0;
	}

	@Override
	public int getScreenHeight() {
		return 0;
	}

	@Override
	public int getGlEsVersion() {
		return getIntProperty("ro.opengles.version", 310260);
	}
}
| onyxbits/raccoon4 | src/main/java/de/onyxbits/raccoon/gplay/RealDevice.java | Java | apache-2.0 | 4,179 |
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from nova import test
from nova.tests.unit.virt.libvirt import fakelibvirt
from nova import utils
from nova.virt import fake
from nova.virt.libvirt import driver
from nova.virt.libvirt.volume import fs
# Shared fixture values used by the test cases below.
FAKE_MOUNT_POINT = '/var/lib/nova/fake-mount'
FAKE_SHARE = 'fake-share'
NORMALIZED_SHARE = FAKE_SHARE + '-normalized'
HASHED_SHARE = utils.get_hash_str(NORMALIZED_SHARE)
FAKE_DEVICE_NAME = 'fake-device'
class FSVolumeDriverSubclassSignatureTestCase(test.SubclassSignatureTestCase):
    # Signature check over every subclass of the fs volume driver base;
    # the base class itself carries the contract being verified.
    def _get_base_class(self):
        # We do this because it has the side-effect of loading all the
        # volume drivers
        self.useFixture(fakelibvirt.FakeLibvirtFixture())
        driver.LibvirtDriver(fake.FakeVirtAPI(), False)
        return fs.LibvirtBaseFileSystemVolumeDriver
class FakeFileSystemVolumeDriver(fs.LibvirtBaseFileSystemVolumeDriver):
    """Minimal concrete driver used as the test double below."""

    def _get_mount_point_base(self):
        # Fixed mount root so expected paths are deterministic.
        return FAKE_MOUNT_POINT

    def _normalize_export(self, export):
        # Ignore the input and return a fixed normalized share name.
        return NORMALIZED_SHARE
class LibvirtBaseFileSystemVolumeDriverTestCase(test.NoDBTestCase):
    """Tests the basic behavior of the LibvirtBaseFileSystemVolumeDriver"""

    def setUp(self):
        super(LibvirtBaseFileSystemVolumeDriverTestCase, self).setUp()
        self.connection = mock.Mock()
        self.driver = FakeFileSystemVolumeDriver(self.connection)
        self.connection_info = {
            'data': {'export': FAKE_SHARE, 'name': FAKE_DEVICE_NAME},
        }

    def test_get_device_path(self):
        # Device path is <mount base>/<hash of normalized export>/<name>.
        expected_path = os.path.join(
            FAKE_MOUNT_POINT, HASHED_SHARE, FAKE_DEVICE_NAME)
        actual_path = self.driver._get_device_path(self.connection_info)
        self.assertEqual(expected_path, actual_path)
| rajalokan/nova | nova/tests/unit/virt/libvirt/volume/test_fs.py | Python | apache-2.0 | 2,437 |
package com.appspot.pistatium.houkagoapp;
import android.content.Intent;
import android.os.Bundle;
import com.appspot.pistatium.houkagoapp.base.ActivityBase;
import com.appspot.pistatium.utilities.L;
import com.appspot.pistatium.utilities.Pref;
public class SplashActivity extends ActivityBase{
    /**
     * Entry activity: performs one-time first-launch setup, forwards any
     * incoming ACTION_SEND share text to MainActivity, then finishes
     * immediately without showing any UI of its own.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Run one-time initialisation on the very first application start.
        if(Pref.IS_FIRST_LAUNCH.getBool(getApplicationContext())) {
            onFirstLaunch();
            Pref.IS_FIRST_LAUNCH.set(getApplicationContext(), false);
        }
        Intent main = new Intent(getApplication(), MainActivity_.class);
        onLaunch(main);
        startActivity(main);
        finish();
    }

    // Hook invoked on every launch before MainActivity is started.
    private void onLaunch(Intent main){
        checkIntentFilter(main);
    }

    // If this activity was started via ACTION_SEND, copy the shared text
    // into the outgoing intent so MainActivity can pre-fill its input.
    private void checkIntentFilter(Intent main){
        Intent intent = getIntent();
        String action = intent.getAction();
        if (Intent.ACTION_SEND.equals(action)) {
            Bundle extras = intent.getExtras();
            if (extras != null) {
                CharSequence ext = extras.getCharSequence(Intent.EXTRA_TEXT);
                if (ext != null) {
                    L.d((String) ext);
                    main.putExtra("fromShareInput", ext);
                }
            }
        }
    }

    // Placeholder for first-launch work; intentionally empty here.
    private void onFirstLaunch(){
    }
}
| pistatium/houkago_app | android/src/com/appspot/pistatium/houkagoapp/SplashActivity.java | Java | apache-2.0 | 1,266 |
// Copyright Dirk Lemstra https://github.com/dlemstra/Magick.NET.
// Licensed under the Apache License, Version 2.0.
namespace ImageMagick
{
    /// <summary>
    /// Exif value that holds an array of signed bytes (EXIF type SBYTE).
    /// </summary>
    internal sealed class ExifSignedByteArray : ExifArrayValue<sbyte>
    {
        /// <summary>
        /// Initializes a new instance for the specified EXIF tag.
        /// </summary>
        /// <param name="tag">The EXIF tag this value belongs to.</param>
        public ExifSignedByteArray(ExifTagValue tag)
            : base(tag)
        {
        }

        /// <summary>
        /// Gets the EXIF data type; always <see cref="ExifDataType.SignedByte"/>.
        /// </summary>
        public override ExifDataType DataType
            => ExifDataType.SignedByte;
    }
}
| dlemstra/Magick.NET | src/Magick.NET.Core/Profiles/Exif/Values/ExifSignedByteArray.cs | C# | apache-2.0 | 413 |
/*
Copyright 2009 Semantic Discovery, Inc. (www.semanticdiscovery.com)
This file is part of the Semantic Discovery Toolkit.
The Semantic Discovery Toolkit is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The Semantic Discovery Toolkit is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with The Semantic Discovery Toolkit. If not, see <http://www.gnu.org/licenses/>.
*/
package org.sd.xml;
import javax.xml.transform.*;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.parsers.*;
import org.xml.sax.*;
import org.w3c.dom.*;
import java.util.*;
import java.io.*;
/**
* XML Pretty-Printing utility functions
* <p>
* @author Ryan McGuire
*/
public class XmlPrettyPrint {
/**
* Dump an XML document object to an OutputStream in a pretty format
*
* @param doc The XML document object
* @param out The OutputStream to dump to
*/
public final static void serialize(Document doc, OutputStream out) throws TransformerException {
TransformerFactory tfactory = TransformerFactory.newInstance();
Transformer serializer;
serializer = tfactory.newTransformer();
//Setup indenting to "pretty print"
serializer.setOutputProperty(OutputKeys.INDENT, "yes");
serializer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
serializer.transform(new DOMSource(doc), new StreamResult(out));
}
/**
* Format an XML string in a pretty format
*
* @param xml The XML String
*
* @return a pretty formated xml String
*/
public final static String getString(String xml) throws ParserConfigurationException, SAXException, IOException, TransformerException{
ByteArrayOutputStream pretty_xml = new ByteArrayOutputStream();
//Prepare the xml string for input
StringReader reader = new StringReader(xml);
InputSource input_source = new InputSource(reader);
DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document doc = builder.parse(input_source);
serialize(doc, pretty_xml);
return pretty_xml.toString();
}
}
| KoehlerSB747/sd-tools | src/main/java/org/sd/xml/XmlPrettyPrint.java | Java | apache-2.0 | 2,616 |
package org.minazou67.sample.se8;
import java.util.Optional;
public class OptionalSample {
    public static class MyClass {
        // Method that returns an Optional instead of a possibly-null String.
        public static Optional<String> getHoge(String value) {
            // ofNullable yields an empty Optional when value is null.
            return Optional.ofNullable(value);
        }
    }
    public static void main(String[] args) {
        // orElse: eager fallback, used only when the Optional is empty.
        System.out.println(MyClass.getHoge("あ").orElse("い"));
        // orElseGet: lazy fallback supplier, invoked only when empty.
        System.out.println(MyClass.getHoge(null).orElseGet(() -> "う"));
        // ifPresent: consumer runs only when a value is present.
        MyClass.getHoge("え").ifPresent(hoge -> System.out.println(hoge));
    }
}
| minazou67/java-se-re-introduction-sample | src/main/java/org/minazou67/sample/se8/OptionalSample.java | Java | apache-2.0 | 565 |
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bisect
import collections
import copy
import functools
import time
from oslo_config import cfg
from oslo_log import log as logging
import six
from stackalytics.processor import launchpad_utils
from stackalytics.processor import user_processor
from stackalytics.processor import utils
CONF = cfg.CONF  # global oslo.config configuration object
LOG = logging.getLogger(__name__)  # module-level logger
class RecordProcessor(object):
def __init__(self, runtime_storage_inst):
    # Storage backend plus cached lookup tables (company domains, releases).
    self.runtime_storage_inst = runtime_storage_inst
    self.domains_index = runtime_storage_inst.get_by_key('companies')
    self.releases = runtime_storage_inst.get_by_key('releases')
    self.releases_dates = [r['end_date'] for r in self.releases]
    # Module tables are built lazily by _get_modules().
    self.modules = None
    self.alias_module_map = None
def _get_release(self, timestamp):
    """Map a timestamp to a release name via binary search on end dates."""
    release_index = bisect.bisect(self.releases_dates, timestamp)
    if release_index >= len(self.releases):
        LOG.warning('Timestamp %s is beyond releases boundaries, the last '
                    'release will be used. Please consider adding a '
                    'new release into default_data.json', timestamp)
        release_index = len(self.releases) - 1
    return self.releases[release_index]['release_name']
def _get_modules(self):
    """Lazily build and cache (module set, alias->module map).

    Keeps only the shortest of any pair of module names where one is a
    substring of the other, so lookups match the most general name.
    """
    if self.modules is None:
        self.modules = set()
        self.alias_module_map = dict()
        for repo in utils.load_repos(self.runtime_storage_inst):
            module = repo['module'].lower()
            module_aliases = repo.get('aliases') or []
            add = True
            for module_name in ([module] + module_aliases):
                for m in self.modules:
                    if module_name.find(m) >= 0:
                        # a shorter existing name already covers this one
                        add = False
                        break
                    if m.find(module_name) >= 0:
                        # this name is shorter; replace the longer one
                        self.modules.remove(m)
                        break
                if add:
                    self.modules.add(module_name)
            for alias in module_aliases:
                self.alias_module_map[alias] = module
    return self.modules, self.alias_module_map
def _need_to_fetch_launchpad(self):
    # True when user profiles should be resolved against Launchpad.
    return CONF.fetching_user_source == 'launchpad'
def _update_user(self, record):
    """Resolve or create the user profile matching a record.

    Looks the user up by email, gerrit id, zanata id and launchpad id,
    optionally querying Launchpad to fill gaps, and merges any duplicate
    profiles that refer to the same person.
    """
    email = record.get('author_email')
    user_e = user_processor.load_user(
        self.runtime_storage_inst, email=email) or {}

    user_name = record.get('author_name')
    launchpad_id = record.get('launchpad_id')
    if (self._need_to_fetch_launchpad() and email and (not user_e) and
            (not launchpad_id) and (not user_e.get('launchpad_id'))):
        # query LP
        launchpad_id, lp_user_name = launchpad_utils.query_lp_info(email)
        if lp_user_name:
            user_name = lp_user_name

    gerrit_id = record.get('gerrit_id')
    if gerrit_id:
        user_g = user_processor.load_user(
            self.runtime_storage_inst, gerrit_id=gerrit_id) or {}
        if (self._need_to_fetch_launchpad() and (not user_g) and
                (not launchpad_id) and (not user_e.get('launchpad_id'))):
            # query LP: guess that the gerrit username is the LP id and
            # accept the guess only if the display names match
            guessed_lp_id = gerrit_id
            lp_user_name = launchpad_utils.query_lp_user_name(
                guessed_lp_id)
            if lp_user_name == user_name:
                launchpad_id = guessed_lp_id
    else:
        user_g = {}

    zanata_id = record.get('zanata_id')
    if zanata_id:
        user_z = user_processor.load_user(
            self.runtime_storage_inst, zanata_id=zanata_id) or {}
        if (self._need_to_fetch_launchpad() and (not user_z) and
                (not launchpad_id) and (not user_e.get('launchpad_id'))):
            # query LP: a resolved name different from the id means the
            # zanata id is a real LP account
            guessed_lp_id = zanata_id
            user_name = launchpad_utils.query_lp_user_name(guessed_lp_id)
            if user_name != guessed_lp_id:
                launchpad_id = guessed_lp_id
    else:
        user_z = {}

    user_l = user_processor.load_user(
        self.runtime_storage_inst, launchpad_id=launchpad_id) or {}

    if user_processor.are_users_same([user_e, user_l, user_g, user_z]):
        # If sequence numbers are set and the same, merge is not needed
        return user_e

    user = user_processor.create_user(
        self.domains_index, launchpad_id, email, gerrit_id, zanata_id,
        user_name)

    if user_e or user_l or user_g or user_z:
        # merge between existing profiles and a new one
        user, users_to_delete = user_processor.merge_user_profiles(
            self.domains_index, [user_e, user_l, user_g, user_z, user])

        # delete all unneeded profiles
        user_processor.delete_users(
            self.runtime_storage_inst, users_to_delete)
    else:
        # create new profile
        if (self._need_to_fetch_launchpad() and not user_name):
            user_name = launchpad_utils.query_lp_user_name(launchpad_id)
            if user_name:
                user['user_name'] = user_name
        LOG.debug('Created new user: %s', user)

    user_processor.store_user(self.runtime_storage_inst, user)
    LOG.debug('Stored user: %s', user)

    return user
def _update_record_and_user(self, record):
    """Stamp a record with the resolved user id, name and company."""
    user = self._update_user(record)

    record['user_id'] = user['user_id']
    if user.get('user_name'):
        record['author_name'] = user['user_name']

    company, policy = user_processor.get_company_for_date(
        user['companies'], record['date'])

    if not user.get('static'):
        # for auto-generated profiles affiliation may be overridden
        if company != '*robots' and policy == 'open':
            company = (user_processor.get_company_by_email(
                self.domains_index, record.get('author_email')) or company)

    record['company_name'] = company
def _process_commit(self, record):
    """Yield one record per author (including co-authors) of a commit."""
    record['primary_key'] = record['commit_id']
    record['loc'] = record['lines_added'] + record['lines_deleted']
    record['author_email'] = record['author_email'].lower()
    record['commit_date'] = record['date']

    coauthors = record.get('coauthor')
    if not coauthors:
        self._update_record_and_user(record)
        # robot commits are dropped entirely
        if record['company_name'] != '*robots':
            yield record
    else:
        # make sure the main author is part of the co-author list
        if record['author_email'] not in [
                c['author_email'] for c in coauthors]:
            coauthors.append({'author_name': record['author_name'],
                              'author_email': record['author_email']})
        for coauthor in coauthors:
            coauthor['date'] = record['date']
            self._update_record_and_user(coauthor)

        # emit a copy of the commit per co-author with a unique key
        for coauthor in coauthors:
            new_record = copy.deepcopy(record)
            new_record.update(coauthor)
            new_record['primary_key'] += coauthor['author_email']
            yield new_record
def _make_review_record(self, record):
    """Build the 'review' record for a gerrit change owned by a user."""
    # copy everything except patchsets and flatten user data
    review = dict([(k, v) for k, v in six.iteritems(record)
                   if k not in ['patchSets', 'owner', 'createdOn',
                                'comments']])
    owner = record['owner']

    review['primary_key'] = review['id']
    if owner.get('username'):
        review['gerrit_id'] = owner['username']
    review['author_name'] = (owner.get('name') or owner.get('username')
                             or 'Anonymous Coward')  # do it like gerrit
    if owner.get('email'):
        review['author_email'] = owner['email'].lower()
    review['date'] = record['createdOn']

    patch_sets = record.get('patchSets', [])
    review['updated_on'] = review['date']
    if patch_sets:
        patch = patch_sets[-1]
        if 'approvals' in patch:
            # worst (lowest) approval of the last patch set
            review['value'] = min([int(p['value'])
                                   for p in patch['approvals']])
            review['updated_on'] = patch['approvals'][0]['grantedOn']
        else:
            review['updated_on'] = patch['createdOn']

    if 'value' not in review:
        review['value'] = 0

    self._update_record_and_user(review)
    return review
def _make_patch_record(self, review, patch):
    """Build the 'patch' record for one uploaded patch set of a review."""
    patch_record = dict()
    patch_record['record_type'] = 'patch'
    patch_record['primary_key'] = utils.get_patch_id(
        review['id'], patch['number'])
    patch_record['number'] = patch['number']
    patch_record['date'] = patch['createdOn']
    uploader = patch['uploader']
    if uploader.get('username'):
        patch_record['gerrit_id'] = uploader['username']
    patch_record['author_name'] = (uploader.get('name')
                                   or uploader.get('username')
                                   or 'Anonymous Coward')
    if uploader.get('email'):
        patch_record['author_email'] = uploader['email'].lower()
    patch_record['module'] = review['module']
    patch_record['branch'] = review['branch']
    patch_record['review_id'] = review['id']

    self._update_record_and_user(patch_record)
    return patch_record
def _make_mark_record(self, review, patch, approval):
    """Build the 'mark' record for one approval vote on a patch set."""
    # copy everything and flatten user data
    mark = dict([(k, v) for k, v in six.iteritems(approval)
                 if k not in ['by', 'grantedOn', 'value', 'description']])
    reviewer = approval['by']

    mark['record_type'] = 'mark'
    mark['value'] = int(approval['value'])
    mark['date'] = approval['grantedOn']
    mark['primary_key'] = (review['id'] + str(mark['date']) + mark['type'])
    mark['gerrit_id'] = reviewer['username']
    mark['author_name'] = reviewer.get('name') or reviewer.get('username')
    mark['author_email'] = reviewer['email'].lower()
    mark['module'] = review['module']
    mark['branch'] = review['branch']
    mark['review_id'] = review['id']
    mark['patch'] = int(patch['number'])

    if reviewer['username'] == patch['uploader'].get('username'):
        # reviewer is the same as author of the patch
        mark['type'] = 'Self-%s' % mark['type']

    self._update_record_and_user(mark)
    return mark
def _process_review(self, record):
    """Process a review.

    Review spawns into records of three types:
      * review - records that a user created review request
      * patch - records that a user submitted another patch set
      * mark - records that a user set approval mark to given review
    """
    owner = record['owner']
    if 'email' in owner or 'username' in owner:
        yield self._make_review_record(record)

    for patch in record.get('patchSets', []):
        if (('email' in patch['uploader']) or
                ('username' in patch['uploader'])):
            yield self._make_patch_record(record, patch)

        if 'approvals' not in patch:
            continue  # not reviewed by anyone
        for approval in patch['approvals']:
            if approval['type'] not in ('Code-Review', 'Workflow'):
                continue  # keep only Code-Review and Workflow
            if ('email' not in approval['by'] or
                    'username' not in approval['by']):
                continue  # ignore
            yield self._make_mark_record(record, patch, approval)

    # check for abandon action
    if record.get('status') == 'ABANDONED':
        for comment in reversed(record.get('comments') or []):
            if comment['message'] == 'Abandoned':
                # synthesize an Abandon mark attributed to the commenter
                action = dict(type='Abandon', value=0)
                action['by'] = comment['reviewer']
                action['grantedOn'] = comment['timestamp']

                if ('email' not in action['by'] or
                        'username' not in action['by']):
                    continue  # ignore
                yield self._make_mark_record(
                    record, record['patchSets'][-1], action)
def _guess_module(self, record):
    """Infer record['module'] from the subject line, defaulting to 'unknown'."""
    subject = record['subject'].lower()
    pos = len(subject)
    best_guess_module = None

    # pick the module whose name appears earliest in the subject
    modules, alias_module_map = self._get_modules()
    for module in modules:
        find = subject.find(module)
        if (find >= 0) and (find < pos):
            pos = find
            best_guess_module = module

    if best_guess_module:
        # only trust the guess if it appears bracketed (e.g. "[nova]")
        # or the record has no module yet
        if (((pos > 0) and (subject[pos - 1] == '[')) or
                (not record.get('module'))):
            record['module'] = best_guess_module

    if not record.get('module'):
        record['module'] = 'unknown'
    elif record['module'] in alias_module_map:
        record['module'] = alias_module_map[record['module']]
def _process_email(self, record):
    """Normalize a mailing-list email record; trim or drop its body."""
    record['primary_key'] = record['message_id']
    record['author_email'] = record['author_email'].lower()

    self._update_record_and_user(record)
    self._guess_module(record)

    # keep the body only for blueprint-related emails, capped at 4000 chars
    if not record.get('blueprint_id'):
        del record['body']
    elif len(record['body']) > 4000:
        record['body'] = record['body'][:4000] + '...'

    yield record
def _process_blueprint(self, record):
    """Yield a drafted-blueprint record and, if implemented, a completed one."""
    bpd_author = record.get('drafter') or record.get('owner')

    # strip launchpad '*_link' fields from the copies
    bpd = dict([(k, v) for k, v in six.iteritems(record)
                if k.find('_link') < 0])
    bpd['record_type'] = 'bpd'
    bpd['primary_key'] = 'bpd:' + record['id']
    bpd['launchpad_id'] = bpd_author
    bpd['date'] = record['date_created']
    bpd['web_link'] = record.get('web_link')

    self._update_record_and_user(bpd)
    yield bpd

    if (record.get('assignee') and record['date_completed'] and
            record.get('implementation_status') == 'Implemented'):
        bpc = dict([(k, v) for k, v in six.iteritems(record)
                    if k.find('_link') < 0])
        bpc['record_type'] = 'bpc'
        bpc['primary_key'] = 'bpc:' + record['id']
        bpc['launchpad_id'] = record['assignee']
        bpc['date'] = record['date_completed']

        self._update_record_and_user(bpc)
        yield bpc
def _process_bug(self, record):
    """Yield a bug-filed record and, for fixed bugs, a bug-resolved record."""
    bug_created = record.copy()
    bug_created['primary_key'] = 'bugf:' + record['id']
    bug_created['record_type'] = 'bugf'
    bug_created['launchpad_id'] = record.get('owner')
    bug_created['date'] = record['date_created']

    self._update_record_and_user(bug_created)
    yield bug_created

    FIXED_BUGS = ['Fix Committed', 'Fix Released']
    if (('date_fix_committed' in record or 'date_fix_released' in record)
            and record['status'] in FIXED_BUGS):
        bug_fixed = record.copy()
        bug_fixed['primary_key'] = 'bugr:' + record['id']
        bug_fixed['record_type'] = 'bugr'
        bug_fixed['launchpad_id'] = record.get('assignee') or '*unassigned'
        # It appears that launchpad automatically sets the
        # date_fix_committed field when a bug moves from an open
        # state to Fix Released, however it isn't clear that this
        # is documented. So, we take the commit date if it is
        # present or the release date if no commit date is
        # present.
        bug_fixed['date'] = (
            record.get('date_fix_committed') or
            record['date_fix_released']
        )

        self._update_record_and_user(bug_fixed)
        yield bug_fixed
def _process_member(self, record):
    """Turn a foundation-membership entry into a record plus a static user."""
    user_id = user_processor.make_user_id(member_id=record['member_id'])
    record['primary_key'] = user_id
    record['date'] = utils.member_date_to_timestamp(record['date_joined'])
    record['author_name'] = record['member_name']
    record['module'] = 'unknown'
    company_draft = record['company_draft']

    company_name = self.domains_index.get(utils.normalize_company_name(
        company_draft)) or (utils.normalize_company_draft(company_draft))

    # author_email is a key to create new user
    record['author_email'] = user_id
    record['company_name'] = company_name
    # _update_record_and_user function will create new user if needed
    self._update_record_and_user(record)
    # NOTE(review): re-applied because _update_record_and_user overwrites
    # company_name from the resolved profile; the third assignment near the
    # end looks redundant — confirm before removing.
    record['company_name'] = company_name

    user = user_processor.load_user(self.runtime_storage_inst,
                                    user_id=user_id)
    user['user_name'] = record['author_name']
    user['companies'] = [{
        'company_name': company_name,
        'end_date': 0,
    }]
    user['company_name'] = company_name
    user_processor.store_user(self.runtime_storage_inst, user)

    record['company_name'] = company_name

    yield record
def _process_translation(self, record):
    """Turn a Zanata translation entry into a 'tr' record."""
    # todo split translation and approval
    translation = record.copy()
    user_id = user_processor.make_user_id(zanata_id=record['zanata_id'])

    translation['record_type'] = 'tr'
    translation['primary_key'] = '%s:%s:%s:%s' % (
        user_id, record['module'], record['date'], record['branch'])
    translation['author_name'] = user_id

    # following fields are put into standard fields stored in dashboard mem
    translation['loc'] = record['translated']
    translation['value'] = record['language']

    self._update_record_and_user(translation)
    yield translation
def _renew_record_date(self, record):
    """Refresh the derived 'week' field and fill 'release' when unset."""
    record['week'] = utils.timestamp_to_week(record['date'])
    # Missing or falsy release both trigger a recomputation.
    if not record.get('release'):
        record['release'] = self._get_release(record['date'])
def process(self, record_iterator):
    """Dispatch each raw record to its type-specific processor and yield
    the resulting dashboard records with week/release fields filled in."""
    PROCESSORS = {
        'commit': self._process_commit,
        'review': self._process_review,
        'email': self._process_email,
        'bp': self._process_blueprint,
        'bug': self._process_bug,
        'member': self._process_member,
        'i18n': self._process_translation,
    }

    for record in record_iterator:
        for r in PROCESSORS[record['record_type']](record):
            self._renew_record_date(r)
            yield r
def _update_records_with_releases(self, release_index):
    """Pipeline stage: re-resolve each record's release; yield changed ones."""
    LOG.info('Update records with releases')

    def record_handler(record):
        # commits may have an explicit release in the index; everything
        # else is resolved by date
        if (record['record_type'] == 'commit'
                and record['primary_key'] in release_index):
            release = release_index[record['primary_key']]
        else:
            release = self._get_release(record['date'])

        if record['release'] != release:
            record['release'] = release
            yield record

    yield record_handler
def _update_records_with_user_info(self):
    """Pipeline stage: refresh user/company info; yield records that changed."""
    LOG.info('Update user info in records')

    def record_handler(record):
        company_name = record['company_name']
        user_id = record['user_id']
        author_name = record['author_name']
        self._update_record_and_user(record)

        if ((record['company_name'] != company_name) or
                (record['user_id'] != user_id) or
                (record['author_name'] != author_name)):
            LOG.debug('User info (%(id)s, %(name)s, %(company)s) has '
                      'changed in record %(record)s',
                      {'id': user_id, 'name': author_name,
                       'company': company_name, 'record': record})
            yield record

    yield record_handler
def _update_commits_with_merge_date(self):
    """Two-pass pipeline stage: re-date commits to their review merge time."""
    LOG.info('Update commits with merge date')

    change_id_to_date = {}

    def record_handler_pass_1(record):
        # collect merge timestamps of all merged reviews
        if (record['record_type'] == 'review' and
                record.get('status') == 'MERGED'):
            change_id_to_date[record['id']] = record['lastUpdated']

    yield record_handler_pass_1

    LOG.info('Update commits with merge date: pass 2')

    def record_handler_pass_2(record):
        if record['record_type'] == 'commit':
            # only commits with exactly one Change-Id can be matched safely
            change_id_list = record.get('change_id')
            if change_id_list and len(change_id_list) == 1:
                change_id = change_id_list[0]
                if change_id in change_id_to_date:
                    old_date = record['date']
                    if old_date != change_id_to_date[change_id]:
                        record['date'] = change_id_to_date[change_id]
                        self._renew_record_date(record)
                        LOG.debug('Date %(date)s has changed in record '
                                  '%(record)s', {'date': old_date,
                                                 'record': record})
                        yield record

    yield record_handler_pass_2
def _update_blueprints_with_mention_info(self):
    """Two-pass pipeline stage: count blueprint mentions and prune dead refs."""
    LOG.info('Process blueprints and calculate mention info')

    valid_blueprints = {}
    mentioned_blueprints = {}

    def record_handler_pass_1(record):
        # tally how often and how recently each blueprint id is mentioned
        for bp in record.get('blueprint_id', []):
            if bp in mentioned_blueprints:
                mentioned_blueprints[bp]['count'] += 1
                if record['date'] > mentioned_blueprints[bp]['date']:
                    mentioned_blueprints[bp]['date'] = record['date']
            else:
                mentioned_blueprints[bp] = {
                    'count': 1,
                    'date': record['date']
                }
        # remember blueprints that actually exist (drafted/completed)
        if record['record_type'] in ['bpd', 'bpc']:
            valid_blueprints[record['id']] = {
                'primary_key': record['primary_key'],
                'count': 0,
                'date': record['date']
            }

    yield record_handler_pass_1

    # fold the mention tallies into the valid-blueprint table
    for bp_name, bp in six.iteritems(valid_blueprints):
        if bp_name in mentioned_blueprints:
            bp['count'] = mentioned_blueprints[bp_name]['count']
            bp['date'] = mentioned_blueprints[bp_name]['date']
        else:
            bp['count'] = 0
            bp['date'] = 0

    LOG.info('Process blueprints and calculate mention info: pass 2')

    def record_handler_pass_2(record):
        need_update = False

        # drop references to blueprints that don't exist
        valid_bp = set([])
        for bp in record.get('blueprint_id', []):
            if bp in valid_blueprints:
                valid_bp.add(bp)
            else:
                LOG.debug('Update record %s: removed invalid bp: %s',
                          record['primary_key'], bp)
                need_update = True
        record['blueprint_id'] = list(valid_bp)

        if record['record_type'] in ['bpd', 'bpc']:
            bp = valid_blueprints[record['id']]
            if ((record.get('mention_count') != bp['count']) or
                    (record.get('mention_date') != bp['date'])):
                record['mention_count'] = bp['count']
                record['mention_date'] = bp['date']
                LOG.debug('Update record %s: mention stats: (%s:%s)',
                          record['primary_key'], bp['count'], bp['date'])
                need_update = True

        if need_update:
            yield record

    yield record_handler_pass_2
def _determine_core_contributors(self):
    """Pipeline stage: mark users who cast +2/-2 votes in the last quarter
    as core for the affected (module, branch) pairs."""
    LOG.info('Determine core contributors')

    module_branches = collections.defaultdict(set)
    quarter_ago = int(time.time()) - 60 * 60 * 24 * 30 * 3  # a quarter ago

    def record_handler(record):
        if (record['record_type'] == 'mark' and
                record['date'] > quarter_ago and
                record['value'] in [2, -2]):
            module_branch = (record['module'], record['branch'])
            user_id = record['user_id']
            module_branches[user_id].add(module_branch)

    yield record_handler

    # persist the recomputed core status for each user
    for user in self.runtime_storage_inst.get_all_users():
        core_old = user.get('core')
        user_module_branch = module_branches.get(user['user_id'])
        if user_module_branch:
            user['core'] = list(user_module_branch)
        elif user.get('core'):
            del user['core']

        if user.get('core') != core_old:
            user_processor.store_user(self.runtime_storage_inst, user)
def _close_patch(self, cores, marks):
if len(marks) < 2:
return
core_mark = 0
for mark in sorted(marks, key=lambda x: x['date'], reverse=True):
if core_mark == 0:
if (mark['module'], mark['branch'], mark['user_id']) in cores:
# mark is from core engineer
core_mark = mark['value']
continue
disagreement = ((core_mark != 0) and
((core_mark < 0 < mark['value']) or
(core_mark > 0 > mark['value'])))
old_disagreement = mark.get('disagreement', False)
mark['disagreement'] = disagreement
if old_disagreement != disagreement:
yield mark
def _update_marks_with_disagreement(self):
    """Pipeline stage: group Code-Review marks per review/patch and flag
    marks that disagree with core reviewers via _close_patch()."""
    LOG.info('Process marks to find disagreements')

    cores = set()
    for user in self.runtime_storage_inst.get_all_users():
        for (module, branch) in (user.get('core') or []):
            cores.add((module, branch, user['user_id']))

    # map from review_id to current patch and list of marks
    marks_per_patch = collections.defaultdict(
        lambda: {'patch_number': 0, 'marks': []})

    def record_handler(record):
        if (record['record_type'] == 'mark' and
                record['type'] == 'Code-Review'):
            review_id = record['review_id']
            patch_number = record['patch']

            if review_id in marks_per_patch:
                # review is already seen, check if patch is newer
                if (marks_per_patch[review_id]['patch_number'] <
                        patch_number):
                    # the patch is new, close the current
                    for processed in self._close_patch(
                            cores, marks_per_patch[review_id]['marks']):
                        yield processed
                    del marks_per_patch[review_id]

            marks_per_patch[review_id]['patch_number'] = patch_number
            marks_per_patch[review_id]['marks'].append(record)

    yield record_handler

    # purge the rest
    for marks_patch in marks_per_patch.values():
        self.runtime_storage_inst.set_records(
            self._close_patch(cores, marks_patch['marks']))
def _update_members_company_name(self):
    """Pipeline stage: re-normalize member company names and sync profiles."""
    LOG.info('Update members with company names')

    def record_handler(record):
        if record['record_type'] != 'member':
            return

        company_draft = record['company_draft']
        company_name = self.domains_index.get(
            utils.normalize_company_name(company_draft)) or (
            utils.normalize_company_draft(company_draft))

        if company_name == record['company_name']:
            return

        LOG.debug('Update record %s, company name changed to %s',
                  record, company_name)
        record['company_name'] = company_name

        yield record

        # keep the stored user profile in sync with the new affiliation
        user = user_processor.load_user(self.runtime_storage_inst,
                                        user_id=record['user_id'])
        LOG.debug('Update user %s, company name changed to %s',
                  user, company_name)
        user['companies'] = [{
            'company_name': company_name,
            'end_date': 0,
        }]
        user_processor.store_user(self.runtime_storage_inst, user)

    yield record_handler
def _update_commits_with_module_alias(self):
    """Pipeline stage: rewrite commit module aliases to canonical names."""
    LOG.info('Update record with aliases')

    modules, alias_module_map = self._get_modules()

    def record_handler(record):
        if record['record_type'] != 'commit':
            return
        rec_module = record.get('module', None)
        if rec_module and rec_module in alias_module_map:
            record['module'] = alias_module_map[rec_module]
            yield record

    yield record_handler
def post_processing(self, release_index):
    """Run all post-processing stages over stored records as one pipeline."""
    processors = [
        self._update_records_with_user_info,
        self._update_commits_with_merge_date,
        functools.partial(self._update_records_with_releases,
                          release_index),
        self._update_commits_with_module_alias,
        self._update_blueprints_with_mention_info,
        self._determine_core_contributors,
        self._update_members_company_name,
        self._update_marks_with_disagreement,
    ]

    pipeline_processor = utils.make_pipeline_processor(processors)

    self.runtime_storage_inst.set_records(pipeline_processor(
        self.runtime_storage_inst.get_all_records))
| 0xf2/stackalytics | stackalytics/processor/record_processor.py | Python | apache-2.0 | 30,623 |
package com.thesecretpie.shader.test;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.InputAdapter;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.graphics.Mesh;
import com.badlogic.gdx.graphics.glutils.ShaderProgram;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.math.Vector3;
import com.thesecretpie.shader.ShaderManager;
/**
 * Minimal ShaderManager demo: renders a rotating cube with the "empty"
 * shader and reloads shaders when R is pressed.
 */
public class SimpleTest extends InputAdapter implements ApplicationListener {

    ShaderManager sm;
    Mesh cube;
    AssetManager am = new AssetManager();
    Matrix4 projection = new Matrix4();
    Matrix4 view = new Matrix4();
    Matrix4 model = new Matrix4();
    Matrix4 combined = new Matrix4();
    Vector3 axis = new Vector3(1, 0, 1).nor(); // rotation axis, normalized
    float angle = 45;

    @Override
    public void create() {
        Gdx.input.setInputProcessor(this);
        ShaderProgram.pedantic = false;
        sm = new ShaderManager("assets/shaders", am);
        sm.add("empty", "empty.vert", "empty.frag");
        sm.add("default", "default.vert", "default.frag");
        cube = Shapes.genCube();
    }

    @Override
    public void dispose() {
        sm.dispose();
        am.dispose();
        // BUGFIX: Mesh wraps native vertex/index buffers that the GC never
        // reclaims; it must be disposed explicitly to avoid a resource leak.
        if (cube != null) {
            cube.dispose();
        }
    }

    @Override
    public void pause() {
        // TODO Auto-generated method stub
    }

    @Override
    public void render() {
        // advance the rotation (40 degrees per second) and rebuild MVP
        angle += Gdx.graphics.getDeltaTime() * 40.0f;
        float aspect = Gdx.graphics.getWidth() / (float)Gdx.graphics.getHeight();
        projection.setToProjection(1.0f, 20.0f, 60.0f, aspect);
        view.idt().trn(0, 0, -2.0f);
        model.setToRotation(axis, angle);
        combined.set(projection).mul(view).mul(model);

        Gdx.gl20.glViewport(0, 0, Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
        Gdx.gl20.glClear(GL20.GL_COLOR_BUFFER_BIT);

        sm.begin("empty");
        sm.setUniformMatrix("u_worldView", combined);
        cube.render(sm.getCurrent(), GL20.GL_TRIANGLES);
        sm.end();
    }

    @Override
    public void resize(int width, int height) {
    }

    @Override
    public void resume() {
        // TODO Auto-generated method stub
    }

    @Override
    public boolean keyDown(int keycode) {
        switch (keycode) {
        case Keys.R:
            // hot-reload all registered shaders from disk
            sm.reload(); break;
        }
        return false;
    }
}
| Rsgm/gdx-shaders | gdx-shaders-test/src/com/thesecretpie/shader/test/SimpleTest.java | Java | apache-2.0 | 2,239 |
/*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cache.config;
import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import javax.cache.annotation.CacheDefaults;
import javax.cache.annotation.CacheKey;
import javax.cache.annotation.CachePut;
import javax.cache.annotation.CacheRemove;
import javax.cache.annotation.CacheRemoveAll;
import javax.cache.annotation.CacheResult;
import javax.cache.annotation.CacheValue;
import org.springframework.cache.Cache;
import org.springframework.cache.interceptor.SimpleKeyGenerator;
import org.springframework.contextsupport.testfixture.cache.TestableCacheKeyGenerator;
import org.springframework.contextsupport.testfixture.cache.TestableCacheResolverFactory;
import org.springframework.contextsupport.testfixture.jcache.JCacheableService;
/**
 * Repository sample with a @CacheDefaults annotation
 *
 * <p>Note: copy/pasted from its original compilation because it needs to be
 * processed by the AspectJ compiler to weave the required aspects.
 *
 * @author Stephane Nicoll
 */
@CacheDefaults(cacheName = "default")
public class AnnotatedJCacheableService implements JCacheableService<Long> {

	// Each real invocation returns a fresh value, so callers can detect
	// whether a call was served from the cache (same value) or re-invoked.
	private final AtomicLong counter = new AtomicLong();

	// Incremented by throwException/throwCheckedException; exposed via
	// exceptionInvocations() so callers can count exception-raising calls.
	private final AtomicLong exceptionCounter = new AtomicLong();

	// Direct handle on the "default" cache; the early* methods use it to
	// assert the cache state from inside the method body.
	private final Cache defaultCache;

	public AnnotatedJCacheableService(Cache defaultCache) {
		this.defaultCache = defaultCache;
	}

	@Override
	@CacheResult
	public Long cache(String id) {
		return counter.getAndIncrement();
	}

	@Override
	@CacheResult
	public Long cacheNull(String id) {
		return null;
	}

	@Override
	@CacheResult(exceptionCacheName = "exception", nonCachedExceptions = NullPointerException.class)
	public Long cacheWithException(@CacheKey String id, boolean matchFilter) {
		throwException(matchFilter);
		return 0L; // Never reached
	}

	@Override
	@CacheResult(exceptionCacheName = "exception", nonCachedExceptions = NullPointerException.class)
	public Long cacheWithCheckedException(@CacheKey String id, boolean matchFilter) throws IOException {
		throwCheckedException(matchFilter);
		return 0L; // Never reached
	}

	@Override
	@CacheResult(skipGet = true)
	public Long cacheAlwaysInvoke(String id) {
		return counter.getAndIncrement();
	}

	@Override
	@CacheResult
	public Long cacheWithPartialKey(@CacheKey String id, boolean notUsed) {
		return counter.getAndIncrement();
	}

	@Override
	@CacheResult(cacheResolverFactory = TestableCacheResolverFactory.class)
	public Long cacheWithCustomCacheResolver(String id) {
		return counter.getAndIncrement();
	}

	@Override
	@CacheResult(cacheKeyGenerator = TestableCacheKeyGenerator.class)
	public Long cacheWithCustomKeyGenerator(String id, String anotherId) {
		return counter.getAndIncrement();
	}

	@Override
	@CachePut
	public void put(String id, @CacheValue Object value) {
	}

	@Override
	@CachePut(cacheFor = UnsupportedOperationException.class)
	public void putWithException(@CacheKey String id, @CacheValue Object value, boolean matchFilter) {
		throwException(matchFilter);
	}

	@Override
	@CachePut(afterInvocation = false)
	public void earlyPut(String id, @CacheValue Object value) {
		// afterInvocation = false: the value must already be in the cache by
		// the time the method body runs; verify that assumption here.
		Object key = SimpleKeyGenerator.generateKey(id);
		Cache.ValueWrapper valueWrapper = defaultCache.get(key);
		if (valueWrapper == null) {
			throw new AssertionError("Excepted value to be put in cache with key " + key);
		}
		Object actual = valueWrapper.get();
		if (value != actual) { // instance check on purpose
			throw new AssertionError("Wrong value set in cache with key " + key + ". " +
					"Expected=" + value + ", but got=" + actual);
		}
	}

	@Override
	@CachePut(afterInvocation = false)
	public void earlyPutWithException(@CacheKey String id, @CacheValue Object value, boolean matchFilter) {
		throwException(matchFilter);
	}

	@Override
	@CacheRemove
	public void remove(String id) {
	}

	@Override
	@CacheRemove(noEvictFor = NullPointerException.class)
	public void removeWithException(@CacheKey String id, boolean matchFilter) {
		throwException(matchFilter);
	}

	@Override
	@CacheRemove(afterInvocation = false)
	public void earlyRemove(String id) {
		// afterInvocation = false: the entry must already be gone when the
		// method body runs.
		Object key = SimpleKeyGenerator.generateKey(id);
		Cache.ValueWrapper valueWrapper = defaultCache.get(key);
		if (valueWrapper != null) {
			throw new AssertionError("Value with key " + key + " expected to be already remove from cache");
		}
	}

	@Override
	@CacheRemove(afterInvocation = false, evictFor = UnsupportedOperationException.class)
	public void earlyRemoveWithException(@CacheKey String id, boolean matchFilter) {
		throwException(matchFilter);
	}

	@Override
	@CacheRemoveAll
	public void removeAll() {
	}

	@Override
	@CacheRemoveAll(noEvictFor = NullPointerException.class)
	public void removeAllWithException(boolean matchFilter) {
		throwException(matchFilter);
	}

	@Override
	@CacheRemoveAll(afterInvocation = false)
	public void earlyRemoveAll() {
		// afterInvocation = false: the whole cache must already be cleared
		// when the method body runs.
		ConcurrentHashMap<?, ?> nativeCache = (ConcurrentHashMap<?, ?>) defaultCache.getNativeCache();
		if (!nativeCache.isEmpty()) {
			throw new AssertionError("Cache was expected to be empty");
		}
	}

	@Override
	@CacheRemoveAll(afterInvocation = false, evictFor = UnsupportedOperationException.class)
	public void earlyRemoveAllWithException(boolean matchFilter) {
		throwException(matchFilter);
	}

	// Deliberately un-annotated: used to verify that methods without JCache
	// annotations are left alone.
	@Deprecated
	public void noAnnotation() {
	}

	@Override
	public long exceptionInvocations() {
		return exceptionCounter.get();
	}

	// matchFilter == true -> UnsupportedOperationException (cached/evicting);
	// matchFilter == false -> NullPointerException (excluded by the filters).
	private void throwException(boolean matchFilter) {
		long count = exceptionCounter.getAndIncrement();
		if (matchFilter) {
			throw new UnsupportedOperationException("Expected exception (" + count + ")");
		}
		else {
			throw new NullPointerException("Expected exception (" + count + ")");
		}
	}

	// Checked-exception variant: IOException when matchFilter is true.
	private void throwCheckedException(boolean matchFilter) throws IOException {
		long count = exceptionCounter.getAndIncrement();
		if (matchFilter) {
			throw new IOException("Expected exception (" + count + ")");
		}
		else {
			throw new NullPointerException("Expected exception (" + count + ")");
		}
	}

}
| spring-projects/spring-framework | spring-aspects/src/test/java/org/springframework/cache/config/AnnotatedJCacheableService.java | Java | apache-2.0 | 6,679 |
package org.aries.service.model;
import java.io.InputStream;
import java.net.URL;
import org.apache.commons.digester.Digester;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.aries.nam.model.old.ActionDefinition;
import org.aries.nam.model.old.ParameterDefinition;
import org.aries.nam.model.old.ResultDefinition;
import org.aries.nam.model.old.ServiceDefinition;
import org.aries.util.ResourceUtil;
/**
 * Reads a service configuration XML document and materializes it into a
 * {@link ServiceDescripterMap} using Commons Digester.
 */
public class ServiceDigester {

	private static Log log = LogFactory.getLog(ServiceDigester.class);

	// Location of the services XML resource, resolved via ResourceUtil.
	private String _servicesFile;

	// Optional class loader installed as the thread context loader while parsing.
	private ClassLoader _classLoader;

	public ServiceDigester(String servicesFile) {
		_servicesFile = servicesFile;
	}

	public ServiceDigester(ClassLoader classLoader, String servicesFile) {
		this(servicesFile);
		_classLoader = classLoader;
	}

	public String getSource() {
		return _servicesFile;
	}

	/**
	 * Locates the configuration resource and digests it.
	 *
	 * @return the parsed configuration, or {@code null} when the resource
	 *         cannot be found (a warning is logged in that case)
	 * @throws Exception if the resource cannot be read or parsed
	 */
	public ServiceDescripterMap execute() throws Exception {
		InputStream stream = null;
		// BUGFIX: remember the caller's context class loader so it can be
		// restored. The previous implementation left the replacement loader
		// installed on the calling thread after this method returned.
		ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
		try {
			String source = getSource();
			if (_classLoader != null)
				Thread.currentThread().setContextClassLoader(_classLoader);
			URL url = ResourceUtil.getResource(source);
			if (url == null) {
				String message = "Service configuration file not found: "+source;
				log.warn(message);
				//throw new Exception(message);
				return null;
			}
			stream = url.openStream();
			ServiceDescripterMap configuration = digest(stream);
			return configuration;
		} finally {
			// Always restore the original context class loader before leaving.
			Thread.currentThread().setContextClassLoader(previousLoader);
			if (stream != null)
				stream.close();
		}
	}

	/**
	 * Configures a Digester with the rules for the services document
	 * (services/service/action/parameter|result) and parses the stream.
	 *
	 * @param stream XML input; the caller is responsible for closing it
	 * @return the root {@link ServiceDescripterMap} built by the digester
	 * @throws Exception on any parse failure
	 */
	public ServiceDescripterMap digest(InputStream stream) throws Exception {
		Digester digester = new Digester();
		digester.setClassLoader(Thread.currentThread().getContextClassLoader());
		// Object creation rules mirror the document nesting.
		digester.addObjectCreate("services", ServiceDescripterMap.class);
		digester.addObjectCreate("services/service", ServiceDefinition.class);
		digester.addSetProperties("services/service", "group", "groupName");
		digester.addSetProperties("services/service", "name", "serviceName");
		digester.addSetProperties("services/service", "class", "className");
		digester.addSetProperties("services/service", "description", "description");
		digester.addObjectCreate("services/service/action", ActionDefinition.class);
		digester.addSetProperties("services/service/action", "name", "actionName");
		digester.addSetProperties("services/service/action", "class", "className");
		digester.addSetProperties("services/service/action", "description", "description");
		digester.addObjectCreate("services/service/action/parameter", ParameterDefinition.class);
		digester.addSetProperties("services/service/action/parameter", "name", "name");
		digester.addSetProperties("services/service/action/parameter", "type", "type");
		digester.addObjectCreate("services/service/action/result", ResultDefinition.class);
		digester.addSetProperties("services/service/action/result", "name", "name");
		digester.addSetProperties("services/service/action/result", "type", "type");
		// Wire children into their parents as each element closes.
		digester.addSetNext("services/service/action/parameter", "addParameterDescriptor");
		digester.addSetNext("services/service/action/result", "setResultDescriptor");
		digester.addSetNext("services/service/action", "addActionDescriptor");
		digester.addSetNext("services/service", "addServiceDescriptor");
		digester.parse(stream);
		ServiceDescripterMap configuration = (ServiceDescripterMap) digester.getRoot();
		return configuration;
	}

}
| tfisher1226/ARIES | aries/common/common-runtime/src/main/java/org/aries/service/model/ServiceDigester.java | Java | apache-2.0 | 3,641 |
/**
* KnowledgeWorkbenchModel
* A workbench for wiring relations among topics
*/
var types = require('tqtopicmap/lib/types'),
icons = require('tqtopicmap/lib/icons'),
properties = require('tqtopicmap/lib/properties'),
constants = require('../../core/constants'),
tagmodel = require('../tag/tagmodel'),
relationlist = require('./relationlist')
;
var KnowledgeWorkbenchModel = module.exports = function(environment, cm, tmenv) {
var CommonModel = cm;
if (!CommonModel) {
CommonModel = environment.getCommonModel();
}
var myEnvironment = environment;
var topicMapEnvironment = tmenv;
if (!topicMapEnvironment) {
topicMapEnvironment = environment.getTopicMapEnvironment();
}
var DataProvider = topicMapEnvironment.getDataProvider(),
TopicModel = topicMapEnvironment.getTopicModel(),
RelationModel = topicMapEnvironment.getRelationModel(),
TagModel = new tagmodel(environment, CommonModel, topicMapEnvironment),
queryDSL = topicMapEnvironment.getQueryDSL(),
relationForm = [];
function buildForm() {
var x = relationlist.RelationList;
var y;
for (var i=0;i<x.length;i++) {
y = {};
y.val = x[i];
relationForm.push(y);
}
}
buildForm();
var self = this;
self.getNewRelationForm = function() {
return relationForm;
};
self.arrayContains = function(array, value) {
return (array.indexOf(value) > -1);
};
/**
* Called from CommonModel.__doAjaxFetch
* @param theNode
* @param callback signature (result)
*/
self.showRelations = function(theNode, callback) {
console.log("KnowledgeWorkbenchModel.showRelations "+theNode);
var result = [];
//TODO this ignores private relations
//This looks at tuples and shows those which are found on the RelationList;
// Push relation structs to result
var relns = theNode.listRelations();
myEnvironment.logDebug("KnowledgeWorkbenchModel.showRelations "+theNode.getLocator()+" "+relns);
if (relns) {
var len = relns.length;
var rx;
for (var i=0;i<len;i++) {
rx = relns[i];
if (self.arrayContains(relationlist.RelationList, rx.relationType)) {
result.push(rx);
}
}
return callback(result);
} else {
return callback(result);
}
};
/**
* Create a connection between two nodes
* @param sourceLocator
* @param targetLocator
* @param relationType
* @param userObject
* @param taglist
* @param callback signature (err, relationnode)
*/
//TODO needs isPrivate
self.createConnection = function(sourceLocator, targetLocator, relationType, userObject, taglist, callback) {
console.log("KnowledgeWorkbenchModel.createConnection "+sourceLocator+" "+targetLocator+" "+relationType);
var error = "",
result, //the tuple
sourceNode,
targetNode,
userTopic,
isPrivate = false; //TODO
credentials = userObject.credentials,
userLocator = userObject.handle;
myEnvironment.logDebug("KnowledgeWorkbenchModel.createConnection "+sourceLocator+" "+targetLocator+" "+relationType+" "+taglist+" "+userLocator);
//get UserTopic
DataProvider.getNodeByLocator(userLocator,credentials, function(err,usr) {
userTopic = usr;
DataProvider.getNodeByLocator(sourceLocator,credentials,function(err,p1) {
if (err) {error += err;}
sourceNode = p1;
DataProvider.getNodeByLocator(targetLocator,credentials,function(err,p2) {
if (err) {error += err;}
targetNode = p2;
myEnvironment.logDebug("KnowledgeWorkbenchModel.createConnection-x ");
RelationModel.createRelation(sourceNode,targetNode,relationType,userLocator,credentials,isPrivate,function(err,data) {
myEnvironment.logDebug("KnowledgeWorkbenchModel.createConnection-1 "+err+" "+data);
//TODO seeing some kind of internal bug which returns undefined
if (err) {error += err;}
result = data;
if (result) {
myEnvironment.addRecentConnection(result.getLocator(), result.getLabel("en"));
//If there are tags, process them
if (taglist.length > 0) {
myEnvironment.logDebug("KnowledgeWorkbenchModel.createConnection-2 "+userLocator+" | "+err+" | "+userTopic);
TagModel.processTagList(taglist, userTopic, result, credentials, function(err,rsx) {
if (err) {error += err;}
console.log('NEW_POST-1 '+rsx);
//result could be an empty list;
//TagModel already added Tag_Doc and Doc_Tag relations
console.log("ARTICLES_CREATE_2 "+JSON.stringify(rsx));
DataProvider.putNode(result, function(err,data) {
console.log('ARTICLES_CREATE-3 '+err);
if (err) {console.log('ARTICLES_CREATE-3a '+err)}
console.log('ARTICLES_CREATE-3b '+userTopic);
TopicModel.relateExistingNodesAsPivots(userTopic,result,types.CREATOR_DOCUMENT_RELATION_TYPE,
userTopic.getLocator(),
icons.RELATION_ICON, icons.RELATION_ICON, false, credentials, function(err,data) {
if (err) {console.log('ARTICLES_CREATE-3d '+err);}
myEnvironment.logDebug("KnowledgeWorkbenchModel.createConnection-3 "+userLocator+" | "+err+" | "+result);
//modified to return entire node
return callback(err,result);
}); //r1
}); //putnode
}); // processtaglist
} else {
DataProvider.putNode(result, function(err,data) {
console.log('ARTICLES_CREATE-3 '+err);
if (err) {console.log('ARTICLES_CREATE-3a '+err)}
console.log('ARTICLES_CREATE-3b '+userTopic);
TopicModel.relateExistingNodesAsPivots(userTopic,result,types.CREATOR_DOCUMENT_RELATION_TYPE,
userTopic.getLocator(),icons.RELATION_ICON, icons.RELATION_ICON, false, credentials, function(err,data) {
if (err) {console.log('ARTICLES_CREATE-3d '+err);}
myEnvironment.logDebug("KnowledgeWorkbenchModel.createConnection-4 "+userLocator+" | "+err+" | "+result);
callback(err,result);
}); //r1
}); //putnode
}
} else {
return callback(error,result);
}
}); //relatenodes
}); //gettarget
}); // getsource
}); // getuser
};
}; | KnowledgeGarden/TQPortal | apps/kwb/kwbmodel.js | JavaScript | apache-2.0 | 7,937 |
package edu.gatech.gtri.orafile;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.*;
import java.util.regex.Pattern;
/**
 * Renders an {@link OrafileDict} back into Oracle parameter-file text
 * (the syntax used by files such as tnsnames.ora and listener.ora).
 */
public class OrafileRenderer {

	// When true, entries at every nesting level are emitted sorted by key
	// name; when false, the original ordering is preserved.
	final boolean sortByKey;

	public OrafileRenderer() {
		sortByKey = false;
	}

	OrafileRenderer(boolean sortByKey) {
		this.sortByKey = sortByKey;
	}

	/**
	 * @param sortByKey True to sort entries by key. False to preserve
	 * original ordering.
	 * @return A new {@link OrafileRenderer}.
	 */
	public OrafileRenderer sortByKey(boolean sortByKey) {
		return new OrafileRenderer(sortByKey);
	}

	// Whether a definition must be wrapped in parentheses: top-level
	// definitions are bare, nested definitions are parenthesized.
	private enum Parens {
		Yes, No;

		boolean yes() {
			return this == Yes;
		}
	}

	/**
	 * Renders the dictionary to a String. A StringWriter is used, so the
	 * IOException declared by the Writer overload is not expected here and
	 * is rethrown as a RuntimeException if it ever occurs.
	 */
	public String renderFile(OrafileDict dict) {
		StringWriter writer = new StringWriter();
		try {
			renderFile(dict, writer);
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
		return writer.toString();
	}

	/**
	 * Renders each top-level definition, separating consecutive definitions
	 * with a blank line.
	 */
	public void renderFile(OrafileDict dict, Writer writer)
			throws IOException {
		Iterator<OrafileDef> defs = defs(dict).iterator();
		while (defs.hasNext()) {
			OrafileDef def = defs.next();
			renderDef(writer, def, Parens.No, "");
			if (defs.hasNext()) {
				writer.append("\n");
			}
		}
	}

	// Renders one definition. Three cases by value type:
	//  - OrafileString:     name = value
	//  - OrafileStringList: name = ( v1,\n v2, ... )
	//  - OrafileDict:       name =\n followed by nested parenthesized defs
	// `parens` wraps the whole definition; `indent` grows by two spaces per
	// nesting level.
	void renderDef(Writer writer, OrafileDef def, Parens parens,
			String indent) throws IOException {
		String nextIndent = indent + "  ";
		OrafileVal val = def.getVal();
		if (val instanceof OrafileString) {
			String stringVal = ((OrafileString) val).string;
			writer.append(indent);
			if (parens.yes()) writer.append("(");
			writer.append(def.getName()).append(" = ");
			renderString(writer, stringVal);
			if (parens.yes()) writer.append(")\n");
		} else if (val instanceof OrafileStringList) {
			OrafileStringList stringListVal = (OrafileStringList) val;
			writer.append(indent);
			if (parens.yes()) writer.append("(");
			writer.append(def.getName()).append(" = (\n");
			Iterator<String> stringVals = stringListVal.list.iterator();
			while (stringVals.hasNext()) {
				String stringVal = stringVals.next();
				writer.append(nextIndent);
				renderString(writer, stringVal);
				if (stringVals.hasNext()) writer.append(",");
				writer.append("\n");
			}
			writer.append(indent).append(")");
			if (parens.yes()) writer.append(")\n");
		} else {
			OrafileDict dict = (OrafileDict) val;
			writer.append(indent);
			if (parens.yes()) writer.append("(");
			writer.append(def.getName()).append(" =\n");
			for (OrafileDef nextDef : defs(dict)) {
				renderDef(writer, nextDef, Parens.Yes, nextIndent);
			}
			if (parens.yes()) writer.append(indent).append(")\n");
		}
	}

	// Characters that may appear in a value without quoting; anything else
	// forces the value to be rendered as a quoted, escaped string.
	static final Pattern SAFE_STRING =
		Pattern.compile("^[A-Za-z0-9\\Q<>/.:;-_$+*&!%?@\\E]+$");

	// Emits the value verbatim when safe; otherwise backslash-escapes
	// backslashes and double quotes and wraps the result in double quotes.
	void renderString(Writer writer, String string) throws IOException {
		if (SAFE_STRING.matcher(string).matches()) {
			writer.append(string);
		} else {
			String escaped = string
				.replaceAll("\\\\", "\\\\\\\\")
				.replaceAll("\"", "\\\\\"");
			writer.append("\"").append(escaped).append("\"");
		}
	}

	// Returns the dict's entries, sorted by key name when sortByKey is set.
	List<OrafileDef> defs(OrafileDict dict) {
		if (!sortByKey) return dict.list;
		List<OrafileDef> defs = new ArrayList<OrafileDef>(dict.list);
		Collections.sort(defs, DEF_KEY_COMPARATOR);
		return defs;
	}

	static final Comparator<OrafileDef> DEF_KEY_COMPARATOR =
		new Comparator<OrafileDef>() {
			@Override
			public int compare(OrafileDef a, OrafileDef b) {
				return a.getName().compareTo(b.getName());
			}
		};

}
| gtri/orafile | src/main/java/edu/gatech/gtri/orafile/OrafileRenderer.java | Java | apache-2.0 | 4,138 |
#!/usr/bin/env ruby
require 'spec_helper'
# Spec for the oracle_java Puppet module's default (parameterless) behavior.
describe 'oracle_java' do

  context 'supported operating systems' do
    # rspec-puppet-facts: iterate over every OS/facts combination the module
    # declares support for in metadata.json.
    on_supported_os.each do |os, facts|
      context "on #{os} with puppet v#{Puppet.version}" do
        let(:facts) do
          facts
        end

        context "oracle_java class without any parameters" do
          let(:params) {{ }}
          # Default version the module installs when none is requested.
          latest_ver = '8u66'
          it { is_expected.to compile.with_all_deps }
          it { is_expected.to contain_class('oracle_java')}
          it { is_expected.to contain_class('oracle_java::params') }
          # With no parameters the module installs the JRE variant of the
          # latest version, downloads the x64 tarball into the cache
          # directory, and registers the alternatives entry.
          it { is_expected.to contain_oracle_java__install(latest_ver).with(:java_type => 'jre')}
          it { is_expected.to contain_archive("/usr/java/.dl_cache/jre-#{latest_ver}-linux-x64.tar.gz")}
          it { is_expected.to contain_oracle_java__alternative(latest_ver)}
          #it { is_expected.to contain_oracle_java__install(latest_ver).with(:java_type => 'jre') }
          #.that_comes_before('oracle_java::config') }
          #it { is_expected.to contain_class('oracle_java::config') }
          #it { is_expected.to contain_class('oracle_java::service').that_subscribes_to('oracle_java::config') }
          #it { is_expected.to contain_service('oracle_java') }
          #it { is_expected.to contain_package('oracle_java').with_ensure('present') }
        end
      end
    end
  end

  # context 'unsupported operating system' do
  #   describe 'oracle_java class without any parameters on Solaris/Nexenta' do
  #     let(:facts) {{
  #       :osfamily        => 'Solaris',
  #       :operatingsystem => 'Nexenta',
  #     }}
  #
  #     it { expect { is_expected.to contain_package('oracle_java') }.to raise_error(Puppet::Error, /Nexenta not supported/) }
  #   end
  # end
end
| adamcrews/oracle_java | spec/classes/oracle_java_spec.rb | Ruby | apache-2.0 | 1,761 |
package org.hl7.fhir.instance.model.valuesets;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Sat, Aug 22, 2015 23:00-0400 for FHIR v0.5.0
import org.hl7.fhir.instance.model.EnumFactory;
public class V3ActRelationshipTypeEnumFactory implements EnumFactory<V3ActRelationshipType> {
	/**
	 * Maps an HL7 v3 ActRelationshipType code string to its enum constant.
	 * Generated 1:1 mapping — matching is case-sensitive and exact.
	 *
	 * @param codeString the code to resolve; {@code null} or "" yields {@code null}
	 * @return the matching constant, or {@code null} for null/empty input
	 * @throws IllegalArgumentException if the code is not recognized
	 */
	public V3ActRelationshipType fromCode(String codeString) throws IllegalArgumentException {
		// Absent input is treated as "no code" rather than an error.
		if (codeString == null || "".equals(codeString))
			return null;
		if ("ART".equals(codeString))
			return V3ActRelationshipType.ART;
		if ("_ActClassTemporallyPertains".equals(codeString))
			return V3ActRelationshipType._ACTCLASSTEMPORALLYPERTAINS;
		if ("_ActRelationshipAccounting".equals(codeString))
			return V3ActRelationshipType._ACTRELATIONSHIPACCOUNTING;
		if ("_ActRelationshipCostTracking".equals(codeString))
			return V3ActRelationshipType._ACTRELATIONSHIPCOSTTRACKING;
		if ("CHRG".equals(codeString))
			return V3ActRelationshipType.CHRG;
		if ("COST".equals(codeString))
			return V3ActRelationshipType.COST;
		if ("_ActRelationshipPosting".equals(codeString))
			return V3ActRelationshipType._ACTRELATIONSHIPPOSTING;
		if ("CREDIT".equals(codeString))
			return V3ActRelationshipType.CREDIT;
		if ("DEBIT".equals(codeString))
			return V3ActRelationshipType.DEBIT;
		if ("_ActRelationshipConditional".equals(codeString))
			return V3ActRelationshipType._ACTRELATIONSHIPCONDITIONAL;
		if ("CIND".equals(codeString))
			return V3ActRelationshipType.CIND;
		if ("PRCN".equals(codeString))
			return V3ActRelationshipType.PRCN;
		if ("RSON".equals(codeString))
			return V3ActRelationshipType.RSON;
		if ("BLOCK".equals(codeString))
			return V3ActRelationshipType.BLOCK;
		if ("DIAG".equals(codeString))
			return V3ActRelationshipType.DIAG;
		if ("IMM".equals(codeString))
			return V3ActRelationshipType.IMM;
		if ("ACTIMM".equals(codeString))
			return V3ActRelationshipType.ACTIMM;
		if ("PASSIMM".equals(codeString))
			return V3ActRelationshipType.PASSIMM;
		if ("MITGT".equals(codeString))
			return V3ActRelationshipType.MITGT;
		if ("RCVY".equals(codeString))
			return V3ActRelationshipType.RCVY;
		if ("PRYLX".equals(codeString))
			return V3ActRelationshipType.PRYLX;
		if ("TREAT".equals(codeString))
			return V3ActRelationshipType.TREAT;
		if ("ADJUNCT".equals(codeString))
			return V3ActRelationshipType.ADJUNCT;
		if ("MTREAT".equals(codeString))
			return V3ActRelationshipType.MTREAT;
		if ("PALLTREAT".equals(codeString))
			return V3ActRelationshipType.PALLTREAT;
		if ("SYMP".equals(codeString))
			return V3ActRelationshipType.SYMP;
		if ("TRIG".equals(codeString))
			return V3ActRelationshipType.TRIG;
		if ("_ActRelationshipTemporallyPertains".equals(codeString))
			return V3ActRelationshipType._ACTRELATIONSHIPTEMPORALLYPERTAINS;
		if ("_ActRelationshipTemporallyPertainsApproximates".equals(codeString))
			return V3ActRelationshipType._ACTRELATIONSHIPTEMPORALLYPERTAINSAPPROXIMATES;
		if ("ENE".equals(codeString))
			return V3ActRelationshipType.ENE;
		if ("ECW".equals(codeString))
			return V3ActRelationshipType.ECW;
		if ("CONCURRENT".equals(codeString))
			return V3ActRelationshipType.CONCURRENT;
		if ("SBSECWE".equals(codeString))
			return V3ActRelationshipType.SBSECWE;
		if ("ENS".equals(codeString))
			return V3ActRelationshipType.ENS;
		if ("ECWS".equals(codeString))
			return V3ActRelationshipType.ECWS;
		if ("SNE".equals(codeString))
			return V3ActRelationshipType.SNE;
		if ("SCWE".equals(codeString))
			return V3ActRelationshipType.SCWE;
		if ("SNS".equals(codeString))
			return V3ActRelationshipType.SNS;
		if ("SCW".equals(codeString))
			return V3ActRelationshipType.SCW;
		if ("SCWSEBE".equals(codeString))
			return V3ActRelationshipType.SCWSEBE;
		if ("SCWSEAE".equals(codeString))
			return V3ActRelationshipType.SCWSEAE;
		if ("EAS".equals(codeString))
			return V3ActRelationshipType.EAS;
		if ("EAE".equals(codeString))
			return V3ActRelationshipType.EAE;
		if ("SASEAE".equals(codeString))
			return V3ActRelationshipType.SASEAE;
		if ("SBEEAE".equals(codeString))
			return V3ActRelationshipType.SBEEAE;
		if ("SASSBEEAS".equals(codeString))
			return V3ActRelationshipType.SASSBEEAS;
		if ("SBSEAE".equals(codeString))
			return V3ActRelationshipType.SBSEAE;
		if ("SAS".equals(codeString))
			return V3ActRelationshipType.SAS;
		if ("SAE".equals(codeString))
			return V3ActRelationshipType.SAE;
		if ("DURING".equals(codeString))
			return V3ActRelationshipType.DURING;
		if ("SASECWE".equals(codeString))
			return V3ActRelationshipType.SASECWE;
		if ("EASORECWS".equals(codeString))
			return V3ActRelationshipType.EASORECWS;
		if ("EAEORECW".equals(codeString))
			return V3ActRelationshipType.EAEORECW;
		if ("INDEPENDENT".equals(codeString))
			return V3ActRelationshipType.INDEPENDENT;
		if ("SAEORSCWE".equals(codeString))
			return V3ActRelationshipType.SAEORSCWE;
		if ("SASORSCW".equals(codeString))
			return V3ActRelationshipType.SASORSCW;
		if ("SBEORSCWE".equals(codeString))
			return V3ActRelationshipType.SBEORSCWE;
		if ("OVERLAP".equals(codeString))
			return V3ActRelationshipType.OVERLAP;
		if ("EDU".equals(codeString))
			return V3ActRelationshipType.EDU;
		if ("SBSEASEBE".equals(codeString))
			return V3ActRelationshipType.SBSEASEBE;
		if ("SBSEAS".equals(codeString))
			return V3ActRelationshipType.SBSEAS;
		if ("SDU".equals(codeString))
			return V3ActRelationshipType.SDU;
		if ("SBE".equals(codeString))
			return V3ActRelationshipType.SBE;
		if ("EBE".equals(codeString))
			return V3ActRelationshipType.EBE;
		if ("SBSEBE".equals(codeString))
			return V3ActRelationshipType.SBSEBE;
		if ("EBSORECWS".equals(codeString))
			return V3ActRelationshipType.EBSORECWS;
		if ("EBS".equals(codeString))
			return V3ActRelationshipType.EBS;
		if ("EBEORECW".equals(codeString))
			return V3ActRelationshipType.EBEORECW;
		if ("SBSORSCW".equals(codeString))
			return V3ActRelationshipType.SBSORSCW;
		if ("SBS".equals(codeString))
			return V3ActRelationshipType.SBS;
		if ("AUTH".equals(codeString))
			return V3ActRelationshipType.AUTH;
		if ("CAUS".equals(codeString))
			return V3ActRelationshipType.CAUS;
		if ("COMP".equals(codeString))
			return V3ActRelationshipType.COMP;
		if ("CTRLV".equals(codeString))
			return V3ActRelationshipType.CTRLV;
		if ("MBR".equals(codeString))
			return V3ActRelationshipType.MBR;
		if ("STEP".equals(codeString))
			return V3ActRelationshipType.STEP;
		if ("ARR".equals(codeString))
			return V3ActRelationshipType.ARR;
		if ("DEP".equals(codeString))
			return V3ActRelationshipType.DEP;
		if ("PART".equals(codeString))
			return V3ActRelationshipType.PART;
		if ("COVBY".equals(codeString))
			return V3ActRelationshipType.COVBY;
		if ("DRIV".equals(codeString))
			return V3ActRelationshipType.DRIV;
		if ("ELNK".equals(codeString))
			return V3ActRelationshipType.ELNK;
		if ("EVID".equals(codeString))
			return V3ActRelationshipType.EVID;
		if ("EXACBY".equals(codeString))
			return V3ActRelationshipType.EXACBY;
		if ("EXPL".equals(codeString))
			return V3ActRelationshipType.EXPL;
		if ("INTF".equals(codeString))
			return V3ActRelationshipType.INTF;
		if ("ITEMSLOC".equals(codeString))
			return V3ActRelationshipType.ITEMSLOC;
		if ("LIMIT".equals(codeString))
			return V3ActRelationshipType.LIMIT;
		if ("META".equals(codeString))
			return V3ActRelationshipType.META;
		if ("MFST".equals(codeString))
			return V3ActRelationshipType.MFST;
		if ("NAME".equals(codeString))
			return V3ActRelationshipType.NAME;
		if ("OUTC".equals(codeString))
			return V3ActRelationshipType.OUTC;
		if ("_ActRelationsipObjective".equals(codeString))
			return V3ActRelationshipType._ACTRELATIONSIPOBJECTIVE;
		if ("OBJC".equals(codeString))
			return V3ActRelationshipType.OBJC;
		if ("OBJF".equals(codeString))
			return V3ActRelationshipType.OBJF;
		if ("GOAL".equals(codeString))
			return V3ActRelationshipType.GOAL;
		if ("RISK".equals(codeString))
			return V3ActRelationshipType.RISK;
		if ("PERT".equals(codeString))
			return V3ActRelationshipType.PERT;
		if ("PREV".equals(codeString))
			return V3ActRelationshipType.PREV;
		if ("REFR".equals(codeString))
			return V3ActRelationshipType.REFR;
		if ("USE".equals(codeString))
			return V3ActRelationshipType.USE;
		if ("REFV".equals(codeString))
			return V3ActRelationshipType.REFV;
		if ("RELVBY".equals(codeString))
			return V3ActRelationshipType.RELVBY;
		if ("SEQL".equals(codeString))
			return V3ActRelationshipType.SEQL;
		if ("APND".equals(codeString))
			return V3ActRelationshipType.APND;
		if ("BSLN".equals(codeString))
			return V3ActRelationshipType.BSLN;
		if ("COMPLY".equals(codeString))
			return V3ActRelationshipType.COMPLY;
		if ("DOC".equals(codeString))
			return V3ActRelationshipType.DOC;
		if ("FLFS".equals(codeString))
			return V3ActRelationshipType.FLFS;
		if ("OCCR".equals(codeString))
			return V3ActRelationshipType.OCCR;
		if ("OREF".equals(codeString))
			return V3ActRelationshipType.OREF;
		if ("SCH".equals(codeString))
			return V3ActRelationshipType.SCH;
		if ("GEN".equals(codeString))
			return V3ActRelationshipType.GEN;
		if ("GEVL".equals(codeString))
			return V3ActRelationshipType.GEVL;
		if ("INST".equals(codeString))
			return V3ActRelationshipType.INST;
		if ("MOD".equals(codeString))
			return V3ActRelationshipType.MOD;
		if ("MTCH".equals(codeString))
			return V3ActRelationshipType.MTCH;
		if ("OPTN".equals(codeString))
			return V3ActRelationshipType.OPTN;
		if ("RCHAL".equals(codeString))
			return V3ActRelationshipType.RCHAL;
		if ("REV".equals(codeString))
			return V3ActRelationshipType.REV;
		if ("RPLC".equals(codeString))
			return V3ActRelationshipType.RPLC;
		if ("SUCC".equals(codeString))
			return V3ActRelationshipType.SUCC;
		if ("UPDT".equals(codeString))
			return V3ActRelationshipType.UPDT;
		if ("XCRPT".equals(codeString))
			return V3ActRelationshipType.XCRPT;
		if ("VRXCRPT".equals(codeString))
			return V3ActRelationshipType.VRXCRPT;
		if ("XFRM".equals(codeString))
			return V3ActRelationshipType.XFRM;
		if ("SPRT".equals(codeString))
			return V3ActRelationshipType.SPRT;
		if ("SPRTBND".equals(codeString))
			return V3ActRelationshipType.SPRTBND;
		if ("SUBJ".equals(codeString))
			return V3ActRelationshipType.SUBJ;
		if ("QUALF".equals(codeString))
			return V3ActRelationshipType.QUALF;
		if ("SUMM".equals(codeString))
			return V3ActRelationshipType.SUMM;
		if ("VALUE".equals(codeString))
			return V3ActRelationshipType.VALUE;
		// No match: exact, case-sensitive comparison failed for every code.
		throw new IllegalArgumentException("Unknown V3ActRelationshipType code '"+codeString+"'");
	}
  /**
   * Maps a {@link V3ActRelationshipType} enum constant back to its canonical
   * HL7 v3 code string (the inverse of {@code fromCode}). This is generated
   * code: an exhaustive if/return chain over every known constant.
   */
  public String toCode(V3ActRelationshipType code) {
    if (code == V3ActRelationshipType.ART)
      return "ART";
    if (code == V3ActRelationshipType._ACTCLASSTEMPORALLYPERTAINS)
      return "_ActClassTemporallyPertains";
    if (code == V3ActRelationshipType._ACTRELATIONSHIPACCOUNTING)
      return "_ActRelationshipAccounting";
    if (code == V3ActRelationshipType._ACTRELATIONSHIPCOSTTRACKING)
      return "_ActRelationshipCostTracking";
    if (code == V3ActRelationshipType.CHRG)
      return "CHRG";
    if (code == V3ActRelationshipType.COST)
      return "COST";
    if (code == V3ActRelationshipType._ACTRELATIONSHIPPOSTING)
      return "_ActRelationshipPosting";
    if (code == V3ActRelationshipType.CREDIT)
      return "CREDIT";
    if (code == V3ActRelationshipType.DEBIT)
      return "DEBIT";
    if (code == V3ActRelationshipType._ACTRELATIONSHIPCONDITIONAL)
      return "_ActRelationshipConditional";
    if (code == V3ActRelationshipType.CIND)
      return "CIND";
    if (code == V3ActRelationshipType.PRCN)
      return "PRCN";
    if (code == V3ActRelationshipType.RSON)
      return "RSON";
    if (code == V3ActRelationshipType.BLOCK)
      return "BLOCK";
    if (code == V3ActRelationshipType.DIAG)
      return "DIAG";
    if (code == V3ActRelationshipType.IMM)
      return "IMM";
    if (code == V3ActRelationshipType.ACTIMM)
      return "ACTIMM";
    if (code == V3ActRelationshipType.PASSIMM)
      return "PASSIMM";
    if (code == V3ActRelationshipType.MITGT)
      return "MITGT";
    if (code == V3ActRelationshipType.RCVY)
      return "RCVY";
    if (code == V3ActRelationshipType.PRYLX)
      return "PRYLX";
    if (code == V3ActRelationshipType.TREAT)
      return "TREAT";
    if (code == V3ActRelationshipType.ADJUNCT)
      return "ADJUNCT";
    if (code == V3ActRelationshipType.MTREAT)
      return "MTREAT";
    if (code == V3ActRelationshipType.PALLTREAT)
      return "PALLTREAT";
    if (code == V3ActRelationshipType.SYMP)
      return "SYMP";
    if (code == V3ActRelationshipType.TRIG)
      return "TRIG";
    if (code == V3ActRelationshipType._ACTRELATIONSHIPTEMPORALLYPERTAINS)
      return "_ActRelationshipTemporallyPertains";
    if (code == V3ActRelationshipType._ACTRELATIONSHIPTEMPORALLYPERTAINSAPPROXIMATES)
      return "_ActRelationshipTemporallyPertainsApproximates";
    if (code == V3ActRelationshipType.ENE)
      return "ENE";
    if (code == V3ActRelationshipType.ECW)
      return "ECW";
    if (code == V3ActRelationshipType.CONCURRENT)
      return "CONCURRENT";
    if (code == V3ActRelationshipType.SBSECWE)
      return "SBSECWE";
    if (code == V3ActRelationshipType.ENS)
      return "ENS";
    if (code == V3ActRelationshipType.ECWS)
      return "ECWS";
    if (code == V3ActRelationshipType.SNE)
      return "SNE";
    if (code == V3ActRelationshipType.SCWE)
      return "SCWE";
    if (code == V3ActRelationshipType.SNS)
      return "SNS";
    if (code == V3ActRelationshipType.SCW)
      return "SCW";
    if (code == V3ActRelationshipType.SCWSEBE)
      return "SCWSEBE";
    if (code == V3ActRelationshipType.SCWSEAE)
      return "SCWSEAE";
    if (code == V3ActRelationshipType.EAS)
      return "EAS";
    if (code == V3ActRelationshipType.EAE)
      return "EAE";
    if (code == V3ActRelationshipType.SASEAE)
      return "SASEAE";
    if (code == V3ActRelationshipType.SBEEAE)
      return "SBEEAE";
    if (code == V3ActRelationshipType.SASSBEEAS)
      return "SASSBEEAS";
    if (code == V3ActRelationshipType.SBSEAE)
      return "SBSEAE";
    if (code == V3ActRelationshipType.SAS)
      return "SAS";
    if (code == V3ActRelationshipType.SAE)
      return "SAE";
    if (code == V3ActRelationshipType.DURING)
      return "DURING";
    if (code == V3ActRelationshipType.SASECWE)
      return "SASECWE";
    if (code == V3ActRelationshipType.EASORECWS)
      return "EASORECWS";
    if (code == V3ActRelationshipType.EAEORECW)
      return "EAEORECW";
    if (code == V3ActRelationshipType.INDEPENDENT)
      return "INDEPENDENT";
    if (code == V3ActRelationshipType.SAEORSCWE)
      return "SAEORSCWE";
    if (code == V3ActRelationshipType.SASORSCW)
      return "SASORSCW";
    if (code == V3ActRelationshipType.SBEORSCWE)
      return "SBEORSCWE";
    if (code == V3ActRelationshipType.OVERLAP)
      return "OVERLAP";
    if (code == V3ActRelationshipType.EDU)
      return "EDU";
    if (code == V3ActRelationshipType.SBSEASEBE)
      return "SBSEASEBE";
    if (code == V3ActRelationshipType.SBSEAS)
      return "SBSEAS";
    if (code == V3ActRelationshipType.SDU)
      return "SDU";
    if (code == V3ActRelationshipType.SBE)
      return "SBE";
    if (code == V3ActRelationshipType.EBE)
      return "EBE";
    if (code == V3ActRelationshipType.SBSEBE)
      return "SBSEBE";
    if (code == V3ActRelationshipType.EBSORECWS)
      return "EBSORECWS";
    if (code == V3ActRelationshipType.EBS)
      return "EBS";
    if (code == V3ActRelationshipType.EBEORECW)
      return "EBEORECW";
    if (code == V3ActRelationshipType.SBSORSCW)
      return "SBSORSCW";
    if (code == V3ActRelationshipType.SBS)
      return "SBS";
    if (code == V3ActRelationshipType.AUTH)
      return "AUTH";
    if (code == V3ActRelationshipType.CAUS)
      return "CAUS";
    if (code == V3ActRelationshipType.COMP)
      return "COMP";
    if (code == V3ActRelationshipType.CTRLV)
      return "CTRLV";
    if (code == V3ActRelationshipType.MBR)
      return "MBR";
    if (code == V3ActRelationshipType.STEP)
      return "STEP";
    if (code == V3ActRelationshipType.ARR)
      return "ARR";
    if (code == V3ActRelationshipType.DEP)
      return "DEP";
    if (code == V3ActRelationshipType.PART)
      return "PART";
    if (code == V3ActRelationshipType.COVBY)
      return "COVBY";
    if (code == V3ActRelationshipType.DRIV)
      return "DRIV";
    if (code == V3ActRelationshipType.ELNK)
      return "ELNK";
    if (code == V3ActRelationshipType.EVID)
      return "EVID";
    if (code == V3ActRelationshipType.EXACBY)
      return "EXACBY";
    if (code == V3ActRelationshipType.EXPL)
      return "EXPL";
    if (code == V3ActRelationshipType.INTF)
      return "INTF";
    if (code == V3ActRelationshipType.ITEMSLOC)
      return "ITEMSLOC";
    if (code == V3ActRelationshipType.LIMIT)
      return "LIMIT";
    if (code == V3ActRelationshipType.META)
      return "META";
    if (code == V3ActRelationshipType.MFST)
      return "MFST";
    if (code == V3ActRelationshipType.NAME)
      return "NAME";
    if (code == V3ActRelationshipType.OUTC)
      return "OUTC";
    if (code == V3ActRelationshipType._ACTRELATIONSIPOBJECTIVE)
      return "_ActRelationsipObjective";
    if (code == V3ActRelationshipType.OBJC)
      return "OBJC";
    if (code == V3ActRelationshipType.OBJF)
      return "OBJF";
    if (code == V3ActRelationshipType.GOAL)
      return "GOAL";
    if (code == V3ActRelationshipType.RISK)
      return "RISK";
    if (code == V3ActRelationshipType.PERT)
      return "PERT";
    if (code == V3ActRelationshipType.PREV)
      return "PREV";
    if (code == V3ActRelationshipType.REFR)
      return "REFR";
    if (code == V3ActRelationshipType.USE)
      return "USE";
    if (code == V3ActRelationshipType.REFV)
      return "REFV";
    if (code == V3ActRelationshipType.RELVBY)
      return "RELVBY";
    if (code == V3ActRelationshipType.SEQL)
      return "SEQL";
    if (code == V3ActRelationshipType.APND)
      return "APND";
    if (code == V3ActRelationshipType.BSLN)
      return "BSLN";
    if (code == V3ActRelationshipType.COMPLY)
      return "COMPLY";
    if (code == V3ActRelationshipType.DOC)
      return "DOC";
    if (code == V3ActRelationshipType.FLFS)
      return "FLFS";
    if (code == V3ActRelationshipType.OCCR)
      return "OCCR";
    if (code == V3ActRelationshipType.OREF)
      return "OREF";
    if (code == V3ActRelationshipType.SCH)
      return "SCH";
    if (code == V3ActRelationshipType.GEN)
      return "GEN";
    if (code == V3ActRelationshipType.GEVL)
      return "GEVL";
    if (code == V3ActRelationshipType.INST)
      return "INST";
    if (code == V3ActRelationshipType.MOD)
      return "MOD";
    if (code == V3ActRelationshipType.MTCH)
      return "MTCH";
    if (code == V3ActRelationshipType.OPTN)
      return "OPTN";
    if (code == V3ActRelationshipType.RCHAL)
      return "RCHAL";
    if (code == V3ActRelationshipType.REV)
      return "REV";
    if (code == V3ActRelationshipType.RPLC)
      return "RPLC";
    if (code == V3ActRelationshipType.SUCC)
      return "SUCC";
    if (code == V3ActRelationshipType.UPDT)
      return "UPDT";
    if (code == V3ActRelationshipType.XCRPT)
      return "XCRPT";
    if (code == V3ActRelationshipType.VRXCRPT)
      return "VRXCRPT";
    if (code == V3ActRelationshipType.XFRM)
      return "XFRM";
    if (code == V3ActRelationshipType.SPRT)
      return "SPRT";
    if (code == V3ActRelationshipType.SPRTBND)
      return "SPRTBND";
    if (code == V3ActRelationshipType.SUBJ)
      return "SUBJ";
    if (code == V3ActRelationshipType.QUALF)
      return "QUALF";
    if (code == V3ActRelationshipType.SUMM)
      return "SUMM";
    if (code == V3ActRelationshipType.VALUE)
      return "VALUE";
    // Unlike fromCode (which throws), unknown constants yield "?" here.
    return "?";
  }
}
| Nodstuff/hapi-fhir | hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/valuesets/V3ActRelationshipTypeEnumFactory.java | Java | apache-2.0 | 22,569 |
import ConfigParser
import os
import pwd
import shutil
import subprocess
import sys
import tempfile

# The charm bundles its Python dependencies under $CHARM_DIR/lib; this must
# happen before any charmhelpers import.
sys.path.insert(0, os.path.join(os.environ['CHARM_DIR'], 'lib'))

from charmhelpers.core.hookenv import (
    charm_dir,
    close_port,
    config,
    log,
    open_port,
    relation_set,
    unit_get,
)
from charmhelpers.core.host import service_restart, service_start, service_stop
from charmhelpers.core.templating import render
from charmhelpers.fetch import giturl, apt_install, apt_update, archiveurl
# Debian packages required before Zuul itself can be installed from git.
PACKAGES = [ 'git', 'python-setuptools', 'python-dev', 'python-pip', 'apache2' ]
# Upstream source of the Zuul gating system.
ZUUL_GIT_URL = 'https://github.com/openstack-infra/zuul.git'
# System user the Zuul services run as.
ZUUL_USER = 'zuul'
# Filesystem layout managed by this charm.
ZUUL_CONF_DIR = '/etc/zuul'
ZUUL_SSH_DIR = '/home/zuul/.ssh'
ZUUL_SSH_PRIVATE_FILE = 'id_rsa'
ZUUL_RUN_DIR = '/var/run/zuul'
ZUUL_MERGER_RUN_DIR = '/var/run/zuul-merger'
ZUUL_STATE_DIR = '/var/lib/zuul'
ZUUL_GIT_DIR = '/var/lib/zuul/git'
ZUUL_LOG_DIR = '/var/log/zuul'
APACHE2_CONF_DIR = '/etc/apache2'
# gear (gearman client/server library) installed alongside Zuul.
GEAR_GIT_URL = 'https://github.com/openstack-infra/gear.git'
GEAR_STABLE_TAG = '0.7.0'
# Parameter-function plugin fetched from openstack-infra/project-config.
OPENSTACK_FUNCTIONS_URL = 'https://raw.githubusercontent.com/' \
    'openstack-infra/project-config/master/zuul/openstack_functions.py'
def render_logging_conf():
    """Render /etc/zuul/logging.conf from its template, owned by zuul."""
    target = os.path.join(ZUUL_CONF_DIR, 'logging.conf')
    context = {'zuul_log': os.path.join(ZUUL_LOG_DIR, 'zuul.log')}
    render('logging.conf', target, context, ZUUL_USER, ZUUL_USER)
def render_gearman_logging_conf():
    """Render /etc/zuul/gearman-logging.conf for the internal gearman server."""
    target = os.path.join(ZUUL_CONF_DIR, 'gearman-logging.conf')
    context = {'gearman_log': os.path.join(ZUUL_LOG_DIR, 'gearman-server.log')}
    render('gearman-logging.conf', target, context, ZUUL_USER, ZUUL_USER)
def render_zuul_conf():
    """Render /etc/zuul/zuul.conf from the current charm configuration."""
    # The template expects the literal strings "true"/"false"; the internal
    # gearman server is only started when the 'gearman' service is enabled.
    gearman_start = "false"
    if is_service_enabled("gearman"):
        gearman_start = "true"
    context = {
        'gearman_host': config('gearman-server'),
        'gearman_port': config('gearman-port'),
        'gearman_internal': gearman_start,
        'gearman_log': os.path.join(ZUUL_CONF_DIR, 'gearman-logging.conf'),
        'gerrit_server': config('gerrit-server'),
        'gerrit_port': '29418',
        'gerrit_username': config('username'),
        'gerrit_sshkey': os.path.join(ZUUL_SSH_DIR, ZUUL_SSH_PRIVATE_FILE),
        'zuul_layout': os.path.join(ZUUL_CONF_DIR, 'layout.yaml'),
        'zuul_logging': os.path.join(ZUUL_CONF_DIR, 'logging.conf'),
        'zuul_pidfile': os.path.join(ZUUL_RUN_DIR, 'zuul.pid'),
        'zuul_state_dir': ZUUL_STATE_DIR,
        'zuul_status_url': config('status-url'),
        'zuul_git_dir': ZUUL_GIT_DIR,
        'zuul_url': config('zuul-url'),
        'zuul_smtp_server': config('zuul-smtp-server'),
        'zuul_smtp_from': config('zuul-smtp-from'),
        'zuul_smtp_to': config('zuul-smtp-to'),
        'merger_git_user_email': config('git-user-email'),
        'merger_git_user_name': config('git-user-name'),
        'merger_pidfile': os.path.join(ZUUL_MERGER_RUN_DIR, 'merger.pid')
    }
    zuul_conf = os.path.join(ZUUL_CONF_DIR, 'zuul.conf')
    render('zuul.conf', zuul_conf, context, ZUUL_USER, ZUUL_USER)
def render_layout():
    """Render layout.yaml matching the enabled services; no-op otherwise."""
    if is_service_enabled("server"):
        template = 'layout_standard.yaml'
    elif is_service_enabled("gearman"):
        template = 'layout_gearman.yaml'
    else:
        # Neither service is enabled on this unit: nothing to render.
        return
    target = os.path.join(ZUUL_CONF_DIR, 'layout.yaml')
    render(template, target, {}, ZUUL_USER, ZUUL_USER)
def render_zuul_vhost_conf():
    """Render the Apache vhost that serves Zuul's git repositories."""
    target = os.path.join(APACHE2_CONF_DIR, 'sites-available/zuul.conf')
    render('apache2-vhost.conf', target, {'git_dir': ZUUL_GIT_DIR},
           perms=0o644)
def download_openstack_functions():
    """Fetch openstack_functions.py into /etc/zuul, owned by the zuul user."""
    url_handler = archiveurl.ArchiveUrlFetchHandler()
    openstack_functions_path = os.path.join(ZUUL_CONF_DIR,
                                            'openstack_functions.py')
    url_handler.download(OPENSTACK_FUNCTIONS_URL, openstack_functions_path)
    zuul_user = pwd.getpwnam(ZUUL_USER)
    os.chown(openstack_functions_path, zuul_user.pw_uid, zuul_user.pw_gid)
    # 0o644 instead of the Python 2-only literal 0644: consistent with the
    # perms used elsewhere in this file and valid under Python 3.
    os.chmod(openstack_functions_path, 0o644)
def create_zuul_upstart_services():
    """Render upstart jobs for whichever Zuul services this unit runs."""
    zuul_server = '/etc/init/zuul-server.conf'
    zuul_merger = '/etc/init/zuul-merger.conf'
    zuul_server_bin = '/usr/local/bin/zuul-server'
    zuul_merger_bin = '/usr/local/bin/zuul-merger'
    zuul_conf = os.path.join(ZUUL_CONF_DIR, 'zuul.conf')
    context = {
        'zuul_server_bin': zuul_server_bin,
        'zuul_conf': zuul_conf,
        'zuul_user': ZUUL_USER
    }
    if is_service_enabled("server") or is_service_enabled("gearman"):
        render('upstart/zuul-server.conf', zuul_server, context, perms=0o644)
    # The merger template must not see the server binary; swap it for the
    # merger binary before rendering.
    context.pop('zuul_server_bin')
    if is_service_enabled("merger"):
        context.update({'zuul_merger_bin': zuul_merger_bin})
        render('upstart/zuul-merger.conf', zuul_merger, context, perms=0o644)
def install_from_git(repository_url, tag):
    """Clone *repository_url*, check out *tag* and pip-install it.

    The working directory is restored and the temporary clone removed even
    when one of the intermediate commands fails (the original leaked the
    temp dir and left the process chdir'd into it on error).
    """
    current_dir = os.getcwd()
    temp_dir = tempfile.mkdtemp()
    try:
        git_handler = giturl.GitUrlFetchHandler()
        git_handler.clone(repository_url, temp_dir, 'master')
        os.chdir(temp_dir)
        subprocess.check_call(['git', 'checkout', 'tags/{0}'.format(tag)])
        subprocess.check_call(['pip', 'install', '-r', './requirements.txt'])
        subprocess.check_call(['python', './setup.py', 'install'])
    finally:
        os.chdir(current_dir)
        shutil.rmtree(temp_dir, ignore_errors=True)
def generate_zuul_ssh_key():
    """Write the configured SSH private key for the zuul user (mode 0600)."""
    zuul_user = pwd.getpwnam(ZUUL_USER)
    ssh_key = os.path.join(ZUUL_SSH_DIR, ZUUL_SSH_PRIVATE_FILE)
    with open(ssh_key, 'w') as f:
        f.write(config('ssh-key'))
    os.chown(ssh_key, zuul_user.pw_uid, zuul_user.pw_gid)
    # 0o600 instead of the Python 2-only literal 0600: private key must be
    # readable only by its owner; valid syntax under Python 3 as well.
    os.chmod(ssh_key, 0o600)
def update_zuul_conf():
    """Re-render zuul.conf when any relevant config option changed.

    Returns True when zuul.conf was regenerated (callers should restart the
    Zuul services), False when nothing relevant changed.
    """
    configs = config()
    services_restart = False
    # A new SSH key can be written without restarting the services.
    if configs.changed('ssh-key'):
        generate_zuul_ssh_key()
    configs_keys = ['gearman-port', 'gerrit-server', 'username', 'zuul-url',
                    'status-url', 'git-user-name', 'git-user-email',
                    'services', 'gearman-server' ]
    for key in configs_keys:
        if configs.changed(key):
            services_restart = True
            break
    if not services_restart:
        log("Zuul config values didn't change.")
        return False
    # Persist the new values so .changed() reports correctly on later hooks.
    configs.save()
    render_zuul_conf()
    return services_restart
def configure_apache2():
    """Configure Apache to serve Zuul: vhost, modules, site switch-over."""
    render_zuul_vhost_conf()
    commands = (
        ["a2enmod", "cgi"],            # required apache2 module
        ["a2enmod", "rewrite"],        # required apache2 module
        ["a2dissite", "000-default"],  # disable default website
        ["a2ensite", "zuul"],          # enable zuul website
    )
    for command in commands:
        subprocess.check_call(command)
    service_restart('apache2')
# HOOKS METHODS
def install():
    """Install hook: packages, Zuul + gear from git, user, dirs and config."""
    subprocess.check_call(['apt-get', 'install', '-y'] + PACKAGES)
    install_from_git(ZUUL_GIT_URL, config('version'))
    install_from_git(GEAR_GIT_URL, GEAR_STABLE_TAG)
    try:
        pwd.getpwnam(ZUUL_USER)
    except KeyError:
        # create Zuul user
        subprocess.check_call(["useradd", "--create-home", ZUUL_USER])
    directories = [ ZUUL_CONF_DIR, ZUUL_SSH_DIR, ZUUL_RUN_DIR, ZUUL_STATE_DIR,
                    ZUUL_GIT_DIR, ZUUL_LOG_DIR, ZUUL_MERGER_RUN_DIR ]
    zuul_user = pwd.getpwnam(ZUUL_USER)
    for directory in directories:
        if not os.path.exists(directory):
            os.mkdir(directory)
            # 0o755 instead of the Python 2-only literal 0755; matches the
            # octal style used elsewhere in this file and works on Python 3.
            os.chmod(directory, 0o755)
            os.chown(directory, zuul_user.pw_uid, zuul_user.pw_gid)
    generate_zuul_ssh_key()
    # generate configuration files
    render_logging_conf()
    render_gearman_logging_conf()
    render_layout()
    render_zuul_conf()
    create_zuul_upstart_services()
    download_openstack_functions()
    configure_apache2()
def is_service_enabled(service):
    """Return True if *service* appears in the comma-separated 'services'
    charm config option (surrounding whitespace is ignored)."""
    enabled = [name.strip() for name in config('services').split(',')]
    return service in enabled
def config_changed():
    """Config-changed hook: restart Zuul services if zuul.conf was updated."""
    if not update_zuul_conf():
        return
    # zuul.conf was regenerated, so the running services must pick it up.
    if is_service_enabled("server") or is_service_enabled("gearman"):
        service_restart('zuul-server')
    if is_service_enabled("merger"):
        service_restart('zuul-merger')
    log('Zuul services restarted')
def start():
    """Start hook: bring up whichever Zuul services this unit runs."""
    needs_server = is_service_enabled("server") or is_service_enabled("gearman")
    if needs_server:
        service_start('zuul-server')
    if is_service_enabled("merger"):
        service_start('zuul-merger')
    log('Zuul services started.')
def stop():
    """Stop hook: shut down whichever Zuul services this unit runs."""
    needs_server = is_service_enabled("server") or is_service_enabled("gearman")
    if needs_server:
        service_stop('zuul-server')
    if is_service_enabled("merger"):
        service_stop('zuul-merger')
    log('Zuul services stopped.')
def zuul_relation_changed():
    """Relation hook: publish this unit's gearman endpoint and open its port.

    NOTE: ``unit_get`` was used here without being imported, so this hook
    raised NameError at runtime; it is now imported with the other hookenv
    helpers at the top of the file.
    """
    gearman_port = config('gearman-port')
    relation_set(gearman_ip=unit_get('public-address'),
                 gearman_port=gearman_port)
    open_port(gearman_port)
def zuul_relation_broken():
    """Relation-broken hook: stop exposing the gearman port."""
    gearman_port = config('gearman-port')
    close_port(gearman_port)
| cloudbase/zuul-charm | hooks/hooks.py | Python | apache-2.0 | 9,009 |
/**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.openshift.client.mock;
import io.fabric8.openshift.api.model.BuildBuilder;
import io.fabric8.openshift.api.model.BuildList;
import io.fabric8.openshift.api.model.BuildListBuilder;
import io.fabric8.openshift.api.model.Group;
import io.fabric8.openshift.api.model.GroupBuilder;
import io.fabric8.openshift.api.model.GroupList;
import io.fabric8.openshift.api.model.GroupListBuilder;
import io.fabric8.openshift.api.model.User;
import io.fabric8.openshift.api.model.UserBuilder;
import io.fabric8.openshift.api.model.UserList;
import io.fabric8.openshift.api.model.UserListBuilder;
import io.fabric8.openshift.api.model.WebHookTrigger;
import io.fabric8.openshift.client.OpenShiftClient;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Test;
import static org.easymock.EasyMock.eq;
/**
 * Tests for {@link OpenShiftMockClient}: expectations are recorded through the
 * mock DSL, {@code replay()} produces an {@link OpenShiftClient}, and the
 * recorded behaviour (including repeat counts) is asserted and verified.
 */
public class OpenShiftMockClientTest {
  /** Verifies anyTimes()/once() repeat semantics when getting builds by name. */
  @Test
  public void testGetBuild() {
    OpenShiftMockClient mock = new OpenShiftMockClient();
    mock.builds().inNamespace(eq("ns1")).withName("build1").get().andReturn(new BuildBuilder()
      .withNewMetadata().withName("build1").endMetadata()
      .build()
    ).anyTimes();
    mock.builds().inNamespace("ns1").withName("build2").get().andReturn(new BuildBuilder()
      .withNewMetadata().withName("build2").endMetadata()
      .build()
    ).once();
    // Second expectation for build2: after the .once() above is consumed,
    // the next lookup yields null.
    mock.builds().inNamespace("ns1").withName("build2").get().andReturn(null).once();
    OpenShiftClient client = mock.replay();
    //We are testing the internal anyTimes() on namespace and name.
    for (int i = 0; i < 5; i++) {
      Assert.assertNotNull(client.builds().inNamespace("ns1").withName("build1").get());
    }
    Assert.assertNotNull(client.builds().inNamespace("ns1").withName("build2").get());
    Assert.assertNull(client.builds().inNamespace("ns1").withName("build2").get());
  }
  /** Verifies label-filtered build listings return the recorded item counts. */
  @Test
  public void testListBuilds() {
    OpenShiftMockClient mock = new OpenShiftMockClient();
    mock.builds().inNamespace("ns1").withLabel("component", "f1").list().andReturn(new BuildListBuilder()
      .addNewItem()
      .withNewMetadata().withName("build1").endMetadata()
      .endItem()
      .addNewItem()
      .withNewMetadata().withName("build2").endMetadata()
      .endItem()
      .build()
    ).anyTimes();
    mock.builds().inNamespace("ns1").withLabel("component", "f2").list().andReturn(new BuildListBuilder()
      .addNewItem()
      .withNewMetadata().withName("build1").endMetadata()
      .endItem()
      .build()
    ).anyTimes();
    OpenShiftClient client = mock.replay();
    for (int i=0;i<5;i++) {
      BuildList result = client.builds().inNamespace("ns1").withLabel("component", "f1").list();
      Assert.assertNotNull(result);
      Assert.assertEquals(2, result.getItems().size());
    }
    for (int i=0;i<5;i++) {
      BuildList result = client.builds().inNamespace("ns1").withLabel("component", "f2").list();
      Assert.assertNotNull(result);
      Assert.assertEquals(1, result.getItems().size());
    }
  }
  /** Verifies a recorded webhook trigger expectation is consumed exactly once. */
  @Test
  public void testWebHookTrigger() {
    OpenShiftMockClient mock = new OpenShiftMockClient();
    mock.buildConfigs().inNamespace("ns1").withName("build1").withSecret("secret101").withType("github").trigger(EasyMock.<WebHookTrigger>anyObject()).andReturn(null).once();
    OpenShiftClient client = mock.replay();
    client.buildConfigs().inNamespace("ns1").withName("build1").withSecret("secret101").withType("github").trigger(new WebHookTrigger());
    mock.verify();
  }
  /** Verifies user listings (plain and label-filtered) expose identities. */
  @Test
  public void testUsers() {
    OpenShiftMockClient mock = new OpenShiftMockClient();
    User myuser = new UserBuilder()
      .withNewMetadata()
      .withName("myuser")
      .endMetadata()
      .withFullName("My User")
      .addToIdentities("myuser")
      .build();
    mock.users().list().andReturn(new UserListBuilder().addToItems(myuser).build()).anyTimes();
    mock.users().withLabel("key1", "value1").list().andReturn(new UserListBuilder().addToItems(myuser).build()).anyTimes();
    OpenShiftClient client = mock.replay();
    UserList userList = client.users().list();
    Assert.assertFalse(userList.getItems().isEmpty());
    userList = client.users().withLabel("key1","value1").list();
    Assert.assertFalse(userList.getItems().isEmpty());
    Assert.assertTrue(userList.getItems().get(0).getIdentities().contains("myuser"));
    mock.verify();
  }
  /** Verifies group listings (plain and label-filtered) expose their members. */
  @Test
  public void testGroups() {
    OpenShiftMockClient mock = new OpenShiftMockClient();
    Group mygroup = new GroupBuilder()
      .withNewMetadata()
      .withName("mygroup")
      .endMetadata()
      .addToUsers("myuser", "myotheruser")
      .build();
    mock.groups().list().andReturn(new GroupListBuilder().addToItems(mygroup).build()).anyTimes();
    mock.groups().withLabel("key1", "value1").list().andReturn(new GroupListBuilder().addToItems(mygroup).build()).anyTimes();
    OpenShiftClient client = mock.replay();
    GroupList groupList = client.groups().list();
    Assert.assertFalse(groupList.getItems().isEmpty());
    groupList = client.groups().withLabel("key1","value1").list();
    Assert.assertFalse(groupList.getItems().isEmpty());
    Assert.assertTrue(groupList.getItems().get(0).getUsers().contains("myuser"));
    Assert.assertTrue(groupList.getItems().get(0).getUsers().contains("myotheruser"));
    mock.verify();
  }
}
| rouzwawi/kubernetes-client | openshift-mock/src/test/java/io/fabric8/openshift/client/mock/OpenShiftMockClientTest.java | Java | apache-2.0 | 5,979 |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.authorization.admin.representation;
import org.keycloak.authorization.AuthorizationProvider;
import org.keycloak.authorization.Decision;
import org.keycloak.authorization.admin.PolicyEvaluationService;
import org.keycloak.authorization.common.KeycloakIdentity;
import org.keycloak.authorization.model.PermissionTicket;
import org.keycloak.authorization.model.Policy;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.policy.evaluation.Result;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.models.RealmModel;
import org.keycloak.models.UserModel;
import org.keycloak.representations.AccessToken;
import org.keycloak.representations.idm.authorization.DecisionEffect;
import org.keycloak.representations.idm.authorization.PolicyEvaluationResponse;
import org.keycloak.representations.idm.authorization.PolicyEvaluationResponse.PolicyResultRepresentation;
import org.keycloak.representations.idm.authorization.PolicyRepresentation;
import org.keycloak.representations.idm.authorization.ResourceRepresentation;
import org.keycloak.representations.idm.authorization.ScopeRepresentation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
/**
 * Builds the admin-console {@link PolicyEvaluationResponse} from the raw
 * policy-evaluation results: assembles an RPT-style access token, converts
 * each {@link Result} to its REST representation and groups the per-resource
 * outcomes so each resource appears once with its combined decision.
 */
public class PolicyEvaluationResponseBuilder {
    public static PolicyEvaluationResponse build(PolicyEvaluationService.EvaluationDecisionCollector decision, ResourceServer resourceServer, AuthorizationProvider authorization, KeycloakIdentity identity) {
        PolicyEvaluationResponse response = new PolicyEvaluationResponse();
        List<PolicyEvaluationResponse.EvaluationResultRepresentation> resultsRep = new ArrayList<>();
        AccessToken accessToken = identity.getAccessToken();
        AccessToken.Authorization authorizationData = new AccessToken.Authorization();
        authorizationData.setPermissions(decision.results());
        accessToken.setAuthorization(authorizationData);
        ClientModel clientModel = authorization.getRealm().getClientById(resourceServer.getId());
        // The RPT must be addressed to the resource server's client.
        if (!accessToken.hasAudience(clientModel.getClientId())) {
            accessToken.audience(clientModel.getClientId());
        }
        response.setRpt(accessToken);
        Collection<Result> results = decision.getResults();
        // Overall status: a single DENY anywhere denies the whole evaluation.
        if (results.stream().anyMatch(evaluationResult -> evaluationResult.getEffect().equals(Decision.Effect.DENY))) {
            response.setStatus(DecisionEffect.DENY);
        } else {
            response.setStatus(DecisionEffect.PERMIT);
        }
        for (Result result : results) {
            PolicyEvaluationResponse.EvaluationResultRepresentation rep = new PolicyEvaluationResponse.EvaluationResultRepresentation();
            if (result.getEffect() == Decision.Effect.DENY) {
                rep.setStatus(DecisionEffect.DENY);
            } else {
                rep.setStatus(DecisionEffect.PERMIT);
            }
            resultsRep.add(rep);
            if (result.getPermission().getResource() != null) {
                ResourceRepresentation resource = new ResourceRepresentation();
                resource.setId(result.getPermission().getResource().getId());
                resource.setName(result.getPermission().getResource().getName());
                rep.setResource(resource);
            } else {
                // Scope-only permission: synthesize a display-only resource.
                ResourceRepresentation resource = new ResourceRepresentation();
                resource.setName("Any Resource with Scopes " + result.getPermission().getScopes().stream().map(Scope::getName).collect(Collectors.toList()));
                rep.setResource(resource);
            }
            rep.setScopes(result.getPermission().getScopes().stream().map(scope -> {
                ScopeRepresentation representation = new ScopeRepresentation();
                representation.setId(scope.getId());
                representation.setName(scope.getName());
                return representation;
            }).collect(Collectors.toList()));
            List<PolicyEvaluationResponse.PolicyResultRepresentation> policies = new ArrayList<>();
            for (Result.PolicyResult policy : result.getResults()) {
                PolicyResultRepresentation policyRep = toRepresentation(policy, authorization);
                // Resource-type policies report the resource's own scopes.
                if ("resource".equals(policy.getPolicy().getType())) {
                    policyRep.getPolicy().setScopes(result.getPermission().getResource().getScopes().stream().map(Scope::getName).collect(Collectors.toSet()));
                }
                policies.add(policyRep);
            }
            rep.setPolicies(policies);
        }
        resultsRep.sort(Comparator.comparing(o -> o.getResource().getName()));
        // Merge the per-permission representations so each resource id appears
        // once; PERMIT wins over DENY when merging duplicate entries.
        Map<String, PolicyEvaluationResponse.EvaluationResultRepresentation> groupedResults = new HashMap<>();
        resultsRep.forEach(evaluationResultRepresentation -> {
            PolicyEvaluationResponse.EvaluationResultRepresentation result = groupedResults.get(evaluationResultRepresentation.getResource().getId());
            ResourceRepresentation resource = evaluationResultRepresentation.getResource();
            if (result == null) {
                groupedResults.put(resource.getId(), evaluationResultRepresentation);
                result = evaluationResultRepresentation;
            }
            if (result.getStatus().equals(DecisionEffect.PERMIT) || (evaluationResultRepresentation.getStatus().equals(DecisionEffect.PERMIT) && result.getStatus().equals(DecisionEffect.DENY))) {
                result.setStatus(DecisionEffect.PERMIT);
            }
            List<ScopeRepresentation> scopes = result.getScopes();
            if (DecisionEffect.PERMIT.equals(result.getStatus())) {
                result.setAllowedScopes(scopes);
            }
            // Display name embeds the granted scopes (or a generic label when
            // the entry is not tied to a concrete resource).
            if (resource.getId() != null) {
                if (!scopes.isEmpty()) {
                    result.getResource().setName(evaluationResultRepresentation.getResource().getName() + " with scopes " + scopes.stream().flatMap((Function<ScopeRepresentation, Stream<?>>) scopeRepresentation -> Arrays.asList(scopeRepresentation.getName()).stream()).collect(Collectors.toList()));
                } else {
                    result.getResource().setName(evaluationResultRepresentation.getResource().getName());
                }
            } else {
                result.getResource().setName("Any Resource with Scopes " + scopes.stream().flatMap((Function<ScopeRepresentation, Stream<?>>) scopeRepresentation -> Arrays.asList(scopeRepresentation.getName()).stream()).collect(Collectors.toList()));
            }
            // Union of the policy results from every merged entry.
            List<PolicyEvaluationResponse.PolicyResultRepresentation> policies = result.getPolicies();
            for (PolicyEvaluationResponse.PolicyResultRepresentation policy : new ArrayList<>(evaluationResultRepresentation.getPolicies())) {
                if (!policies.contains(policy)) {
                    policies.add(policy);
                }
            }
        });
        response.setResults(groupedResults.values().stream().collect(Collectors.toList()));
        return response;
    }
    /**
     * Converts a single {@link Result.PolicyResult} (and, recursively, its
     * associated policies) to its REST representation.  UMA policies get a
     * human-readable description derived from the backing permission ticket.
     */
    private static PolicyEvaluationResponse.PolicyResultRepresentation toRepresentation(Result.PolicyResult result, AuthorizationProvider authorization) {
        PolicyEvaluationResponse.PolicyResultRepresentation policyResultRep = new PolicyEvaluationResponse.PolicyResultRepresentation();
        PolicyRepresentation representation = new PolicyRepresentation();
        Policy policy = result.getPolicy();
        representation.setId(policy.getId());
        representation.setName(policy.getName());
        representation.setType(policy.getType());
        representation.setDecisionStrategy(policy.getDecisionStrategy());
        representation.setDescription(policy.getDescription());
        if ("uma".equals(representation.getType())) {
            Map<String, String> filters = new HashMap<>();
            filters.put(PermissionTicket.POLICY, policy.getId());
            List<PermissionTicket> tickets = authorization.getStoreFactory().getPermissionTicketStore().find(filters, policy.getResourceServer().getId(), -1, 1);
            if (!tickets.isEmpty()) {
                KeycloakSession keycloakSession = authorization.getKeycloakSession();
                RealmModel realm = authorization.getRealm();
                PermissionTicket ticket = tickets.get(0);
                UserModel userOwner = keycloakSession.users().getUserById(ticket.getOwner(), realm);
                UserModel requester = keycloakSession.users().getUserById(ticket.getRequester(), realm);
                String resourceOwner;
                // The ticket owner may be a user or a client.
                if (userOwner != null) {
                    resourceOwner = getUserEmailOrUserName(userOwner);
                } else {
                    ClientModel clientOwner = realm.getClientById(ticket.getOwner());
                    resourceOwner = clientOwner.getClientId();
                }
                representation.setDescription("Resource owner (" + resourceOwner + ") grants access to " + getUserEmailOrUserName(requester));
            } else {
                String description = representation.getDescription();
                if (description != null) {
                    representation.setDescription(description + " (User-Managed Policy)");
                } else {
                    representation.setDescription("User-Managed Policy");
                }
            }
        }
        representation.setResources(policy.getResources().stream().map(resource -> resource.getName()).collect(Collectors.toSet()));
        Set<String> scopeNames = policy.getScopes().stream().map(scope -> scope.getName()).collect(Collectors.toSet());
        representation.setScopes(scopeNames);
        policyResultRep.setPolicy(representation);
        if (result.getEffect() == Decision.Effect.DENY) {
            policyResultRep.setStatus(DecisionEffect.DENY);
            policyResultRep.setScopes(representation.getScopes());
        } else {
            policyResultRep.setStatus(DecisionEffect.PERMIT);
        }
        policyResultRep.setAssociatedPolicies(result.getAssociatedPolicies().stream().map(policy1 -> toRepresentation(policy1, authorization)).collect(Collectors.toList()));
        return policyResultRep;
    }
    /** Prefers the user's e-mail for display, falling back to the username. */
    private static String getUserEmailOrUserName(UserModel user) {
        return (user.getEmail() != null ? user.getEmail() : user.getUsername());
    }
}
| mhajas/keycloak | services/src/main/java/org/keycloak/authorization/admin/representation/PolicyEvaluationResponseBuilder.java | Java | apache-2.0 | 11,531 |
package types
import (
"bytes"
"encoding/json"
"strings"
"testing"
"github.com/gogo/protobuf/proto"
"github.com/stretchr/testify/assert"
cmn "github.com/tendermint/tmlibs/common"
)
// TestMarshalJSON checks that zero-valued response fields are omitted from
// the JSON encoding and that a populated ResponseCheckTx round-trips intact.
func TestMarshalJSON(t *testing.T) {
	b, err := json.Marshal(&ResponseDeliverTx{})
	assert.Nil(t, err)
	// Do not include empty fields.
	assert.False(t, strings.Contains(string(b), "code"))

	r1 := ResponseCheckTx{
		Code:      1,
		Data:      []byte("hello"),
		GasWanted: 43,
		Tags: []cmn.KVPair{
			// Keyed fields: required by `go vet` for composite literals of
			// structs from other packages, and robust to field reordering.
			{Key: []byte("pho"), Value: []byte("bo")},
		},
	}
	b, err = json.Marshal(&r1)
	assert.Nil(t, err)

	var r2 ResponseCheckTx
	err = json.Unmarshal(b, &r2)
	assert.Nil(t, err)
	assert.Equal(t, r1, r2)
}
func TestWriteReadMessageSimple(t *testing.T) {
	// Each message must come back identical after a write/read round trip.
	messages := []proto.Message{
		&RequestEcho{Message: "Hello"},
	}
	for _, original := range messages {
		var buf bytes.Buffer
		assert.Nil(t, WriteMessage(original, &buf))

		decoded := new(RequestEcho)
		assert.Nil(t, ReadMessage(&buf, decoded))
		assert.Equal(t, original, decoded)
	}
}
func TestWriteReadMessage(t *testing.T) {
	// A Header must survive a write/read round trip unchanged.
	messages := []proto.Message{
		&Header{NumTxs: 4},
		// TODO: add the rest
	}
	for _, original := range messages {
		var buf bytes.Buffer
		assert.Nil(t, WriteMessage(original, &buf))

		decoded := new(Header)
		assert.Nil(t, ReadMessage(&buf, decoded))
		assert.Equal(t, original, decoded)
	}
}
func TestWriteReadMessage2(t *testing.T) {
	phrase := "hello-world"
	// Round-trip a response that exercises several field types.
	messages := []proto.Message{
		&ResponseCheckTx{
			Data:      []byte(phrase),
			Log:       phrase,
			GasWanted: 10,
			Tags: []cmn.KVPair{
				{[]byte("abc"), []byte("def")},
			},
			// Fee: cmn.KI64Pair{
		},
		// TODO: add the rest
	}
	for _, original := range messages {
		var buf bytes.Buffer
		assert.Nil(t, WriteMessage(original, &buf))

		decoded := new(ResponseCheckTx)
		assert.Nil(t, ReadMessage(&buf, decoded))
		assert.Equal(t, original, decoded)
	}
}
| tendermint/tmsp | types/messages_test.go | GO | apache-2.0 | 1,896 |
/*
** Copyright 2016 Centreon
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
**
** For more information : contact@centreon.com
*/
#ifndef CCB_BAM_BOOL_NOT_EQUAL_HH
#define CCB_BAM_BOOL_NOT_EQUAL_HH
#include "com/centreon/broker/bam/bool_binary_operator.hh"
#include "com/centreon/broker/namespace.hh"
CCB_BEGIN()
namespace bam {
/**
 * @class bool_not_equal bool_not_equal.hh
 * "com/centreon/broker/bam/bool_not_equal.hh"
 * @brief Not-equal (!=) operator.
 *
 * In the context of expression computation, bool_not_equal checks
 * whether or not two operands are not equal.
 */
class bool_not_equal : public bool_binary_operator {
 public:
  // Default-constructible; copy construction and copy assignment are
  // deliberately deleted.
  bool_not_equal() = default;
  bool_not_equal(bool_not_equal const&) = delete;
  ~bool_not_equal() noexcept = default;
  bool_not_equal& operator=(bool_not_equal const&) = delete;
  // Evaluate the inequality of the two operands held by the base class
  // (hard vs. soft state variants — see bool_binary_operator).
  double value_hard();
  double value_soft();
};
} // namespace bam
CCB_END()
#endif // !CCB_BAM_BOOL_NOT_EQUAL_HH
| centreon/centreon-broker | bam/inc/com/centreon/broker/bam/bool_not_equal.hh | C++ | apache-2.0 | 1,459 |
package com.zl.pattern.command;
import java.util.ArrayList;
public class Buttons {
    private LeftCommand leftCommand;     // command object that moves left
    private RightCommand rightCommand;   // command object that moves right
    private UpCommand upCommand;         // command object that moves up
    private DownCommand downCommand;     // command object that moves down
    private RevokeCommand revokeCommand; // command object that undoes a move
    // History of executed move commands, in execution order.
    private ArrayList<Command> commandList = new ArrayList<Command>();

    /**
     * Prints every command recorded in the history, followed by a newline.
     */
    public void getCommandList(){
        for(Command c : commandList){
            c.getCommand();
        }
        System.out.println("");
    }

    /**
     * Sets the command object used when the "left" button is pressed.
     *
     * @param leftCommand the move-left command
     */
    public void setLeftCommand(LeftCommand leftCommand){
        this.leftCommand = leftCommand;
    }

    /**
     * Sets the command object used when the "right" button is pressed.
     *
     * @param rightCommand the move-right command
     */
    public void setRightCommand(RightCommand rightCommand){
        this.rightCommand = rightCommand;
    }

    /**
     * Sets the command object used when the "up" button is pressed.
     *
     * @param upCommand the move-up command
     */
    public void setUpCommand(UpCommand upCommand){
        this.upCommand = upCommand;
    }

    /**
     * Sets the command object used when the "down" button is pressed.
     *
     * @param downCommand the move-down command
     */
    public void setDownCommand(DownCommand downCommand){
        this.downCommand = downCommand;
    }

    /**
     * Sets the command object used when the "revoke" (undo) button is pressed.
     *
     * @param revokeCommand the revoke command
     */
    public void setRevokeCommand(RevokeCommand revokeCommand){
        this.revokeCommand = revokeCommand;
    }

    /**
     * Handles a press of the "left" button: executes the command and
     * records it in the history.
     */
    public void toLeft(){
        leftCommand.execute();
        commandList.add(leftCommand);
    }

    /**
     * Handles a press of the "right" button: executes the command and
     * records it in the history.
     */
    public void toRight(){
        rightCommand.execute();
        commandList.add(rightCommand);
    }

    /**
     * Handles a press of the "up" button: executes the command and
     * records it in the history.
     */
    public void toUp(){
        upCommand.execute();
        commandList.add(upCommand);
    }

    /**
     * Handles a press of the "down" button: executes the command and
     * records it in the history.
     */
    public void toDown(){
        downCommand.execute();
        commandList.add(downCommand);
    }

    /**
     * Handles a press of the "revoke" (undo) button: executes the revoke
     * command and removes the most recent entry from the history.
     * Does nothing when the history is empty — the original code called
     * remove(size() - 1) unguarded and threw IndexOutOfBoundsException.
     */
    public void toRevoke(){
        if (commandList.isEmpty()) {
            return;
        }
        revokeCommand.execute();
        commandList.remove(commandList.size() - 1);
    }
}
| Pluckypan/bigAndroid | Pattern/src/com/zl/pattern/command/Buttons.java | Java | apache-2.0 | 2,068 |
using System;
namespace TestCodeMetrics.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Describes a type model.
/// </summary>
public abstract class ModelDescription
{
    /// <summary>Help-page documentation text for the model, if any.</summary>
    public string Documentation { get; set; }
    /// <summary>The CLR <see cref="Type"/> being described.</summary>
    public Type ModelType { get; set; }
    /// <summary>Display name of the model.</summary>
    public string Name { get; set; }
} | nanovazquez/test-code-metrics | TestCodeMetrics/Areas/HelpPage/ModelDescriptions/ModelDescription.cs | C# | apache-2.0 | 353
package xyz.yhsj.mvprodemo.model.empty;
import java.util.List;
/**
* Created by LOVE on 2016/1/27.
*/
public class AdditionInfoEntity {
/**
* locale : zh_CN
* source : ["xiaomi"]
* status : 0
* timestamp : 1449046526332
*/
private String locale;
private String status;
private long timestamp;
private List<String> source;
public void setLocale(String locale) {
this.locale = locale;
}
public void setStatus(String status) {
this.status = status;
}
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
public void setSource(List<String> source) {
this.source = source;
}
public String getLocale() {
return locale;
}
public String getStatus() {
return status;
}
public long getTimestamp() {
return timestamp;
}
public List<String> getSource() {
return source;
}
}
| yhsj0919/MVPro | app/src/main/java/xyz/yhsj/mvprodemo/model/empty/AdditionInfoEntity.java | Java | apache-2.0 | 970 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceResult;
/**
 * Result of a DisassociateSubnetCidrBlock request: carries the ID of the
 * subnet and information about its IPv6 CIDR block association.
 * NOTE: generated by the AWS Java SDK code generator — do not hand-edit logic.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DisassociateSubnetCidrBlockResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * Information about the IPv6 CIDR block association.
     * </p>
     */
    private SubnetIpv6CidrBlockAssociation ipv6CidrBlockAssociation;

    /**
     * <p>
     * The ID of the subnet.
     * </p>
     */
    private String subnetId;

    /**
     * <p>
     * Information about the IPv6 CIDR block association.
     * </p>
     *
     * @param ipv6CidrBlockAssociation
     *        Information about the IPv6 CIDR block association.
     */
    public void setIpv6CidrBlockAssociation(SubnetIpv6CidrBlockAssociation ipv6CidrBlockAssociation) {
        this.ipv6CidrBlockAssociation = ipv6CidrBlockAssociation;
    }

    /**
     * <p>
     * Information about the IPv6 CIDR block association.
     * </p>
     *
     * @return Information about the IPv6 CIDR block association.
     */
    public SubnetIpv6CidrBlockAssociation getIpv6CidrBlockAssociation() {
        return this.ipv6CidrBlockAssociation;
    }

    /**
     * <p>
     * Information about the IPv6 CIDR block association.
     * </p>
     *
     * @param ipv6CidrBlockAssociation
     *        Information about the IPv6 CIDR block association.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DisassociateSubnetCidrBlockResult withIpv6CidrBlockAssociation(SubnetIpv6CidrBlockAssociation ipv6CidrBlockAssociation) {
        setIpv6CidrBlockAssociation(ipv6CidrBlockAssociation);
        return this;
    }

    /**
     * <p>
     * The ID of the subnet.
     * </p>
     *
     * @param subnetId
     *        The ID of the subnet.
     */
    public void setSubnetId(String subnetId) {
        this.subnetId = subnetId;
    }

    /**
     * <p>
     * The ID of the subnet.
     * </p>
     *
     * @return The ID of the subnet.
     */
    public String getSubnetId() {
        return this.subnetId;
    }

    /**
     * <p>
     * The ID of the subnet.
     * </p>
     *
     * @param subnetId
     *        The ID of the subnet.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DisassociateSubnetCidrBlockResult withSubnetId(String subnetId) {
        setSubnetId(subnetId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getIpv6CidrBlockAssociation() != null)
            sb.append("Ipv6CidrBlockAssociation: ").append(getIpv6CidrBlockAssociation()).append(",");
        if (getSubnetId() != null)
            sb.append("SubnetId: ").append(getSubnetId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof DisassociateSubnetCidrBlockResult == false)
            return false;
        DisassociateSubnetCidrBlockResult other = (DisassociateSubnetCidrBlockResult) obj;
        if (other.getIpv6CidrBlockAssociation() == null ^ this.getIpv6CidrBlockAssociation() == null)
            return false;
        if (other.getIpv6CidrBlockAssociation() != null && other.getIpv6CidrBlockAssociation().equals(this.getIpv6CidrBlockAssociation()) == false)
            return false;
        if (other.getSubnetId() == null ^ this.getSubnetId() == null)
            return false;
        if (other.getSubnetId() != null && other.getSubnetId().equals(this.getSubnetId()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getIpv6CidrBlockAssociation() == null) ? 0 : getIpv6CidrBlockAssociation().hashCode());
        hashCode = prime * hashCode + ((getSubnetId() == null) ? 0 : getSubnetId().hashCode());
        return hashCode;
    }

    @Override
    public DisassociateSubnetCidrBlockResult clone() {
        try {
            return (DisassociateSubnetCidrBlockResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| aws/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/DisassociateSubnetCidrBlockResult.java | Java | apache-2.0 | 5,561 |
namespace CustomTextBox
{
partial class RegexTextBox
{
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;

    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }

    #region 组件设计器生成的代码

    /// <summary>
    /// Required method for Designer support — do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.SuspendLayout();
        this.ResumeLayout(false);
    }

    #endregion
}
| haowenbiao/His6C- | 控件/TextBoxWithButton/TextBoxWithButton/RegexTextBox.Designer.cs | C# | apache-2.0 | 1,023 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package View;
import Control.M01pegaCtrl;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.JOptionPane;
/**
*
* @author Thinkpad
*/
public class M01pegaView extends javax.swing.JInternalFrame {
/**
 * Builds the employee (pegawai) form: initializes the components, fills the
 * table from M01pegaCtrl, resets the edit state and shows the frame.
 */
public M01pegaView() {
    initComponents();
    M01pegaCtrl controller = new M01pegaCtrl();
    tblPegawai.setModel(controller.getDaftarPegawai());
    setEditStatus(false);
    setVisible(true);
}
/**
 * Lays out and wires all Swing components of this form (NetBeans-generated).
 * This method is called from within the constructor to initialize the form.
 * WARNING: Do NOT modify this code. The content of this method is always
 * regenerated by the Form Editor.
 */
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
    jScrollPane1 = new javax.swing.JScrollPane();
    tblPegawai = new javax.swing.JTable();
    jPanel1 = new javax.swing.JPanel();
    jPanel2 = new javax.swing.JPanel();
    txtNik = new javax.swing.JTextField();
    jLabel6 = new javax.swing.JLabel();
    jLabel2 = new javax.swing.JLabel();
    jLabel1 = new javax.swing.JLabel();
    txtGender = new javax.swing.JTextField();
    txtPassword = new javax.swing.JTextField();
    jLabel5 = new javax.swing.JLabel();
    txtNama = new javax.swing.JTextField();
    jLabel3 = new javax.swing.JLabel();
    txtStatus = new javax.swing.JTextField();
    txtUsername = new javax.swing.JTextField();
    jLabel4 = new javax.swing.JLabel();
    txtTelepon = new javax.swing.JTextField();
    jLabel7 = new javax.swing.JLabel();
    jLabel8 = new javax.swing.JLabel();
    txtAlamat = new javax.swing.JTextField();
    jLabel9 = new javax.swing.JLabel();
    txtAgama = new javax.swing.JTextField();
    jLabel10 = new javax.swing.JLabel();
    txtTglLahir = new javax.swing.JTextField();
    jLabel11 = new javax.swing.JLabel();
    txtEmail = new javax.swing.JTextField();
    jLabel12 = new javax.swing.JLabel();
    txtTglMasuk = new javax.swing.JTextField();
    jLabel13 = new javax.swing.JLabel();
    cmbKdJaba = new javax.swing.JComboBox();
    jPanel3 = new javax.swing.JPanel();
    btnTambah = new javax.swing.JButton();
    btnCari = new javax.swing.JButton();
    btnEdit = new javax.swing.JButton();
    btnClear = new javax.swing.JButton();
    btnPrint = new javax.swing.JButton();
    btnHapus = new javax.swing.JButton();
    btnBatal = new javax.swing.JButton();
    setPreferredSize(new java.awt.Dimension(1132, 600));
    jScrollPane1.setPreferredSize(null);
    tblPegawai.setModel(new javax.swing.table.DefaultTableModel(
        new Object [][] {
            {null, null, null, null},
            {null, null, null, null},
            {null, null, null, null},
            {null, null, null, null}
        },
        new String [] {
            "Title 1", "Title 2", "Title 3", "Title 4"
        }
    ));
    tblPegawai.addMouseListener(new java.awt.event.MouseAdapter() {
        public void mouseClicked(java.awt.event.MouseEvent evt) {
            tblPegawaiMouseClicked(evt);
        }
    });
    jScrollPane1.setViewportView(tblPegawai);
    jPanel2.setBorder(javax.swing.BorderFactory.createEtchedBorder());
    jPanel2.setPreferredSize(new java.awt.Dimension(270, 232));
    txtNik.setPreferredSize(new java.awt.Dimension(160, 20));
    txtNik.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            txtNikActionPerformed(evt);
        }
    });
    txtNik.addKeyListener(new java.awt.event.KeyAdapter() {
        public void keyReleased(java.awt.event.KeyEvent evt) {
            txtNikKeyReleased(evt);
        }
    });
    jLabel6.setText("Status");
    jLabel2.setText("Nama");
    jLabel1.setText("NIK");
    txtGender.setPreferredSize(new java.awt.Dimension(140, 20));
    txtPassword.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel5.setText("Gender");
    txtNama.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel3.setText("Username");
    txtStatus.setPreferredSize(new java.awt.Dimension(140, 20));
    txtUsername.setPreferredSize(new java.awt.Dimension(140, 20));
    txtUsername.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            txtUsernameActionPerformed(evt);
        }
    });
    jLabel4.setText("Password");
    txtTelepon.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel7.setText("Telepon");
    jLabel8.setText("Alamat");
    txtAlamat.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel9.setText("Agama");
    txtAgama.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel10.setText("Tanggal Lahir");
    txtTglLahir.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel11.setText("Email");
    txtEmail.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel12.setText("Tanggal Masuk");
    txtTglMasuk.setPreferredSize(new java.awt.Dimension(140, 20));
    jLabel13.setText("Kode Jabatan");
    cmbKdJaba.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Item 1", "Item 2", "Item 3", "Item 4" }));
    javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
    jPanel2.setLayout(jPanel2Layout);
    jPanel2Layout.setHorizontalGroup(
        jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel2Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jPanel2Layout.createSequentialGroup()
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(jLabel6)
                        .addComponent(jLabel5)
                        .addComponent(jLabel4)
                        .addComponent(jLabel3)
                        .addComponent(jLabel2)
                        .addComponent(jLabel1)
                        .addComponent(jLabel7)
                        .addComponent(jLabel8)
                        .addComponent(jLabel9))
                    .addGap(33, 33, 33)
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(txtNama, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtUsername, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtPassword, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtGender, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtStatus, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtTelepon, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtAlamat, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtAgama, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtNik, javax.swing.GroupLayout.DEFAULT_SIZE, 171, Short.MAX_VALUE)))
                .addGroup(jPanel2Layout.createSequentialGroup()
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(jLabel12)
                        .addComponent(jLabel11)
                        .addComponent(jLabel13)
                        .addComponent(jLabel10))
                    .addGap(10, 10, 10)
                    .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(txtTglLahir, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtEmail, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(txtTglMasuk, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(cmbKdJaba, 0, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))
            .addContainerGap())
    );
    jPanel2Layout.setVerticalGroup(
        jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel2Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel1)
                .addComponent(txtNik, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel2)
                .addComponent(txtNama, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel3)
                .addComponent(txtUsername, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel4)
                .addComponent(txtPassword, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel5)
                .addComponent(txtGender, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel6)
                .addComponent(txtStatus, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel7)
                .addComponent(txtTelepon, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel8)
                .addComponent(txtAlamat, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel9)
                .addComponent(txtAgama, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel10)
                .addComponent(txtTglLahir, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel11)
                .addComponent(txtEmail, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel12)
                .addComponent(txtTglMasuk, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(jLabel13)
                .addComponent(cmbKdJaba, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    jPanel3.setBorder(javax.swing.BorderFactory.createEtchedBorder());
    btnTambah.setText("Tambah");
    btnTambah.setPreferredSize(new java.awt.Dimension(80, 23));
    btnTambah.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnTambahActionPerformed(evt);
        }
    });
    btnCari.setText("Cari");
    btnCari.setPreferredSize(new java.awt.Dimension(80, 23));
    btnCari.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnCariActionPerformed(evt);
        }
    });
    btnEdit.setText("Edit");
    btnEdit.setPreferredSize(new java.awt.Dimension(80, 23));
    btnEdit.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnEditActionPerformed(evt);
        }
    });
    btnClear.setText("Clear");
    btnClear.setPreferredSize(new java.awt.Dimension(80, 23));
    btnClear.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnClearActionPerformed(evt);
        }
    });
    btnPrint.setText("Print");
    btnPrint.setPreferredSize(new java.awt.Dimension(80, 23));
    btnPrint.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnPrintActionPerformed(evt);
        }
    });
    btnHapus.setText("Hapus");
    btnHapus.setPreferredSize(new java.awt.Dimension(80, 23));
    btnHapus.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnHapusActionPerformed(evt);
        }
    });
    btnBatal.setText("Batal");
    btnBatal.setPreferredSize(new java.awt.Dimension(80, 23));
    btnBatal.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnBatalActionPerformed(evt);
        }
    });
    javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
    jPanel3.setLayout(jPanel3Layout);
    jPanel3Layout.setHorizontalGroup(
        jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel3Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(jPanel3Layout.createSequentialGroup()
                    .addComponent(btnTambah, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnHapus, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnCari, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addGroup(jPanel3Layout.createSequentialGroup()
                    .addComponent(btnEdit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnClear, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                    .addComponent(btnPrint, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addComponent(btnBatal, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGap(10, 10, 10))
    );
    jPanel3Layout.setVerticalGroup(
        jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel3Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(btnTambah, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnHapus, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnCari, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGap(18, 18, 18)
            .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                .addComponent(btnEdit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnClear, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(btnPrint, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addGap(18, 18, 18)
            .addComponent(btnBatal, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addContainerGap())
    );
    javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
    jPanel1.setLayout(jPanel1Layout);
    jPanel1Layout.setHorizontalGroup(
        jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel1Layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, 276, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    jPanel1Layout.setVerticalGroup(
        jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(jPanel1Layout.createSequentialGroup()
            .addContainerGap()
            .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, 358, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addGap(18, 18, 18)
            .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
    );
    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
    getContentPane().setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
            .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 804, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addContainerGap())
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 549, Short.MAX_VALUE)
            .addContainerGap())
    );
    pack();
}// </editor-fold>//GEN-END:initComponents
/**
 * Copies the clicked row of the employee table into the form fields and
 * switches the form out of edit mode.
 */
private void tblPegawaiMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_tblPegawaiMouseClicked
    int tableData = tblPegawai.getSelectedRow();
    if (tableData < 0) {
        // Click landed outside any row (getSelectedRow() returns -1): nothing to show.
        return;
    }
    txtNik.setText(tblPegawai.getValueAt(tableData, 0).toString());
    txtNama.setText(tblPegawai.getValueAt(tableData, 1).toString());
    txtUsername.setText(tblPegawai.getValueAt(tableData, 2).toString());
    txtPassword.setText(tblPegawai.getValueAt(tableData, 3).toString());
    txtGender.setText(tblPegawai.getValueAt(tableData, 4).toString());
    txtStatus.setText(tblPegawai.getValueAt(tableData, 5).toString());
    // BUG FIX: column index was 65, which is out of range for this table
    // (columns 0-13 are read sequentially here); the phone number is column 6.
    txtTelepon.setText(tblPegawai.getValueAt(tableData, 6).toString());
    txtAlamat.setText(tblPegawai.getValueAt(tableData, 7).toString());
    txtAgama.setText(tblPegawai.getValueAt(tableData, 8).toString());
    txtTglLahir.setText(tblPegawai.getValueAt(tableData, 9).toString());
    txtEmail.setText(tblPegawai.getValueAt(tableData, 10).toString());
    txtTglMasuk.setText(tblPegawai.getValueAt(tableData, 11).toString());
    // NOTE(review): txtStatus is written twice (columns 5 and 12); the second
    // write wins. Confirm which column actually holds the status value.
    txtStatus.setText(tblPegawai.getValueAt(tableData, 12).toString());
    cmbKdJaba.setSelectedItem(tblPegawai.getValueAt(tableData, 13).toString());
    setEditStatus(false);
}//GEN-LAST:event_tblPegawaiMouseClicked
private void txtNikActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtNikActionPerformed
    // Auto-generated stub: fired when ENTER is pressed in the NIK field.
    // TODO add your handling code here:
}//GEN-LAST:event_txtNikActionPerformed
private void txtNikKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_txtNikKeyReleased
    // Normalize the NIK to upper case as the user types.
    String upperCased = txtNik.getText().toUpperCase();
    txtNik.setText(upperCased);
}//GEN-LAST:event_txtNikKeyReleased
private void txtUsernameActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtUsernameActionPerformed
    // Auto-generated stub: fired when ENTER is pressed in the username field.
    // TODO add your handling code here:
}//GEN-LAST:event_txtUsernameActionPerformed
private void btnTambahActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnTambahActionPerformed
    // Creates a new employee record from the form fields, clears the form and
    // refreshes the table.
    // Fix: the original condition used ||, which let a record be saved when only
    // one of NIK / Nama was filled in, contradicting the validation message below.
    if (!txtNik.getText().equals("") && !txtNama.getText().equals("")) {
        M01pegaCtrl cb = new M01pegaCtrl();
        cb.setNik(txtNik.getText());
        cb.setNama(txtNama.getText());
        cb.setUsername(txtUsername.getText());
        cb.setPassword(txtPassword.getText());
        cb.setGender(txtGender.getText());
        cb.setStatus(txtStatus.getText());
        cb.setTelepon(txtTelepon.getText());
        cb.setAlamat(txtAlamat.getText());
        cb.setAgama(txtAgama.getText());
        // Both date fields use the same dd-MM-yyyy pattern, so a single formatter
        // suffices (the original created a second formatter and never used it).
        DateFormat format = new SimpleDateFormat("dd-MM-yyyy");
        Date tglLahir = null;
        try {
            tglLahir = format.parse(txtTglLahir.getText());
        } catch (ParseException ex) {
            Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
        }
        cb.setTgllahir(tglLahir);
        cb.setEmail(txtEmail.getText());
        Date tglMasuk = null;
        try {
            tglMasuk = format.parse(txtTglMasuk.getText());
        } catch (ParseException ex) {
            Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
        }
        cb.setTglmasuk(tglMasuk);
        cb.setKdjab(cmbKdJaba.getSelectedItem().toString());
        cb.tambahPegawai();
        btnClearActionPerformed(evt);
        tblPegawai.setModel(cb.getDaftarPegawai());
    } else {
        // Fix: use an error icon to match the "Error" title (was INFORMATION_MESSAGE).
        JOptionPane.showInternalMessageDialog(this, "Kode Pegawai dan Nama Pegawai tidak boleh kosong", "Error", JOptionPane.ERROR_MESSAGE);
    }
}//GEN-LAST:event_btnTambahActionPerformed
private void btnCariActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCariActionPerformed
    // Searches for employees matching the currently filled form fields and
    // shows the result in the table.
    M01pegaCtrl cbm = new M01pegaCtrl();
    cbm.setNik(txtNik.getText());
    cbm.setNama(txtNama.getText());
    cbm.setUsername(txtUsername.getText());
    cbm.setGender(txtGender.getText());
    cbm.setStatus(txtStatus.getText());
    String tanggal = txtTglMasuk.getText();
    DateFormat format = new SimpleDateFormat("dd-MM-yyyy");
    Date date = null;
    try {
        date = format.parse(tanggal);
    } catch (ParseException ex) {
        Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
    }
    // Fix: SimpleDateFormat.parse returns a java.util.Date; the original cast it
    // to java.sql.Date, which always throws ClassCastException at runtime.
    // setTglmasuk is called with a java.util.Date elsewhere in this class
    // (btnTambah/btnEdit), so pass the parsed date directly.
    cbm.setTglmasuk(date);
    cbm.setKdjab(cmbKdJaba.getSelectedItem().toString());
    tblPegawai.setModel(cbm.getCariPegawai());
}//GEN-LAST:event_btnCariActionPerformed
private void btnEditActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnEditActionPerformed
    // Saves the edits made to the currently loaded employee record, resets the
    // form to its idle state and reloads the table.
    M01pegaCtrl cb = new M01pegaCtrl();
    cb.setNik(txtNik.getText());
    cb.setNama(txtNama.getText());
    cb.setUsername(txtUsername.getText());
    cb.setPassword(txtPassword.getText());
    cb.setGender(txtGender.getText());
    cb.setStatus(txtStatus.getText());
    cb.setTelepon(txtTelepon.getText());
    cb.setAlamat(txtAlamat.getText());
    cb.setAgama(txtAgama.getText());
    // Both date fields use the same dd-MM-yyyy pattern, so a single formatter
    // suffices. Fix: the original declared a second formatter (format2) for the
    // second field and then parsed with the first one anyway, leaving format2 dead.
    DateFormat format = new SimpleDateFormat("dd-MM-yyyy");
    Date tglLahir = null;
    try {
        tglLahir = format.parse(txtTglLahir.getText());
    } catch (ParseException ex) {
        Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
    }
    cb.setTgllahir(tglLahir);
    cb.setEmail(txtEmail.getText());
    Date tglMasuk = null;
    try {
        tglMasuk = format.parse(txtTglMasuk.getText());
    } catch (ParseException ex) {
        Logger.getLogger(M01pegaView.class.getName()).log(Level.SEVERE, null, ex);
    }
    cb.setTglmasuk(tglMasuk);
    cb.setKdjab(cmbKdJaba.getSelectedItem().toString());
    cb.editPegawai();
    setEditStatus(false);
    btnClearActionPerformed(evt);
    M01pegaCtrl cb2 = new M01pegaCtrl();
    tblPegawai.setModel(cb2.getDaftarPegawai());
}//GEN-LAST:event_btnEditActionPerformed
private void btnClearActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnClearActionPerformed
    // Resets all form fields (see clearText below).
    clearText();
}//GEN-LAST:event_btnClearActionPerformed
private void btnPrintActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnPrintActionPerformed
    // Delegates report printing to the controller.
    M01pegaCtrl cb = new M01pegaCtrl();
    cb.printPegawai();
}//GEN-LAST:event_btnPrintActionPerformed
private void btnHapusActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnHapusActionPerformed
    // Deletes the employee identified by the NIK currently shown in the form,
    // then resets the form and reloads the table.
    // NOTE(review): there is no confirmation dialog before deleting -- consider
    // asking the user to confirm before calling hapusPegawai().
    M01pegaCtrl cb = new M01pegaCtrl();
    cb.setNik(txtNik.getText());
    cb.hapusPegawai();
    setHapusStatus(false);
    btnClearActionPerformed(evt);
    M01pegaCtrl cb2 = new M01pegaCtrl();
    tblPegawai.setModel(cb2.getDaftarPegawai());
}//GEN-LAST:event_btnHapusActionPerformed
private void btnBatalActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnBatalActionPerformed
    // Cancels the current edit: leave edit mode, clear the form and reload the table.
    setEditStatus(false);
    btnClearActionPerformed(evt);
    M01pegaCtrl cb = new M01pegaCtrl();
    tblPegawai.setModel(cb.getDaftarPegawai());
}//GEN-LAST:event_btnBatalActionPerformed
private void setEditStatus(boolean status) {
    // Toggles the form between "edit in progress" (true) and idle (false).
    // Cari/Tambah are available only while idle; Edit only while editing;
    // Hapus and Batal stay enabled in both states.
    btnCari.setEnabled(!status);
    btnTambah.setEnabled(!status);
    btnHapus.setEnabled(true);
    btnEdit.setEnabled(status);
    btnBatal.setEnabled(true);
    // The primary key (NIK) is never editable from this screen.
    txtNik.setEditable(false);
    // All remaining input fields share the same editability as the edit state.
    javax.swing.JTextField[] editableFields = {
        txtNama, txtUsername, txtPassword, txtGender, txtStatus, txtTelepon,
        txtAlamat, txtAgama, txtTglLahir, txtEmail, txtTglMasuk
    };
    for (javax.swing.JTextField field : editableFields) {
        field.setEditable(status);
    }
    cmbKdJaba.setEnabled(status);
}
private void clearText() {
    // Blanks out every text input; the two date fields default to today's date
    // in the dd-MM-yyyy format used throughout this form.
    javax.swing.JTextField[] textFields = {
        txtNik, txtNama, txtUsername, txtPassword, txtGender, txtStatus,
        txtTelepon, txtAlamat, txtAgama, txtEmail
    };
    for (javax.swing.JTextField field : textFields) {
        field.setText("");
    }
    String today = new SimpleDateFormat("dd-MM-yyyy").format(new Date());
    txtTglLahir.setText(today);
    txtTglMasuk.setText(today);
}
private void setHapusStatus(boolean status) {
    // Toggles the form between "delete in progress" (true) and idle (false).
    // While a delete is pending, only Edit/Hapus/Batal are available and the
    // record's fields are locked; while idle the reverse holds.
    btnCari.setEnabled(!status);
    btnTambah.setEnabled(!status);
    btnClear.setEnabled(!status);
    btnEdit.setEnabled(status);
    btnHapus.setEnabled(status);
    btnBatal.setEnabled(status);
    // The primary key (NIK) is never editable from this screen.
    txtNik.setEditable(false);
    // All remaining input fields are editable only while no delete is pending.
    javax.swing.JTextField[] editableFields = {
        txtNama, txtUsername, txtPassword, txtGender, txtStatus, txtTelepon,
        txtAlamat, txtAgama, txtTglLahir, txtEmail, txtTglMasuk
    };
    for (javax.swing.JTextField field : editableFields) {
        field.setEditable(!status);
    }
    cmbKdJaba.setEnabled(!status);
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton btnBatal;
private javax.swing.JButton btnCari;
private javax.swing.JButton btnClear;
private javax.swing.JButton btnEdit;
private javax.swing.JButton btnHapus;
private javax.swing.JButton btnPrint;
private javax.swing.JButton btnTambah;
private javax.swing.JComboBox cmbKdJaba;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel10;
private javax.swing.JLabel jLabel11;
private javax.swing.JLabel jLabel12;
private javax.swing.JLabel jLabel13;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel3;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JTable tblPegawai;
private javax.swing.JTextField txtAgama;
private javax.swing.JTextField txtAlamat;
private javax.swing.JTextField txtEmail;
private javax.swing.JTextField txtGender;
private javax.swing.JTextField txtNama;
private javax.swing.JTextField txtNik;
private javax.swing.JTextField txtPassword;
private javax.swing.JTextField txtStatus;
private javax.swing.JTextField txtTelepon;
private javax.swing.JTextField txtTglLahir;
private javax.swing.JTextField txtTglMasuk;
private javax.swing.JTextField txtUsername;
// End of variables declaration//GEN-END:variables
}
| hidayanto56/KKP | KKP/src/View/M01pegaView.java | Java | apache-2.0 | 37,376 |
package net.bytebuddy.dynamic;
import lombok.EqualsAndHashCode;
import net.bytebuddy.description.annotation.AnnotationList;
import net.bytebuddy.description.annotation.AnnotationValue;
import net.bytebuddy.description.field.FieldDescription;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.method.ParameterDescription;
import net.bytebuddy.description.method.ParameterList;
import net.bytebuddy.description.modifier.ModifierContributor;
import net.bytebuddy.description.type.TypeDefinition;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.description.type.TypeList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.none;
/**
* A transformer is responsible for transforming an object into a compatible instance of the same type.
*
* @param <T> The type of the instance being transformed.
*/
public interface Transformer<T> {
/**
* Transforms the supplied target.
*
* @param instrumentedType The instrumented type that declares the target being transformed.
* @param target The target entity that is being transformed.
* @return The transformed instance.
*/
T transform(TypeDescription instrumentedType, T target);
/**
 * A non-operational transformer that returns the received instance.
 */
enum NoOp implements Transformer<Object> {

    /**
     * The singleton instance.
     */
    INSTANCE;

    /**
     * Creates a transformer in a type-safe manner.
     *
     * @param <T> The type of the transformed object.
     * @return A non-operational transformer.
     */
    @SuppressWarnings("unchecked")
    public static <T> Transformer<T> make() {
        // The unchecked cast is safe because transform returns its argument
        // unchanged, so the singleton works for any T.
        return (Transformer<T>) INSTANCE;
    }

    @Override
    public Object transform(TypeDescription instrumentedType, Object target) {
        return target;
    }
}
/**
 * A transformer for a field that delegates to another transformer that transforms a {@link net.bytebuddy.description.field.FieldDescription.Token}.
 */
@EqualsAndHashCode
class ForField implements Transformer<FieldDescription> {

    /**
     * The token transformer to apply to a transformed field.
     */
    private final Transformer<FieldDescription.Token> transformer;

    /**
     * Creates a new simple field transformer.
     *
     * @param transformer The token transformer to apply to a transformed field.
     */
    public ForField(Transformer<FieldDescription.Token> transformer) {
        this.transformer = transformer;
    }

    /**
     * Creates a field transformer that patches the transformed field by the given modifier contributors.
     *
     * @param modifierContributor The modifier contributors to apply.
     * @return A suitable field transformer.
     */
    public static Transformer<FieldDescription> withModifiers(ModifierContributor.ForField... modifierContributor) {
        return new ForField(new FieldModifierTransformer(Arrays.asList(modifierContributor)));
    }

    @Override
    public FieldDescription transform(TypeDescription instrumentedType, FieldDescription fieldDescription) {
        // Detach the field into a token (matching no type variables), transform the
        // token and reattach it as a view onto the instrumented type.
        return new TransformedField(instrumentedType,
                fieldDescription.getDeclaringType(),
                transformer.transform(instrumentedType, fieldDescription.asToken(none())),
                fieldDescription.asDefined());
    }

    /**
     * A transformer for a field's modifiers.
     */
    @EqualsAndHashCode
    protected static class FieldModifierTransformer implements Transformer<FieldDescription.Token> {

        /**
         * The list of modifier contributors to apply onto the transformed field token.
         */
        private final List<? extends ModifierContributor.ForField> modifierContributors;

        /**
         * Creates a new field token modifier for transforming a field's modifiers.
         *
         * @param modifierContributors The list of modifier contributors to apply onto the transformed field token.
         */
        public FieldModifierTransformer(List<? extends ModifierContributor.ForField> modifierContributors) {
            this.modifierContributors = modifierContributors;
        }

        @Override
        public FieldDescription.Token transform(TypeDescription instrumentedType, FieldDescription.Token target) {
            // Only the modifiers change; name, type and annotations are carried over as-is.
            return new FieldDescription.Token(target.getName(),
                    ModifierContributor.Resolver.of(modifierContributors).resolve(target.getModifiers()),
                    target.getType(),
                    target.getAnnotations());
        }
    }

    /**
     * An implementation of a transformed field.
     */
    protected static class TransformedField extends FieldDescription.AbstractBase {

        /**
         * The instrumented type for which this field is transformed.
         */
        private final TypeDescription instrumentedType;

        /**
         * The field's declaring type.
         */
        private final TypeDefinition declaringType;

        /**
         * A field token representing the transformed field.
         */
        private final FieldDescription.Token token;

        /**
         * The field's defined shape.
         */
        private final FieldDescription.InDefinedShape fieldDescription;

        /**
         * Creates a new transformed field.
         *
         * @param instrumentedType The instrumented type for which this field is transformed.
         * @param declaringType    The field's declaring type.
         * @param token            A field token representing the transformed field.
         * @param fieldDescription The field's defined shape.
         */
        protected TransformedField(TypeDescription instrumentedType,
                                   TypeDefinition declaringType,
                                   Token token,
                                   InDefinedShape fieldDescription) {
            this.instrumentedType = instrumentedType;
            this.declaringType = declaringType;
            this.token = token;
            this.fieldDescription = fieldDescription;
        }

        @Override
        public TypeDescription.Generic getType() {
            // Reattach the token's (detached) type to the instrumented type.
            return token.getType().accept(TypeDescription.Generic.Visitor.Substitutor.ForAttachment.of(instrumentedType));
        }

        @Override
        public AnnotationList getDeclaredAnnotations() {
            return token.getAnnotations();
        }

        @Override
        public TypeDefinition getDeclaringType() {
            return declaringType;
        }

        @Override
        public int getModifiers() {
            return token.getModifiers();
        }

        @Override
        public InDefinedShape asDefined() {
            return fieldDescription;
        }

        @Override
        public String getName() {
            return token.getName();
        }
    }
}
/**
 * A transformer for a method that delegates to another transformer that transforms a {@link net.bytebuddy.description.method.MethodDescription.Token}.
 */
@EqualsAndHashCode
class ForMethod implements Transformer<MethodDescription> {

    /**
     * The transformer to be applied.
     */
    private final Transformer<MethodDescription.Token> transformer;

    /**
     * Creates a new transforming method transformer.
     *
     * @param transformer The transformer to be applied.
     */
    public ForMethod(Transformer<MethodDescription.Token> transformer) {
        this.transformer = transformer;
    }

    /**
     * Creates a transformer that enforces the supplied modifier contributors. All ranges of each contributor are first cleared and then overridden
     * by the specified modifiers in the order they are supplied.
     *
     * @param modifierTransformer The modifier transformers in their application order.
     * @return A method transformer where each method's modifiers are adapted to the given modifiers.
     */
    public static Transformer<MethodDescription> withModifiers(ModifierContributor.ForMethod... modifierTransformer) {
        return new ForMethod(new MethodModifierTransformer(Arrays.asList(modifierTransformer)));
    }

    @Override
    public MethodDescription transform(TypeDescription instrumentedType, MethodDescription methodDescription) {
        // Detach the method into a token (matching no type variables), transform the
        // token and reattach it as a view onto the instrumented type.
        return new TransformedMethod(instrumentedType,
                methodDescription.getDeclaringType(),
                transformer.transform(instrumentedType, methodDescription.asToken(none())),
                methodDescription.asDefined());
    }

    /**
     * A transformer for a method's modifiers.
     */
    @EqualsAndHashCode
    protected static class MethodModifierTransformer implements Transformer<MethodDescription.Token> {

        /**
         * The modifier contributors to apply on each transformation.
         */
        private final List<? extends ModifierContributor.ForMethod> modifierContributors;

        /**
         * Creates a new modifier transformation.
         *
         * @param modifierContributors The modifier contributors to apply on each transformation in their application order.
         */
        public MethodModifierTransformer(List<? extends ModifierContributor.ForMethod> modifierContributors) {
            this.modifierContributors = modifierContributors;
        }

        @Override
        public MethodDescription.Token transform(TypeDescription instrumentedType, MethodDescription.Token target) {
            // Only the modifiers change; all other token properties are carried over as-is.
            return new MethodDescription.Token(target.getName(),
                    ModifierContributor.Resolver.of(modifierContributors).resolve(target.getModifiers()),
                    target.getTypeVariableTokens(),
                    target.getReturnType(),
                    target.getParameterTokens(),
                    target.getExceptionTypes(),
                    target.getAnnotations(),
                    target.getDefaultValue(),
                    target.getReceiverType());
        }
    }

    /**
     * The transformed method.
     */
    protected static class TransformedMethod extends MethodDescription.AbstractBase {

        /**
         * The instrumented type for which this method is transformed.
         */
        private final TypeDescription instrumentedType;

        /**
         * The method's declaring type.
         */
        private final TypeDefinition declaringType;

        /**
         * The method representing the transformed method.
         */
        private final MethodDescription.Token token;

        /**
         * The defined shape of the transformed method.
         */
        private final MethodDescription.InDefinedShape methodDescription;

        /**
         * Creates a new transformed method.
         *
         * @param instrumentedType  The instrumented type for which this method is transformed.
         * @param declaringType     The method's declaring type.
         * @param token             The method representing the transformed method.
         * @param methodDescription The defined shape of the transformed method.
         */
        protected TransformedMethod(TypeDescription instrumentedType,
                                    TypeDefinition declaringType,
                                    Token token,
                                    InDefinedShape methodDescription) {
            this.instrumentedType = instrumentedType;
            this.declaringType = declaringType;
            this.token = token;
            this.methodDescription = methodDescription;
        }

        @Override
        public TypeList.Generic getTypeVariables() {
            return new TypeList.Generic.ForDetachedTypes.OfTypeVariables(this, token.getTypeVariableTokens(), new AttachmentVisitor());
        }

        @Override
        public TypeDescription.Generic getReturnType() {
            return token.getReturnType().accept(new AttachmentVisitor());
        }

        @Override
        public ParameterList<?> getParameters() {
            return new TransformedParameterList();
        }

        @Override
        public TypeList.Generic getExceptionTypes() {
            return new TypeList.Generic.ForDetachedTypes(token.getExceptionTypes(), new AttachmentVisitor());
        }

        @Override
        public AnnotationList getDeclaredAnnotations() {
            return token.getAnnotations();
        }

        @Override
        public String getInternalName() {
            return token.getName();
        }

        @Override
        public TypeDefinition getDeclaringType() {
            return declaringType;
        }

        @Override
        public int getModifiers() {
            return token.getModifiers();
        }

        @Override
        public AnnotationValue<?, ?> getDefaultValue() {
            return token.getDefaultValue();
        }

        @Override
        public InDefinedShape asDefined() {
            return methodDescription;
        }

        @Override
        public TypeDescription.Generic getReceiverType() {
            // The receiver type is optional; only attach it when present.
            TypeDescription.Generic receiverType = token.getReceiverType();
            return receiverType == null
                    ? TypeDescription.Generic.UNDEFINED
                    : receiverType.accept(new AttachmentVisitor());
        }

        /**
         * A parameter list representing the transformed method's parameters.
         */
        protected class TransformedParameterList extends ParameterList.AbstractBase<ParameterDescription> {

            @Override
            public ParameterDescription get(int index) {
                return new TransformedParameter(index, token.getParameterTokens().get(index));
            }

            @Override
            public int size() {
                return token.getParameterTokens().size();
            }
        }

        /**
         * A transformed method's parameter.
         */
        protected class TransformedParameter extends ParameterDescription.AbstractBase {

            /**
             * The index of the transformed method.
             */
            private final int index;

            /**
             * The token representing the transformed method parameter's properties.
             */
            private final ParameterDescription.Token parameterToken;

            /**
             * Creates a transformed parameter.
             *
             * @param index          The index of the transformed method.
             * @param parameterToken The token representing the transformed method parameter's properties.
             */
            protected TransformedParameter(int index, ParameterDescription.Token parameterToken) {
                this.index = index;
                this.parameterToken = parameterToken;
            }

            @Override
            public TypeDescription.Generic getType() {
                return parameterToken.getType().accept(new AttachmentVisitor());
            }

            @Override
            public MethodDescription getDeclaringMethod() {
                return TransformedMethod.this;
            }

            @Override
            public int getIndex() {
                return index;
            }

            @Override
            public boolean isNamed() {
                return parameterToken.getName() != null;
            }

            @Override
            public boolean hasModifiers() {
                return parameterToken.getModifiers() != null;
            }

            @Override
            public String getName() {
                // Fall back to the base implementation's synthetic name when unnamed.
                return isNamed()
                        ? parameterToken.getName()
                        : super.getName();
            }

            @Override
            public int getModifiers() {
                return hasModifiers()
                        ? parameterToken.getModifiers()
                        : super.getModifiers();
            }

            @Override
            public AnnotationList getDeclaredAnnotations() {
                return parameterToken.getAnnotations();
            }

            @Override
            public InDefinedShape asDefined() {
                return methodDescription.getParameters().get(index);
            }
        }

        /**
         * A visitor that attaches type variables based on the transformed method's type variables and the instrumented type. Binding type
         * variables directly for this method is not possible as type variables are already resolved for the instrumented type such
         * that it is required to bind variables for the instrumented type directly.
         */
        protected class AttachmentVisitor extends TypeDescription.Generic.Visitor.Substitutor.WithoutTypeSubstitution {

            @Override
            public TypeDescription.Generic onTypeVariable(TypeDescription.Generic typeVariable) {
                // Prefer a type variable declared by this method; otherwise look it up
                // on the instrumented type (covering variables of enclosing scopes).
                TypeList.Generic candidates = getTypeVariables().filter(named(typeVariable.getSymbol()));
                TypeDescription.Generic attached = candidates.isEmpty()
                        ? instrumentedType.findVariable(typeVariable.getSymbol())
                        : candidates.getOnly();
                if (attached == null) {
                    throw new IllegalArgumentException("Cannot attach undefined variable: " + typeVariable);
                } else {
                    // Preserve the detached variable's annotations on the attached variable.
                    return new TypeDescription.Generic.OfTypeVariable.WithAnnotationOverlay(attached, typeVariable);
                }
            }

            @Override
            public int hashCode() {
                return TransformedMethod.this.hashCode();
            }

            @Override
            public boolean equals(Object other) {
                return this == other || (other instanceof AttachmentVisitor && ((AttachmentVisitor) other).getOuter().equals(TransformedMethod.this));
            }

            /**
             * Returns the outer instance.
             *
             * @return The outer instance.
             */
            private TransformedMethod getOuter() {
                return TransformedMethod.this;
            }
        }
    }
}
/**
 * A compound transformer that applies several transformers in sequence.
 *
 * @param <S> The type of the transformed instance.
 */
@EqualsAndHashCode
class Compound<S> implements Transformer<S> {

    /**
     * The list of transformers to apply in their application order.
     */
    private final List<Transformer<S>> transformers;

    /**
     * Creates a new compound transformer.
     *
     * @param transformer The list of transformers to apply in their application order.
     */
    @SuppressWarnings("unchecked") // In absence of @SafeVarargs for Java 6
    public Compound(Transformer<S>... transformer) {
        this(Arrays.asList(transformer));
    }

    /**
     * Creates a new compound transformer.
     *
     * @param transformers The list of transformers to apply in their application order.
     */
    public Compound(List<? extends Transformer<S>> transformers) {
        // Flatten nested compounds and drop no-op transformers so that the
        // resulting list contains only transformers that actually do work.
        this.transformers = new ArrayList<Transformer<S>>();
        for (Transformer<S> candidate : transformers) {
            if (candidate instanceof Compound) {
                this.transformers.addAll(((Compound<S>) candidate).transformers);
            } else if (!(candidate instanceof NoOp)) {
                this.transformers.add(candidate);
            }
        }
    }

    @Override
    public S transform(TypeDescription instrumentedType, S target) {
        // Feed each transformer's output into the next one.
        S current = target;
        for (Transformer<S> transformer : transformers) {
            current = transformer.transform(instrumentedType, current);
        }
        return current;
    }
}
}
| mches/byte-buddy | byte-buddy-dep/src/main/java/net/bytebuddy/dynamic/Transformer.java | Java | apache-2.0 | 21,436 |
import {
defaultReactions,
details,
mergeInEmptyDraft,
} from "../detail-definitions.js";
import vocab from "../../app/service/vocab.js";
/**
 * Use-case definition for a "Persona" atom.
 *
 * Declares the draft content a newly created persona starts with (its type and
 * sockets), the reactions permitted on each socket (which counterpart socket
 * types and use cases may connect), and the editable detail fields.
 */
export const persona = {
  identifier: "persona",
  label: "Persona",
  icon: undefined, //No Icon For Persona UseCase (uses identicon)
  draft: {
    ...mergeInEmptyDraft({
      content: {
        type: [vocab.WON.PersonaCompacted],
        // Sockets exposed by a persona; keys are socket ids, values socket types.
        sockets: {
          "#chatSocket": vocab.CHAT.ChatSocketCompacted,
          "#holderSocket": vocab.HOLD.HolderSocketCompacted,
          "#buddySocket": vocab.BUDDY.BuddySocketCompacted,
          // "#worksForSocket": vocab.WXSCHEMA.WorksForSocketCompacted, // TODO: Currently not in use in favour of more generic member -> Role -> member relation
          "#memberOfSocket": vocab.WXSCHEMA.MemberOfSocketCompacted,
          "#interestSocket": vocab.WXPERSONA.InterestSocketCompacted,
          "#expertiseSocket": vocab.WXPERSONA.ExpertiseSocketCompacted,
          // "#sReviewSocket": vocab.WXSCHEMA.ReviewSocketCompacted, //TODO: exclude the ability to review a persona for now
          "#sEventSocket": vocab.WXSCHEMA.EventSocketCompacted,
          "#sAttendeeInverseSocket":
            vocab.WXSCHEMA.AttendeeInverseSocketCompacted,
          // "#PrimaryAccountableOfSocket":
          //   vocab.WXVALUEFLOWS.PrimaryAccountableOfSocketCompacted,
          // "#CustodianOfSocket": vocab.WXVALUEFLOWS.CustodianOfSocketCompacted,
          // "#ActorActivitySocket":
          //   vocab.WXVALUEFLOWS.ActorActivitySocketCompacted, //TODO VALUEFLOWS SOCKETS CURRENTLY EXCLUDED
        },
      },
      seeks: {},
    }),
  },
  // Reactions: for each own socket type, the counterpart socket types that may
  // connect, restricted to the listed use-case identifiers ("*" = any).
  reactions: {
    ...defaultReactions,
    // [vocab.WXVALUEFLOWS.ActorActivitySocketCompacted]: {
    //   [vocab.WXVALUEFLOWS.ActorSocketCompacted]: {
    //     useCaseIdentifiers: ["activity"],
    //   },
    // },
    // [vocab.WXVALUEFLOWS.PrimaryAccountableOfSocketCompacted]: {
    //   [vocab.WXVALUEFLOWS.PrimaryAccountableSocketCompacted]: {
    //     useCaseIdentifiers: ["resource"],
    //   },
    // },
    // [vocab.WXVALUEFLOWS.CustodianOfSocketCompacted]: {
    //   [vocab.WXVALUEFLOWS.CustodianSocketCompacted]: {
    //     useCaseIdentifiers: ["resource"],
    //   },
    // },
    // [vocab.WXVALUEFLOWS.ResourceActivitySocketCompacted]: {
    //   [vocab.WXVALUEFLOWS.ActorSocket]: {
    //     useCaseIdentifiers: ["action"],
    //   },
    // },
    // TODO: Currently not in use in favour of more generic member -> Role -> member relation
    // [vocab.WXSCHEMA.WorksForSocketCompacted]: {
    //   [vocab.WXSCHEMA.WorksForInverseSocketCompacted]: {
    //     useCaseIdentifiers: ["organization"],
    //   },
    // },
    [vocab.WXSCHEMA.MemberOfSocketCompacted]: {
      [vocab.WXSCHEMA.MemberSocketCompacted]: {
        useCaseIdentifiers: ["organization"],
        // UI labels shown when picking/adding a counterpart, split by whether
        // the counterpart atom is owned by the current user.
        labels: {
          owned: {
            default: "Organization",
            addNew: "Add to New Organization",
            picker: "Pick a Organization to join",
          },
          nonOwned: {
            default: "Organization",
            addNew: "Add to New Organization",
            picker: "Pick a Organization to join",
          },
        },
      },
    },
    [vocab.HOLD.HolderSocketCompacted]: {
      [vocab.HOLD.HoldableSocketCompacted]: {
        useCaseIdentifiers: ["*"],
        refuseNonOwned: true,
      },
    },
    [vocab.WXPERSONA.InterestSocketCompacted]: {
      [vocab.WXPERSONA.InterestOfSocketCompacted]: {
        useCaseIdentifiers: [
          "afterpartyInterest",
          "breakfastInterest",
          "cyclingInterest",
          "genericInterest",
          "lunchInterest",
          "pokemonInterest",
          "sightseeingInterest",
        ],
        refuseNonOwned: true,
      },
    },
    [vocab.WXPERSONA.ExpertiseSocketCompacted]: {
      [vocab.WXPERSONA.ExpertiseOfSocketCompacted]: {
        useCaseIdentifiers: ["*"],
        refuseNonOwned: true,
      },
    },
    [vocab.CHAT.ChatSocketCompacted]: {
      [vocab.CHAT.ChatSocketCompacted]: {
        useCaseIdentifiers: ["persona"],
        refuseOwned: true,
      },
      [vocab.GROUP.GroupSocketCompacted]: {
        useCaseIdentifiers: ["*"],
      },
    },
    [vocab.WXSCHEMA.AttendeeInverseSocketCompacted]: {
      [vocab.WXSCHEMA.AttendeeSocketCompacted]: {
        useCaseIdentifiers: ["event"],
        labels: {
          owned: {
            default: "Join Event",
            addNew: "Join New Event",
            picker: "Pick an Event to join",
          },
          nonOwned: {
            default: "Join Event",
            addNew: "Join New Event",
            picker: "Pick an Event to join",
          },
        },
      },
    },
  },
  // Editable detail fields; only the persona name is required.
  details: {
    personaName: { ...details.personaName, mandatory: true },
    description: { ...details.description },
    website: { ...details.website },
    images: { ...details.images },
    location: { ...details.location },
  },
  seeksDetails: {},
};
| researchstudio-sat/webofneeds | webofneeds/won-owner-webapp/src/main/webapp/config/usecases/uc-persona.js | JavaScript | apache-2.0 | 4,969 |
// Code generated by Microsoft (R) AutoRest Code Generator 0.9.7.0
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
using System;
using System.Linq;
using System.Net.Http;
using LeapDayTinkering.RaspberryPi;
using Microsoft.Rest;
namespace LeapDayTinkering.RaspberryPi
{
public partial class LeapDayTinkeringAppService : ServiceClient<LeapDayTinkeringAppService>, ILeapDayTinkeringAppService
{
/// <summary>
/// Backing field for <see cref="BaseUri"/>.
/// </summary>
private Uri _baseUri;

/// <summary>
/// The base URI of the service.
/// </summary>
public Uri BaseUri
{
    get { return this._baseUri; }
    set { this._baseUri = value; }
}
/// <summary>
/// Backing field for <see cref="Credentials"/>.
/// </summary>
private ServiceClientCredentials _credentials;

/// <summary>
/// Credentials for authenticating with the service.
/// </summary>
public ServiceClientCredentials Credentials
{
    get { return this._credentials; }
    set { this._credentials = value; }
}
/// <summary>
/// Backing field for <see cref="Device"/>.
/// </summary>
private IDevice _device;

/// <summary>
/// Gets the Device operations group bound to this client
/// (initialized in the constructors).
/// </summary>
public virtual IDevice Device
{
    get { return this._device; }
}
/// <summary>
/// Backing field for <see cref="Sensor"/>.
/// </summary>
private ISensor _sensor;

/// <summary>
/// Gets the Sensor operations group bound to this client
/// (initialized in the constructors).
/// </summary>
public virtual ISensor Sensor
{
    get { return this._sensor; }
}
/// <summary>
/// Initializes a new instance of the LeapDayTinkeringAppService class
/// with the default service endpoint.
/// </summary>
public LeapDayTinkeringAppService()
    : base()
{
    this._device = new Device(this);
    this._sensor = new Sensor(this);
    this._baseUri = new Uri("https://leapdaytinkeringappservice.azurewebsites.net");
}
/// <summary>
/// Initializes a new instance of the LeapDayTinkeringAppService class
/// with the default service endpoint.
/// </summary>
/// <param name='handlers'>
/// Optional. The set of delegating handlers to insert in the http
/// client pipeline.
/// </param>
public LeapDayTinkeringAppService(params DelegatingHandler[] handlers)
    : base(handlers)
{
    this._device = new Device(this);
    this._sensor = new Sensor(this);
    this._baseUri = new Uri("https://leapdaytinkeringappservice.azurewebsites.net");
}
/// <summary>
/// Initializes a new instance of the LeapDayTinkeringAppService class
/// with the default service endpoint.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The set of delegating handlers to insert in the http
/// client pipeline.
/// </param>
public LeapDayTinkeringAppService(HttpClientHandler rootHandler, params DelegatingHandler[] handlers)
    : base(rootHandler, handlers)
{
    this._device = new Device(this);
    this._sensor = new Sensor(this);
    this._baseUri = new Uri("https://leapdaytinkeringappservice.azurewebsites.net");
}
/// <summary>
/// Initializes a new instance of the LeapDayTinkeringAppService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The set of delegating handlers to insert in the http
/// client pipeline.
/// </param>
public LeapDayTinkeringAppService(Uri baseUri, params DelegatingHandler[] handlers)
: this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this._baseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the LeapDayTinkeringAppService class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials for authenticating with the service.
/// </param>
/// <param name='handlers'>
/// Optional. The set of delegating handlers to insert in the http
/// client pipeline.
/// </param>
public LeapDayTinkeringAppService(ServiceClientCredentials credentials, params DelegatingHandler[] handlers)
: this(handlers)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the LeapDayTinkeringAppService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials for authenticating with the service.
/// </param>
/// <param name='handlers'>
/// Optional. The set of delegating handlers to insert in the http
/// client pipeline.
/// </param>
public LeapDayTinkeringAppService(Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers)
: this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._baseUri = baseUri;
this._credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
}
}
| bradygaster/LeapDayTinkering | LeapDayTinkering.RaspberryPi/LeapDayTinkeringAppService/LeapDayTinkeringAppService.cs | C# | apache-2.0 | 5,952 |
/**
* \file dcs/math/stats/function/prediction_interval.hpp
*
* \brief Computes a two-sided prediction interval at the specified level.
*
* \author Marco Guazzone (marco.guazzone@gmail.com)
*
* <hr/>
*
* Copyright 2009 Marco Guazzone (marco.guazzone@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef DCS_MATH_STATS_FUNCTION_PREDICTION_INTERVAL_HPP
#define DCS_MATH_STATS_FUNCTION_PREDICTION_INTERVAL_HPP
#include <utility>
namespace dcs { namespace math { namespace stats {
/**
 * \brief Computes a two-sided prediction interval at the specified level.
 *
 * \tparam SampleT The sample type.
 * \tparam ValueT The value type.
 *
 * \param sample The sample over which computing the prediction interval.
 * \param level The level at which computing the prediction interval
 *  (defaults to 0.95).
 * \return A pair of values (of type ValueT) giving the lower and upper limits
 *  of the prediction interval at level \a level.
 *
 * \note This header only declares the function; the definition is provided
 *  elsewhere in the library.
 *
 * \author Marco Guazzone (marco.guazzone@gmail.com)
 */
template <typename SampleT, typename ValueT>
::std::pair<ValueT,ValueT> prediction_interval(SampleT const& sample, ValueT level=0.95);
}}} // Namespace dcs::math::stats
#endif // DCS_MATH_STATS_FUNCTION_PREDICTION_INTERVAL_HPP
| sguazt/dcsxx-commons | inc/dcs/math/stats/function/prediction_interval.hpp | C++ | apache-2.0 | 1,737 |
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Template10.Mvvm;
using Windows.UI.Xaml.Navigation;
namespace Sample.ViewModels
{
public class MainPageViewModel : ViewModelBase
{
Services.MessageService.MessageService _MessageService;
public MainPageViewModel()
{
if (!Windows.ApplicationModel.DesignMode.DesignModeEnabled)
_MessageService = new Services.MessageService.MessageService();
}
public override void OnNavigatedTo(object parameter, NavigationMode mode, IDictionary<string, object> state)
{
Messages = _MessageService.GetMessages();
Selected = Messages.First();
}
ObservableCollection<Models.Message> _Messages = default(ObservableCollection<Models.Message>);
public ObservableCollection<Models.Message> Messages { get { return _Messages; } private set { Set(ref _Messages, value); } }
string _SearchText = default(string);
public string SearchText { get { return _SearchText; } set { Set(ref _SearchText, value); } }
Models.Message _Selected = default(Models.Message);
public Models.Message Selected
{
get { return _Selected; }
set
{
Set(ref _Selected, value);
if (value != null)
value.IsRead = true;
DeleteCommand.RaiseCanExecuteChanged();
}
}
DelegateCommand _DeleteCommand;
public DelegateCommand DeleteCommand => _DeleteCommand ?? (_DeleteCommand = new DelegateCommand(() =>
{
if (Selected != null)
{
_MessageService.DeleteMessage(Selected);
Selected = null;
}
}, () => { return Selected != null; }));
DelegateCommand _SearchCommand;
public DelegateCommand SearchCommand => _SearchCommand ?? (_SearchCommand = new DelegateCommand(() =>
{
Messages = _MessageService.Search(SearchText);
}));
DelegateCommand _ClearCommand;
public DelegateCommand ClearCommand => _ClearCommand ?? (_ClearCommand = new DelegateCommand(() =>
{
Messages = _MessageService.Search(SearchText = string.Empty);
}));
}
}
| MizzleMo/Template10 | Samples/MasterDetail/ViewModels/MainPageViewModel.cs | C# | apache-2.0 | 3,055 |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v9/enums/frequency_cap_level.proto
namespace Google\Ads\GoogleAds\V9\Enums\FrequencyCapLevelEnum;
use UnexpectedValueException;
/**
* The level on which the cap is to be applied (e.g ad group ad, ad group).
* Cap is applied to all the resources of this level.
*
* Protobuf type <code>google.ads.googleads.v9.enums.FrequencyCapLevelEnum.FrequencyCapLevel</code>
*/
class FrequencyCapLevel
{
    /**
     * Not specified.
     *
     * Generated from protobuf enum <code>UNSPECIFIED = 0;</code>
     */
    const UNSPECIFIED = 0;
    /**
     * Used for return value only. Represents value unknown in this version.
     *
     * Generated from protobuf enum <code>UNKNOWN = 1;</code>
     */
    const UNKNOWN = 1;
    /**
     * The cap is applied at the ad group ad level.
     *
     * Generated from protobuf enum <code>AD_GROUP_AD = 2;</code>
     */
    const AD_GROUP_AD = 2;
    /**
     * The cap is applied at the ad group level.
     *
     * Generated from protobuf enum <code>AD_GROUP = 3;</code>
     */
    const AD_GROUP = 3;
    /**
     * The cap is applied at the campaign level.
     *
     * Generated from protobuf enum <code>CAMPAIGN = 4;</code>
     */
    const CAMPAIGN = 4;
    // Reverse lookup table: constant value => constant name.
    private static $valueToName = [
        self::UNSPECIFIED => 'UNSPECIFIED',
        self::UNKNOWN => 'UNKNOWN',
        self::AD_GROUP_AD => 'AD_GROUP_AD',
        self::AD_GROUP => 'AD_GROUP',
        self::CAMPAIGN => 'CAMPAIGN',
    ];
    /**
     * Returns the constant name for a given enum value.
     *
     * @param int $value one of this class's constants
     * @return string the constant's name (e.g. 'CAMPAIGN')
     * @throws UnexpectedValueException if $value is not a known constant
     */
    public static function name($value)
    {
        if (!isset(self::$valueToName[$value])) {
            throw new UnexpectedValueException(sprintf(
                    'Enum %s has no name defined for value %s', __CLASS__, $value));
        }
        return self::$valueToName[$value];
    }
    /**
     * Returns the enum value for a given constant name.
     *
     * @param string $name constant name; matched case-insensitively
     *                     (it is upper-cased before lookup)
     * @return int the matching constant value
     * @throws UnexpectedValueException if $name is not a known constant
     */
    public static function value($name)
    {
        $const = __CLASS__ . '::' . strtoupper($name);
        if (!defined($const)) {
            throw new UnexpectedValueException(sprintf(
                    'Enum %s has no value defined for name %s', __CLASS__, $name));
        }
        return constant($const);
    }
}
// Adding a class alias for backwards compatibility with the previous class name.
class_alias(FrequencyCapLevel::class, \Google\Ads\GoogleAds\V9\Enums\FrequencyCapLevelEnum_FrequencyCapLevel::class);
| googleads/google-ads-php | src/Google/Ads/GoogleAds/V9/Enums/FrequencyCapLevelEnum/FrequencyCapLevel.php | PHP | apache-2.0 | 2,372 |
package net.webservicex;
import java.net.MalformedURLException;
import java.net.URL;
import javax.xml.namespace.QName;
import javax.xml.ws.WebEndpoint;
import javax.xml.ws.WebServiceClient;
import javax.xml.ws.WebServiceFeature;
import javax.xml.ws.Service;
/**
* This class was generated by Apache CXF 3.1.6
* 2016-04-19T12:37:26.508+03:00
* Generated source version: 3.1.6
*
*/
@WebServiceClient(name = "GeoIPService",
                  wsdlLocation = "file:src/main/resources/geoipservice.wsdl",
                  targetNamespace = "http://www.webservicex.net/")
public class GeoIPService extends Service {
    // Location of the packaged WSDL; stays null if the static initializer
    // below fails to parse the hard-coded URL (the failure is only logged).
    public final static URL WSDL_LOCATION;
    // Qualified names of the service and of its four ports, all declared
    // under the http://www.webservicex.net/ namespace.
    public final static QName SERVICE = new QName("http://www.webservicex.net/", "GeoIPService");
    public final static QName GeoIPServiceSoap = new QName("http://www.webservicex.net/", "GeoIPServiceSoap");
    public final static QName GeoIPServiceSoap12 = new QName("http://www.webservicex.net/", "GeoIPServiceSoap12");
    public final static QName GeoIPServiceHttpGet = new QName("http://www.webservicex.net/", "GeoIPServiceHttpGet");
    public final static QName GeoIPServiceHttpPost = new QName("http://www.webservicex.net/", "GeoIPServiceHttpPost");
    static {
        // Resolve the default WSDL once at class load; a malformed URL is
        // logged (not thrown) and WSDL_LOCATION is left null.
        URL url = null;
        try {
            url = new URL("file:src/main/resources/geoipservice.wsdl");
        } catch (MalformedURLException e) {
            java.util.logging.Logger.getLogger(GeoIPService.class.getName())
                .log(java.util.logging.Level.INFO,
                     "Can not initialize the default wsdl from {0}", "file:src/main/resources/geoipservice.wsdl");
        }
        WSDL_LOCATION = url;
    }
    public GeoIPService(URL wsdlLocation) {
        super(wsdlLocation, SERVICE);
    }
    public GeoIPService(URL wsdlLocation, QName serviceName) {
        super(wsdlLocation, serviceName);
    }
    public GeoIPService() {
        super(WSDL_LOCATION, SERVICE);
    }
    public GeoIPService(WebServiceFeature ... features) {
        super(WSDL_LOCATION, SERVICE, features);
    }
    public GeoIPService(URL wsdlLocation, WebServiceFeature ... features) {
        super(wsdlLocation, SERVICE, features);
    }
    public GeoIPService(URL wsdlLocation, QName serviceName, WebServiceFeature ... features) {
        super(wsdlLocation, serviceName, features);
    }
    /**
     * Returns the SOAP 1.1 port proxy.
     *
     * @return
     *     returns GeoIPServiceSoap
     */
    @WebEndpoint(name = "GeoIPServiceSoap")
    public GeoIPServiceSoap getGeoIPServiceSoap() {
        return super.getPort(GeoIPServiceSoap, GeoIPServiceSoap.class);
    }
    /**
     * Returns the SOAP 1.1 port proxy.
     *
     * @param features
     *     A list of {@link javax.xml.ws.WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values.
     * @return
     *     returns GeoIPServiceSoap
     */
    @WebEndpoint(name = "GeoIPServiceSoap")
    public GeoIPServiceSoap getGeoIPServiceSoap(WebServiceFeature... features) {
        return super.getPort(GeoIPServiceSoap, GeoIPServiceSoap.class, features);
    }
    /**
     * Returns the SOAP 1.2 port proxy (same port interface as SOAP 1.1).
     *
     * @return
     *     returns GeoIPServiceSoap
     */
    @WebEndpoint(name = "GeoIPServiceSoap12")
    public GeoIPServiceSoap getGeoIPServiceSoap12() {
        return super.getPort(GeoIPServiceSoap12, GeoIPServiceSoap.class);
    }
    /**
     * Returns the SOAP 1.2 port proxy (same port interface as SOAP 1.1).
     *
     * @param features
     *     A list of {@link javax.xml.ws.WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values.
     * @return
     *     returns GeoIPServiceSoap
     */
    @WebEndpoint(name = "GeoIPServiceSoap12")
    public GeoIPServiceSoap getGeoIPServiceSoap12(WebServiceFeature... features) {
        return super.getPort(GeoIPServiceSoap12, GeoIPServiceSoap.class, features);
    }
    /**
     * Returns the HTTP GET binding port proxy.
     *
     * @return
     *     returns GeoIPServiceHttpGet
     */
    @WebEndpoint(name = "GeoIPServiceHttpGet")
    public GeoIPServiceHttpGet getGeoIPServiceHttpGet() {
        return super.getPort(GeoIPServiceHttpGet, GeoIPServiceHttpGet.class);
    }
    /**
     * Returns the HTTP GET binding port proxy.
     *
     * @param features
     *     A list of {@link javax.xml.ws.WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values.
     * @return
     *     returns GeoIPServiceHttpGet
     */
    @WebEndpoint(name = "GeoIPServiceHttpGet")
    public GeoIPServiceHttpGet getGeoIPServiceHttpGet(WebServiceFeature... features) {
        return super.getPort(GeoIPServiceHttpGet, GeoIPServiceHttpGet.class, features);
    }
    /**
     * Returns the HTTP POST binding port proxy.
     *
     * @return
     *     returns GeoIPServiceHttpPost
     */
    @WebEndpoint(name = "GeoIPServiceHttpPost")
    public GeoIPServiceHttpPost getGeoIPServiceHttpPost() {
        return super.getPort(GeoIPServiceHttpPost, GeoIPServiceHttpPost.class);
    }
    /**
     * Returns the HTTP POST binding port proxy.
     *
     * @param features
     *     A list of {@link javax.xml.ws.WebServiceFeature} to configure on the proxy. Supported features not in the <code>features</code> parameter will have their default values.
     * @return
     *     returns GeoIPServiceHttpPost
     */
    @WebEndpoint(name = "GeoIPServiceHttpPost")
    public GeoIPServiceHttpPost getGeoIPServiceHttpPost(WebServiceFeature... features) {
        return super.getPort(GeoIPServiceHttpPost, GeoIPServiceHttpPost.class, features);
    }
}
| OlgaZueva/Java_PFT | soap-sample/src/main/java/net/webservicex/GeoIPService.java | Java | apache-2.0 | 5,440 |
package me.pjq.jacocoandroid.dummy;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Helper class for providing sample content for user interfaces created by
* Android template wizards.
* <p>
* TODO: Replace all uses of this class before publishing your app.
*/
public class DummyContent {
    /**
     * Sample (dummy) items, in insertion order.
     */
    public static List<DummyItem> ITEMS = new ArrayList<DummyItem>();
    /**
     * The same sample (dummy) items, keyed by their ID.
     */
    public static Map<String, DummyItem> ITEM_MAP = new HashMap<String, DummyItem>();
    static {
        // Seed the fixtures with three numbered placeholder entries.
        for (int i = 1; i <= 3; i++) {
            addItem(new DummyItem(String.valueOf(i), "Item " + i));
        }
    }
    // Registers one sample item in both the list and the ID map.
    private static void addItem(DummyItem sample) {
        ITEMS.add(sample);
        ITEM_MAP.put(sample.id, sample);
    }
    /**
     * A dummy item representing a piece of content.
     */
    public static class DummyItem {
        public String id;
        public String content;
        public DummyItem(String id, String content) {
            this.id = id;
            this.content = content;
        }
        @Override
        public String toString() {
            return content;
        }
    }
}
| pjq/androidjacoco | app/src/main/java/me/pjq/jacocoandroid/dummy/DummyContent.java | Java | apache-2.0 | 1,330 |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.user.client.ui;
import com.google.gwt.dom.builder.shared.HtmlBuilderFactory;
import com.google.gwt.dom.builder.shared.HtmlSpanBuilder;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.logical.shared.AttachEvent;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeHtmlBuilder;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
/**
* A type of widget that can wrap another widget, hiding the wrapped widget's
* methods. When added to a panel, a composite behaves exactly as if the widget
* it wraps had been added.
*
* <p>
* The composite is useful for creating a single widget out of an aggregate of
* multiple other widgets contained in a single panel.
* </p>
*
* <p>
* <h3>Example</h3>
* {@example com.google.gwt.examples.CompositeExample}
* </p>
*/
public abstract class Composite extends Widget implements IsRenderable {
  // The single child this composite wraps; after initWidget() its element
  // *is* this composite's element, so the two are merged in the DOM.
  private Widget widget;
  // Non-null only when the wrapped widget itself implements IsRenderable,
  // in which case claimElement/render delegate to it.
  private IsRenderable renderable;
  // Element claimed via claimElement() before the wrapped widget could
  // handle it; consumed by initializeClaimedElement().
  private Element elementToWrap;
  @Override
  public void claimElement(Element element) {
    if (renderable != null) {
      renderable.claimElement(element);
      setElement(widget.getElement());
    } else {
      // The wrapped widget cannot claim the element itself; remember it so
      // initializeClaimedElement() can splice the widget's element in later.
      this.elementToWrap = element;
    }
  }
  @Override
  public void initializeClaimedElement() {
    if (renderable != null) {
      renderable.initializeClaimedElement();
    } else {
      elementToWrap.getParentNode().replaceChild(widget.getElement(), elementToWrap);
    }
  }
  @Override
  public boolean isAttached() {
    // Before initWidget() there is no child, hence never attached.
    if (widget != null) {
      return widget.isAttached();
    }
    return false;
  }
  @Override
  public void onBrowserEvent(Event event) {
    // Fire any handler added to the composite itself.
    super.onBrowserEvent(event);
    // Delegate events to the widget.
    widget.onBrowserEvent(event);
  }
  @Override
  public SafeHtml render(RenderableStamper stamper) {
    if (renderable != null) {
      return renderable.render(stamper);
    } else {
      // Non-renderable child: emit a stamped placeholder <span> that will be
      // claimed and replaced later.
      HtmlSpanBuilder spanBuilder = HtmlBuilderFactory.get()
          .createSpanBuilder();
      stamper.stamp(spanBuilder).end();
      return spanBuilder.asSafeHtml();
    }
  }
  @Override
  public void render(RenderableStamper stamper, SafeHtmlBuilder builder) {
    if (renderable != null) {
      renderable.render(stamper, builder);
    } else {
      builder.append(render(stamper));
    }
  }
  /**
   * Provides subclasses access to the topmost widget that defines this
   * composite.
   *
   * @return the widget
   */
  protected Widget getWidget() {
    return widget;
  }
  /**
   * Sets the widget to be wrapped by the composite. The wrapped widget must be
   * set before calling any {@link Widget} methods on this object, or adding it
   * to a panel. This method may only be called once for a given composite.
   *
   * @param widget the widget to be wrapped
   */
  protected void initWidget(Widget widget) {
    // Validate. Make sure the widget is not being set twice.
    if (this.widget != null) {
      throw new IllegalStateException("Composite.initWidget() may only be "
          + "called once.");
    }
    if (widget instanceof IsRenderable) {
      // In case the Widget being wrapped is an IsRenderable, we save that fact.
      this.renderable = (IsRenderable) widget;
    }
    // Detach the new child.
    widget.removeFromParent();
    // Use the contained widget's element as the composite's element,
    // effectively merging them within the DOM.
    Element elem = widget.getElement();
    setElement(elem);
    if (PotentialElement.isPotential(elem)) {
      PotentialElement.as(elem).setResolver(this);
    }
    // Logical attach.
    this.widget = widget;
    // Adopt.
    widget.setParent(this);
  }
  @Override
  protected void onAttach() {
    if (!isOrWasAttached()) {
      // First-ever attach: push events collected before the child existed
      // down onto the wrapped widget.
      widget.sinkEvents(eventsToSink);
      eventsToSink = -1;
    }
    widget.onAttach();
    // Clobber the widget's call to setEventListener(), causing all events to
    // be routed to this composite, which will delegate back to the widget by
    // default (note: it's not necessary to clear this in onDetach(), because
    // the widget's onDetach will do so).
    DOM.setEventListener(getElement(), this);
    // Call onLoad() directly, because we're not calling super.onAttach().
    onLoad();
    AttachEvent.fire(this, true);
  }
  @Override
  protected void onDetach() {
    try {
      onUnload();
      AttachEvent.fire(this, false);
    } finally {
      // We don't want an exception in user code to keep us from calling the
      // super implementation (or event listeners won't get cleaned up and
      // the attached flag will be wrong).
      widget.onDetach();
    }
  }
  @Override
  protected Element resolvePotentialElement() {
    // Resolve through the wrapped widget, then adopt its concrete element.
    setElement(widget.resolvePotentialElement());
    return getElement();
  }
  /**
   * This method was for initializing the Widget to be wrapped by this
   * Composite, but has been deprecated in favor of {@link #initWidget(Widget)}.
   *
   * @deprecated Use {@link #initWidget(Widget)} instead
   */
  @Deprecated
  protected void setWidget(Widget widget) {
    initWidget(widget);
  }
}
| syntelos/gwtcc | src/com/google/gwt/user/client/ui/Composite.java | Java | apache-2.0 | 5,804 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudformation.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudformation-2010-05-15/DescribeStackSet" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeStackSetRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name or unique ID of the stack set whose description you want.
* </p>
*/
private String stackSetName;
/**
* <p>
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the organization's
* management account or as a delegated administrator in a member account.
* </p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management account. For
* more information, see <a
* href="https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* </ul>
*/
private String callAs;
/**
* <p>
* The name or unique ID of the stack set whose description you want.
* </p>
*
* @param stackSetName
* The name or unique ID of the stack set whose description you want.
*/
public void setStackSetName(String stackSetName) {
this.stackSetName = stackSetName;
}
/**
* <p>
* The name or unique ID of the stack set whose description you want.
* </p>
*
* @return The name or unique ID of the stack set whose description you want.
*/
public String getStackSetName() {
return this.stackSetName;
}
/**
* <p>
* The name or unique ID of the stack set whose description you want.
* </p>
*
* @param stackSetName
* The name or unique ID of the stack set whose description you want.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeStackSetRequest withStackSetName(String stackSetName) {
setStackSetName(stackSetName);
return this;
}
/**
* <p>
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the organization's
* management account or as a delegated administrator in a member account.
* </p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management account. For
* more information, see <a
* href="https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* </ul>
*
* @param callAs
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the
* organization's management account or as a delegated administrator in a member account.</p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed
* permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management
* account. For more information, see <a href=
* "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* @see CallAs
*/
public void setCallAs(String callAs) {
this.callAs = callAs;
}
/**
* <p>
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the organization's
* management account or as a delegated administrator in a member account.
* </p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management account. For
* more information, see <a
* href="https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* </ul>
*
* @return [Service-managed permissions] Specifies whether you are acting as an account administrator in the
* organization's management account or as a delegated administrator in a member account.</p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed
* permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management
* account. For more information, see <a href=
* "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* @see CallAs
*/
public String getCallAs() {
return this.callAs;
}
/**
* <p>
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the organization's
* management account or as a delegated administrator in a member account.
* </p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management account. For
* more information, see <a
* href="https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* </ul>
*
* @param callAs
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the
* organization's management account or as a delegated administrator in a member account.</p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed
* permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management
* account. For more information, see <a href=
* "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see CallAs
*/
public DescribeStackSetRequest withCallAs(String callAs) {
setCallAs(callAs);
return this;
}
/**
* <p>
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the organization's
* management account or as a delegated administrator in a member account.
* </p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management account. For
* more information, see <a
* href="https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* </ul>
*
* @param callAs
* [Service-managed permissions] Specifies whether you are acting as an account administrator in the
* organization's management account or as a delegated administrator in a member account.</p>
* <p>
* By default, <code>SELF</code> is specified. Use <code>SELF</code> for stack sets with self-managed
* permissions.
* </p>
* <ul>
* <li>
* <p>
* If you are signed in to the management account, specify <code>SELF</code>.
* </p>
* </li>
* <li>
* <p>
* If you are signed in to a delegated administrator account, specify <code>DELEGATED_ADMIN</code>.
* </p>
* <p>
* Your Amazon Web Services account must be registered as a delegated administrator in the management
* account. For more information, see <a href=
* "https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/stacksets-orgs-delegated-admin.html"
* >Register a delegated administrator</a> in the <i>CloudFormation User Guide</i>.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see CallAs
*/
public DescribeStackSetRequest withCallAs(CallAs callAs) {
this.callAs = callAs.toString();
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getStackSetName() != null)
sb.append("StackSetName: ").append(getStackSetName()).append(",");
if (getCallAs() != null)
sb.append("CallAs: ").append(getCallAs());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeStackSetRequest == false)
return false;
DescribeStackSetRequest other = (DescribeStackSetRequest) obj;
if (other.getStackSetName() == null ^ this.getStackSetName() == null)
return false;
if (other.getStackSetName() != null && other.getStackSetName().equals(this.getStackSetName()) == false)
return false;
if (other.getCallAs() == null ^ this.getCallAs() == null)
return false;
if (other.getCallAs() != null && other.getCallAs().equals(this.getCallAs()) == false)
return false;
return true;
}
@Override
public int hashCode() {
    // Standard 31-based accumulation over both members; null hashes as 0,
    // keeping hashCode consistent with equals.
    final int prime = 31;
    int result = 1;
    result = prime * result + ((getStackSetName() == null) ? 0 : getStackSetName().hashCode());
    result = prime * result + ((getCallAs() == null) ? 0 : getCallAs().hashCode());
    return result;
}
@Override
public DescribeStackSetRequest clone() {
    // Shallow copy via the superclass; both members are immutable Strings,
    // so a shallow copy is sufficient here.
    return (DescribeStackSetRequest) super.clone();
}
}
| aws/aws-sdk-java | aws-java-sdk-cloudformation/src/main/java/com/amazonaws/services/cloudformation/model/DescribeStackSetRequest.java | Java | apache-2.0 | 15,108 |
package main
import (
"io/ioutil"
"log"
"os"
"os/exec"
"github.com/drone/drone-plugin-go/plugin"
)
// SSHConfig is the ssh client configuration written to /root/.ssh/config;
// it disables strict host key checking so deployments to previously unseen
// hosts do not block on an interactive prompt.
var SSHConfig = `Host *
StrictHostKeyChecking no`

// deployer mirrors the plugin's "vargs" payload: the dep task to run and
// the stage to deploy to.
type deployer struct {
	Task string `json:"task"`
	Stage string `json:"stage"`
}
// main parses the drone plugin parameters, installs the workspace SSH
// credentials under /root/.ssh, and runs `dep` for the configured task and
// stage from within the workspace directory.
func main() {
	vargs := new(deployer)
	workspace := new(plugin.Workspace)
	plugin.Param("vargs", vargs)
	plugin.Param("workspace", workspace)
	plugin.MustParse()

	// Install the SSH configuration and the workspace key pair.
	if err := os.MkdirAll("/root/.ssh", 0700); err != nil {
		log.Fatal(err)
	}
	writeOrDie("/root/.ssh/config", SSHConfig, 0644)
	writeOrDie("/root/.ssh/id_rsa", workspace.Keys.Private, 0600)
	writeOrDie("/root/.ssh/id_rsa.pub", workspace.Keys.Public, 0644)

	// Run the deployment, streaming its output to our own stdout/stderr.
	cmd := exec.Command("/bin/dep", "-n", vargs.Task, vargs.Stage)
	cmd.Dir = workspace.Path
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		log.Fatal(err)
	}
	log.Println("Command completed successfully")
}

// writeOrDie writes contents to path with the given mode and aborts the
// plugin run on any error.
func writeOrDie(path, contents string, mode os.FileMode) {
	if err := ioutil.WriteFile(path, []byte(contents), mode); err != nil {
		log.Fatal(err)
	}
}
| dizk/drone-deployer | main.go | GO | apache-2.0 | 1,087 |
<?php
// Survey question add/edit form (admin side).
// Three entry modes:
//   GET  with id               -> load an existing question for editing
//   POST with label fields     -> create or update a question
//   plain GET                  -> empty creation form
include_once($_SERVER['DOCUMENT_ROOT'].'/include/autoprepend.php');
include_once('include_class.php');
include_once('include_cms.php');

activateMenu('gestionsondage'); // unfolds the admin menu for the survey section

// Translation engine is needed for the survey module.
$translator =& TslManager::getInstance();
$langpile = $translator->getLanguages();

// Default state. $multiple/$active previously stayed undefined on a plain
// GET, raising notices when the checkboxes below were rendered.
$errors = 0;
$msgStatus = '';
$sql = '';
$id = '';
$famille_id = '';
$libelle = '';
$multiple = 'N';
$active = 'N';

if (isset($_GET['id'])) {
    // Edit mode: load the existing question.
    $id = $_GET['id'];
    $rs = getObjectById("cms_survey_ask", $id);
    if (sizeof($rs) == 0) {
        // Lookup failed: log and count the error.
        error_log(" erreur lors de l'execution de la requete ".$sql);
        error_log($db->ErrorMsg());
        $errors++;
    } else {
        $libelle = stripslashes($rs->get_libelle());
        $multiple = $rs->get_multiple();
        $active = $rs->get_active();
    }
    $submitText = "Modifier";
}
elseif (isset($_POST['libelle']) || isset($_POST['libelle_FR'])) {
    if (isset($_POST['libelle'])) {
        $libelle = $_POST['libelle'];
    } else {
        // Build one multilingual label: the default-language text is the
        // reference, other languages are attached as translations.
        // (The original wrapped this in an extra foreach over $langpile,
        // which registered the same translation once per language.)
        $tsl_default = $_POST["libelle_FR"];
        if ($tsl_default != '') {
            $tsl_table = Array();
            foreach ($langpile as $lang_id => $lang_props) {
                if ($lang_id != DEF_APP_LANGUE) {
                    if ($_POST["libelle_".$lang_props['libellecourt']] != '')
                        $tsl_table[$lang_id] = $_POST["libelle_".$lang_props['libellecourt']];
                }
            }
            $libelle = $translator->addTranslation($tsl_default, $tsl_table);
        } else {
            // Unset the reference when updating to an empty text.
            $libelle = -1;
        }
    }
    $multiple = (!empty($_POST['multiple']) ? 'Y' : 'N');
    $active = (!empty($_POST['active']) ? 'Y' : 'N');

    if (isset($_POST['id']) and (strlen($_POST['id']) > 0)) {
        // Update an existing question.
        $id = $_POST['id'];
        $oAsk = new Cms_survey_ask($id);
        $oAsk->set_libelle(addslashes($libelle));
        $oAsk->set_multiple($multiple);
        $oAsk->set_active($active);
        $bRetour = dbUpdate($oAsk);
        $msgStatus = "Modification effectuée.<br>";
    } else {
        // Create a new question bound to the current working site.
        $oAsk = new Cms_survey_ask();
        $oAsk->set_libelle(addslashes($libelle));
        $oAsk->set_multiple($multiple);
        $oAsk->set_active($active);
        $oAsk->set_id_site($_SESSION['idSite_travail']);
        $bRetour = dbInsertWithAutokey($oAsk);
        $msgStatus = "Ajout effectué.<br>";
    }
    if (!$bRetour) {
        // Persistence failed: log and count the error.
        error_log(" erreur lors de l'execution de la requete ".$sql);
        error_log($db->ErrorMsg());
        $errors++;
    } else {
        // dbInsertWithAutokey()/dbUpdate() return the record id on success.
        $id = $bRetour;
    }
    $submitText = "Modifier";
} else {
    // Plain GET without id: empty creation form.
    $submitText = "Ajouter";
}

if ($errors > 0) {
    $msgStatus = '<i>Il y a eu des erreurs pendant le traitement.</i><br><b>Echec de la requête.</b>';
}
?>
<strong><div class="arbo2">Gestion des questions du sondage</div></strong><br><br>
<div class="arbo"><strong><?php echo $submitText ; ?> une question :</strong></div>
<br>
<?php echo $msgStatus ; ?>
<br>
<form name="ask" method="post" action="<?php echo $_SERVER['PHP_SELF'] ; ?>">
<input type="hidden" name="id" value="<?php echo $id ; ?>">
<table class="listDataTable">
<tr>
<td class="listDataTitle">Libellé de la question : </td>
<td class="listDataValue">
<?php
// One textarea per configured language; prefilled when editing (POST update
// or GET edit — both reload the stored translation).
foreach ($langpile as $lang_id => $lang_props) {
    if ((isset($_POST['id']) and (strlen($_POST['id']) > 0))
            or (isset($_GET['id']) and (strlen($_GET['id']) > 0))) {
        $eTslValue = $translator->getByID($libelle, $lang_id);
    } else {
        $eTslValue = '';
    }
    echo "<textarea id=\"libelle_".$lang_props['libellecourt']."\" name=\"libelle_".$lang_props['libellecourt']."\" cols=\"50\" rows=\"8\" style=\"font-size:11px\">".$eTslValue."</textarea> ".$lang_props['libellecourt']." <br />";
}
?>
</td>
</tr>
<tr>
<td class="listDataTitle" colspan="2">
<input type="checkbox" name="multiple" id="multiple"<?php echo ($multiple == 'Y' ? 'checked="true"' : ''); ?> /> Cochez pour autoriser les réponses multiples.</td>
</tr>
<tr>
<td class="listDataTitle" colspan="2">
<input type="checkbox" name="active" id="active"<?php echo ($active == 'Y' ? 'checked="true"' : ''); ?> /> Cochez pour rendre la question active (publiable).</td>
</tr>
<tr>
<td class="listDataTitle" colspan="2"><input type="button" onClick="if(validate_form()) submit();" value="<?php echo $submitText ; ?>"></td>
</tr>
</table>
</form>
<div class="arbo">Retour à la <a href="question.php">liste des questions</a><?php if($id != ""){echo " | <a href=\"add_reponse.php?question=".$id."\">Associer des réponses</a>";}?></div>
// /*
// * Copyright (c) 2016, Alachisoft. All Rights Reserved.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// */
using System;
using System.Collections;
using Alachisoft.NosDB.Serialization.Surrogates;
using System.Collections.Generic;
using Alachisoft.NosDB.Common.Serialization;
namespace Alachisoft.NosDB.Serialization
{
    /// <summary>
    /// Provides methods to register <see cref="ICompactSerializable"/> implementations
    /// utilizing available surrogates.
    /// </summary>
    public sealed class CompactFormatterServices
    {
        // Guards dynamic surrogate generation, which mutates shared
        // DynamicSurrogateBuilder state (Portable / SubTypeHandle).
        static object mutex = new object();

        #region /                       ICompactSerializable specific                  /

        /// <summary>
        /// Registers a type that implements <see cref="ICompactSerializable"/> with the system
        /// as a version-compatible compact type. If the type is an array of
        /// <see cref="ICompactSerializable"/>s, appropriate surrogates for arrays and the
        /// element type are also registered.
        /// </summary>
        /// <param name="type">type that implements <see cref="ICompactSerializable"/></param>
        /// <param name="typeHandle">handle to register the type under</param>
        /// <exception cref="ArgumentNullException">If <paramref name="type"/> is null.</exception>
        /// <exception cref="ArgumentException">
        /// If the type is already registered with a different handle, or when no appropriate
        /// surrogate is found for the specified type.
        /// </exception>
        static public void RegisterCompactType(Type type, short typeHandle)
        {
            // Registers the type as a version-compatible compact type.
            RegisterCompactType(type, typeHandle, true);
        }

        /// <summary>
        /// Registers a type that implements <see cref="ICompactSerializable"/> with the system
        /// without version compatibility. See <see cref="RegisterCompactType(Type, short)"/>.
        /// </summary>
        /// <param name="type">type that implements <see cref="ICompactSerializable"/></param>
        /// <param name="typeHandle">handle to register the type under</param>
        /// <exception cref="ArgumentNullException">If <paramref name="type"/> is null.</exception>
        /// <exception cref="ArgumentException">
        /// If the type is already registered with a different handle, or when no appropriate
        /// surrogate is found for the specified type.
        /// </exception>
        static public void RegisterNonVersionCompatibleCompactType(Type type, short typeHandle)
        {
            RegisterCompactType(type, typeHandle, false);
        }

        // Shared implementation for both public registration entry points.
        static private void RegisterCompactType(Type type, short typeHandle, bool versionCompatible)
        {
            if (type == null) throw new ArgumentNullException("type");

            ISerializationSurrogate surrogate = null;
            if ((surrogate = TypeSurrogateSelector.GetSurrogateForTypeStrict(type, null)) != null)
            {
                // No need to check subHandle since this function is not used by DataSharing.
                if (surrogate.TypeHandle == typeHandle)
                    return; // Type is already registered with the same handle.
                throw new ArgumentException("Type " + type.FullName + " is already registered with different handle");
            }

            // NOTE(review): typeof(Dictionary<,>).Equals(type) only matches the *open*
            // generic type, never a closed one such as Dictionary<string, int>. The
            // predecessor of this check used IDictionary.IsAssignableFrom; confirm the
            // narrowing was intentional. Same applies to the List<> check below.
            if (typeof(Dictionary<,>).Equals(type))
            {
                if (type.IsGenericType)
                    surrogate = new GenericIDictionarySerializationSurrogate(typeof(IDictionary<,>));
                else
                    surrogate = new IDictionarySerializationSurrogate(type);
            }
            else if (type.IsArray)
            {
                surrogate = new ArraySerializationSurrogate(type);
            }
            else if (typeof(List<>).Equals(type))
            {
                if (type.IsGenericType)
                    surrogate = new GenericIListSerializationSurrogate(typeof(IList<>));
                else
                    surrogate = new IListSerializationSurrogate(type);
            }
            else if (typeof(ICompactSerializable).IsAssignableFrom(type))
            {
                // The only branch where the versionCompatible flag matters.
                if (versionCompatible)
                    surrogate = new VersionCompatibleCompactSerializationSurrogate(type);
                else
                    surrogate = new ICompactSerializableSerializationSurrogate(type);
            }
            else if (typeof(Enum).IsAssignableFrom(type))
            {
                surrogate = new EnumSerializationSurrogate(type);
            }

            if (surrogate == null)
                throw new ArgumentException("No appropriate surrogate found for type " + type.FullName);
            System.Diagnostics.Debug.WriteLine("Registered surrogate for type " + type.FullName);
            TypeSurrogateSelector.RegisterTypeSurrogate(surrogate, typeHandle);
        }

        /// <summary>
        /// Registers a custom (per-cache-context) compact type with the system. Types that do
        /// not match any built-in surrogate are handled by a dynamically generated surrogate.
        /// </summary>
        /// <param name="type">type to register</param>
        /// <param name="typeHandle">handle to register the type under</param>
        /// <param name="cacheContext">cache context owning the registration; must not be null</param>
        /// <param name="subTypeHandle">sub-handle used for portable types</param>
        /// <param name="attributeOrder">attribute ordering used by the dynamic surrogate builder</param>
        /// <param name="portable">whether the type participates in portable data sharing</param>
        /// <param name="nonCompactFields">fields to exclude from compact serialization</param>
        /// <exception cref="ArgumentNullException">If <paramref name="type"/> is null.</exception>
        /// <exception cref="ArgumentException">
        /// If <paramref name="cacheContext"/> is null, the type is already registered with a
        /// different handle, or no appropriate surrogate is found.
        /// </exception>
        static public void RegisterCustomCompactType(Type type, short typeHandle, string cacheContext, short subTypeHandle, Hashtable attributeOrder, bool portable, Hashtable nonCompactFields)
        {
            if (type == null) throw new ArgumentNullException("type");

            ISerializationSurrogate surrogate = null;
            if (cacheContext == null) throw new ArgumentException("cacheContext can not be null");
            if ((surrogate = TypeSurrogateSelector.GetSurrogateForTypeStrict(type, cacheContext)) != null)
            {
                if (surrogate.TypeHandle == typeHandle && (surrogate.SubTypeHandle == subTypeHandle || surrogate.SubTypeHandle != 0))
                    return; // Type is already registered with the same handle.
                throw new ArgumentException("Type " + type.FullName + " is already registered with different handle");
            }

            // See the NOTE(review) in RegisterCompactType about the open-generic
            // comparison; the extra GetGenericArguments()[0].FullName check only
            // runs when the first operand is true, so it cannot throw for
            // non-generic types.
            if (typeof(Dictionary<,>).Equals(type) && string.IsNullOrEmpty(((Type[])type.GetGenericArguments())[0].FullName))
            {
                if (type.IsGenericType)
                    surrogate = new GenericIDictionarySerializationSurrogate(typeof(IDictionary<,>));
                else
                    surrogate = new IDictionarySerializationSurrogate(type);
            }
            else if (type.IsArray)
            {
                surrogate = new ArraySerializationSurrogate(type);
            }
            else if (typeof(List<>).Equals(type) && string.IsNullOrEmpty(((Type[])type.GetGenericArguments())[0].FullName))
            {
                if (type.IsGenericType)
                    surrogate = new GenericIListSerializationSurrogate(typeof(IList<>));
                else
                    surrogate = new IListSerializationSurrogate(type);
            }
            else if (typeof(ICompactSerializable).IsAssignableFrom(type))
            {
                surrogate = new ICompactSerializableSerializationSurrogate(type);
            }
            else if (typeof(Enum).IsAssignableFrom(type))
            {
                surrogate = new EnumSerializationSurrogate(type);
            }
            else
            {
                // Unknown type: build a surrogate dynamically. The builder uses
                // static state, so generation is serialized behind the mutex.
                lock (mutex)
                {
                    DynamicSurrogateBuilder.Portable = portable;
                    if (portable)
                        DynamicSurrogateBuilder.SubTypeHandle = subTypeHandle;
                    surrogate = DynamicSurrogateBuilder.CreateTypeSurrogate(type, attributeOrder, nonCompactFields);
                }
            }

            if (surrogate == null)
                throw new ArgumentException("No appropriate surrogate found for type " + type.FullName);
            System.Diagnostics.Debug.WriteLine("Registered surrogate for type " + type.FullName);
            TypeSurrogateSelector.RegisterTypeSurrogate(surrogate, typeHandle, cacheContext, subTypeHandle, portable);
        }

        /// <summary>
        /// Registers a type that implements <see cref="ICompactSerializable"/> with the system,
        /// letting the surrogate selector assign the handle. Unlike the two-argument overload,
        /// re-registration is always an error here.
        /// </summary>
        /// <param name="type">type that implements <see cref="ICompactSerializable"/></param>
        /// <exception cref="ArgumentNullException">If <paramref name="type"/> is null.</exception>
        /// <exception cref="ArgumentException">
        /// If the type is already registered or when no appropriate surrogate is found.
        /// </exception>
        static public void RegisterCompactType(Type type)
        {
            if (type == null) throw new ArgumentNullException("type");
            if (TypeSurrogateSelector.GetSurrogateForTypeStrict(type, null) != null)
                throw new ArgumentException("Type '" + type.FullName + "' is already registered");

            ISerializationSurrogate surrogate = null;
            if (typeof(Dictionary<,>).Equals(type))
            {
                surrogate = new IDictionarySerializationSurrogate(type);
            }
            else if (type.IsArray)
            {
                surrogate = new ArraySerializationSurrogate(type);
            }
            else if (typeof(List<>).Equals(type))
            {
                surrogate = new IListSerializationSurrogate(type);
            }
            else if (typeof(ICompactSerializable).IsAssignableFrom(type))
            {
                surrogate = new ICompactSerializableSerializationSurrogate(type);
            }
            else if (typeof(Enum).IsAssignableFrom(type))
            {
                surrogate = new EnumSerializationSurrogate(type);
            }

            if (surrogate == null)
                throw new ArgumentException("No appropriate surrogate found for type " + type.FullName);
            System.Diagnostics.Debug.WriteLine("Registered surrogate for type " + type.FullName);
            TypeSurrogateSelector.RegisterTypeSurrogate(surrogate);
        }

        /// <summary>
        /// Unregisters the surrogate for the specified type that implements
        /// <see cref="ICompactSerializable"/> from the system. Used only to unregister
        /// internal types.
        /// <b><u>NOTE: </u></b> <b>NOT IMPLEMENTED</b> — always throws.
        /// </summary>
        /// <param name="type">the specified type</param>
        /// <exception cref="NotImplementedException">always</exception>
        static public void UnregisterCompactType(Type type)
        {
            // The original body after this throw was unreachable (CS0162) and has
            // been removed; restore it from history if this is ever implemented.
            throw new NotImplementedException();
        }

        /// <summary>
        /// Unregisters the surrogate for the custom specified type that implements
        /// <see cref="ICompactSerializable"/> from the system.
        /// <b><u>NOTE: </u></b> <b>NOT IMPLEMENTED</b> — always throws.
        /// </summary>
        /// <param name="type">the specified type</param>
        /// <param name="cacheContext">cache context owning the registration</param>
        /// <exception cref="NotImplementedException">always</exception>
        static public void UnregisterCustomCompactType(Type type, string cacheContext)
        {
            // The original body after this throw was unreachable (CS0162) and has
            // been removed; restore it from history if this is ever implemented.
            throw new NotImplementedException();
        }

        /// <summary>
        /// Unregisters all the compact types associated with the cache context.
        /// </summary>
        /// <param name="cacheContext">Cache context; must not be null</param>
        /// <exception cref="ArgumentException">If <paramref name="cacheContext"/> is null.</exception>
        static public void UnregisterAllCustomCompactTypes(string cacheContext)
        {
            if (cacheContext == null) throw new ArgumentException("cacheContext can not be null");
            TypeSurrogateSelector.UnregisterAllSurrogates(cacheContext);
            System.Diagnostics.Debug.WriteLine("Unregister all types " + cacheContext);
        }

        #endregion
    }
}
| Alachisoft/NosDB | Src/Serialization/CompactFormatterServices.cs | C# | apache-2.0 | 15,893 |
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package v201302.creativetemplateservice;
import com.google.api.ads.dfp.lib.DfpService;
import com.google.api.ads.dfp.lib.DfpServiceLogger;
import com.google.api.ads.dfp.lib.DfpUser;
import com.google.api.ads.dfp.v201302.CreativeTemplate;
import com.google.api.ads.dfp.v201302.CreativeTemplatePage;
import com.google.api.ads.dfp.v201302.CreativeTemplateServiceInterface;
import com.google.api.ads.dfp.v201302.Statement;
/**
 * This example pages through and prints all creative templates.
 *
 * Tags: CreativeTemplateService.getCreativeTemplatesByStatement
 *
 * @author api.arogal@gmail.com (Adam Rogal)
 */
public class GetAllCreativeTemplatesExample {
  public static void main(String[] args) {
    try {
      // Log SOAP XML request and response.
      DfpServiceLogger.log();

      // Get DfpUser from "~/dfp.properties".
      DfpUser user = new DfpUser();

      // Get the CreativeTemplateService.
      CreativeTemplateServiceInterface creativeTemplateService =
          user.getService(DfpService.V201302.CREATIVE_TEMPLATE_SERVICE);

      // Page through results 500 at a time until the result set is exhausted.
      CreativeTemplatePage resultPage = new CreativeTemplatePage();
      Statement pagingStatement = new Statement();
      int pageOffset = 0;

      do {
        pagingStatement.setQuery("LIMIT 500 OFFSET " + pageOffset);
        resultPage =
            creativeTemplateService.getCreativeTemplatesByStatement(pagingStatement);

        if (resultPage.getResults() != null) {
          int resultIndex = resultPage.getStartIndex();
          for (CreativeTemplate creativeTemplate : resultPage.getResults()) {
            System.out.println(resultIndex + ") Creative template with ID \""
                + creativeTemplate.getId()
                + "\", name \"" + creativeTemplate.getName()
                + "\", and type \"" + creativeTemplate.getType() + "\" was found.");
            resultIndex++;
          }
        }

        pageOffset += 500;
      } while (pageOffset < resultPage.getTotalResultSetSize());

      System.out.println("Number of results found: " + resultPage.getTotalResultSetSize());
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}
| google-code-export/google-api-dfp-java | examples/v201302/creativetemplateservice/GetAllCreativeTemplatesExample.java | Java | apache-2.0 | 2,767 |
from django.conf.urls import patterns, url
from api.views import query
from api.views import search
from api.views import submit

# URL routes for the public API, grouped by view module:
# read-only lookups (query), free-text search (search), and writes (submit).
urlpatterns = patterns('',
    # Food item lookup.
    url(r'^food/$', query.food_handler, name='food'),
    # Category listings: flat ids vs. full detail payloads.
    url(r'^categories/all/$', query.category_all_handler, name='category'),
    url(r'^categories/all/detailed/$', query.category_all_detailed_handler, name='category_detailed'),
    # Free-text food search.
    url(r'^search/food/$', search.search_food_handler, name='search_food'),
    # Data submission endpoint.
    url(r'^submit/$', submit.submit_handler, name='submit')
    #url(r'^search/suggestion/$', search.search_suggestion_handler, name='search_suggestion')
)
| czgu/opendataexperience | server/api/urls.py | Python | apache-2.0 | 625 |
package gui.panels;
import gui.ButtonEditor;
import gui.ButtonRenderer;
import gui.PerfTestTableModel;
import gui.interfaces.AbstractMonitoredTestListener;
import java.awt.BorderLayout;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Iterator;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import shared.interfaces.IInstruction;
import tools.GUIConstants;
import tools.GUIFactory;
import tools.widgets.InstructionCreator;
import controls.ctestplanmanagement.AbstractMonitoredTest;
import controls.ctestplanmanagement.interfaces.ITestPlanManagement;
/**
 * Swing panel listing the instructions of a monitored performance test,
 * with controls to add, remove and edit them. The panel registers itself
 * as a listener on the test so the table refreshes when the model changes.
 *
 * @author David Lecoconnier david.lecoconnier@gmail.com
 * @author Jean-Luc Amitousa-Mankoy jeanluc.amitousa.mankoy@gmail.com
 * @version 1.0
 */
public class InstructionPanel extends JPanel implements AbstractMonitoredTestListener {

    private static final long serialVersionUID = -6149233856276646097L;

    // Table of instructions: one row per instruction plus an "edit" button column.
    private JTable table;
    // Controller used to create/remove/edit instructions on the model.
    private ITestPlanManagement testPlanManagement;
    // The test whose instructions this panel displays.
    private AbstractMonitoredTest abstractMonitoredTest;
    // Owner frame, used as parent for modal dialogs.
    private JFrame frame;
    // Cell editor for the edit-button column; queried for the clicked row.
    private ButtonEditor editor;

    /**
     * Builds the panel and subscribes it to the given test's change events.
     *
     * @param frame owner frame for modal dialogs
     * @param testPlanManagement controller handling instruction CRUD
     * @param abstractMonitoredTest test whose instructions are shown
     */
    public InstructionPanel(JFrame frame, ITestPlanManagement testPlanManagement, AbstractMonitoredTest abstractMonitoredTest) {
        super();
        this.testPlanManagement = testPlanManagement;
        this.abstractMonitoredTest = abstractMonitoredTest;
        this.frame = frame;
        this.abstractMonitoredTest.addAbstractMonitoredTestListener(this);
        this.initPanel();
    }

    /**
     * Lays out the title, the add/remove buttons and the instruction table,
     * and wires the button listeners.
     */
    private void initPanel() {
        this.setLayout(new BorderLayout());

        JPanel titlePanel = new JPanel();
        JLabel titleLabel = new JLabel("Instructions");
        titlePanel.add(titleLabel);
        titleLabel.setFont(titleLabel.getFont().deriveFont(GUIConstants.FRAME_TITLE_FONT_SIZE));

        JPanel instructionsPanel = new JPanel(new BorderLayout());
        JPanel northGrid = new JPanel(new GridLayout(1, 3));
        JButton addButton = new JButton(GUIConstants.INSTRUCTION_ADD);
        JButton removeButton = new JButton(GUIConstants.INSTRUCTION_REMOVE);
        addButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                addAction(e);
            }
        });
        removeButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                removeAction(e);
            }
        });
        northGrid.add(addButton);
        northGrid.add(removeButton);
        instructionsPanel.add(northGrid, BorderLayout.NORTH);

        // Table starts empty; rows are (re)built by updateData().
        Object[][] data = {};
        /*{"Create", GUIConstants.INSTRUCTION_EDIT},
         {"Read", GUIConstants.INSTRUCTION_EDIT}
         };*/
        this.table = new JTable(new PerfTestTableModel(new String[]{GUIConstants.INSTRUCTION_ID, GUIConstants.INSTRUCTION_NAME, GUIConstants.INSTRUCTION_EDITION}, data));
        // The shared editor instance remembers the last clicked row, which
        // editAction() reads back to know which instruction to edit.
        this.editor = new ButtonEditor(new JCheckBox(), GUIConstants.INSTRUCTION_EDIT);
        this.editor.getJButton().addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                editAction(e);
            }
        });
        this.table.getColumn(GUIConstants.INSTRUCTION_EDITION).setCellRenderer(new ButtonRenderer(GUIConstants.INSTRUCTION_EDIT));
        this.table.getColumn(GUIConstants.INSTRUCTION_EDITION).setCellEditor(this.editor);
        instructionsPanel.add(new JScrollPane(table), BorderLayout.CENTER);

        this.add(titlePanel, BorderLayout.NORTH);
        this.add(instructionsPanel, BorderLayout.CENTER);
    }

    /**
     * Opens the instruction-creation dialog and, when confirmed, creates a
     * new instruction on the test and stores its request text.
     */
    private void addAction(ActionEvent e) {
        System.out.println("Action button pressed !");
        InstructionCreator creator = GUIFactory.instructionCreator(this.frame, "Instruction creation", true);
        if (creator.showDialog()) {
            IInstruction instruction = this.testPlanManagement.addNewInstruction(this.abstractMonitoredTest, creator.getName());
            // addNewInstruction may return null (e.g. rejected name); only
            // then is the request text skipped.
            if (null != instruction) {
                instruction.setReadableRequest(creator.getRequest());
                System.out.println("newTestPlanMenuItemAction");
            }
        }
    }

    /**
     * Removes the currently selected rows from the test. Iterates in reverse
     * so earlier removals do not shift the indices still to be removed.
     */
    private void removeAction(ActionEvent e) {
        System.out.println("Remove button pressed !");
        int[] selectedLines = this.table.getSelectedRows();
        if (0 == selectedLines.length) {
            return;
        }
        for (int i = selectedLines.length - 1; i >= 0; --i) {
            this.testPlanManagement.removeInstruction(abstractMonitoredTest, selectedLines[i]);
        }
    }

    /**
     * Opens the instruction dialog prefilled with the clicked row's
     * instruction, and applies the edit when confirmed. The row index comes
     * from the shared button editor, not from the event.
     */
    private void editAction(ActionEvent e) {
        System.out.println("edit button pressed");
        int row = this.editor.getLastRow();
        if (row == -1)
            return;
        InstructionCreator creator = GUIFactory.instructionCreator(this.frame, "Instruction creation", true);
        IInstruction instruction = this.abstractMonitoredTest.getInstructions().get(row);
        creator.setData(instruction.getName(), instruction.getReadableRequest());
        if (creator.showDialog()) {
            this.testPlanManagement.editInstruction(this.abstractMonitoredTest, row, creator.getName(), creator.getRequest());
        }
    }

    /**
     * Rebuilds the table rows from the test's current instruction list.
     * Called by the model when instructions change.
     */
    @Override
    public void updateData() {
        ((PerfTestTableModel) this.table.getModel()).clear();
        Iterator<IInstruction> iter = this.abstractMonitoredTest.getInstructions().iterator();
        while (iter.hasNext()) {
            ((PerfTestTableModel) this.table.getModel()).addRow(new Object[] {iter.next().getName(), GUIConstants.INSTRUCTION_EDIT});
        }
        this.updateUI();
    }
}
| etrange02/PerfTestMaster | src/gui/panels/InstructionPanel.java | Java | apache-2.0 | 5,256 |
using System;
using System.Collections.Generic;
using System.Text;
namespace Rawr.Cat
{
public class CatRotationCalculator
{
// Character stats snapshot used by the rotation model.
public StatsCat Stats { get; set; }
// Fight length, in seconds.
public float Duration { get; set; }
// Combo points generated per combo-point generator use.
public float CPPerCPG { get; set; }
// Whether the rotation keeps the Mangle debuff up itself.
public bool MaintainMangle { get; set; }
public bool GlyphOfShred { get; set; }
// Seconds per melee swing.
public float AttackSpeed { get; set; }
// Chance that a hit grants one extra combo point.
public float ChanceExtraCPPerHit { get; set; }
public bool OmenOfClarity { get; set; }
// Fraction of attacks avoided (missed/dodged) by the target.
public float AvoidedAttacks { get; set; }
// Multiplier applied to combo-point-generator energy costs.
public float CPGEnergyCostMultiplier { get; set; }
// Chance per bleed tick to proc Clearcasting.
public float ClearcastOnBleedChance { get; set; }
//public float MangleDuration { get; set; }
//public float RipDurationUptime { get; set; }
//public float RipDuration { get; set; }
//public float RakeDuration { get; set; }
//public float SavageRoarBonusDuration { get; set; }
public float BerserkDuration { get; set; }
//public float MeleeDamage { get; set; }
//public float MangleDamage { get; set; }
//public float ShredDamage { get; set; }
//public float RakeDamage { get; set; }
//public float RipDamage { get; set; }
//public float BiteBaseDamage { get; set; }
//public float BiteCPDamage { get; set; }
//public float MangleEnergy { get; set; }
//public float ShredEnergy { get; set; }
//public float RakeEnergy { get; set; }
//public float RipEnergy { get; set; }
//public float BiteEnergy { get; set; }
//public float RoarEnergy { get; set; }

// Per-ability aggregates (damage, energy cost, durations) consumed by
// GetRotationCalculations.
public CatAbilityStats MeleeStats { get; set; }
public CatAbilityStats MangleStats { get; set; }
public CatAbilityStats ShredStats { get; set; }
public CatAbilityStats RakeStats { get; set; }
public CatAbilityStats RipStats { get; set; }
public CatAbilityStats BiteStats { get; set; }
public CatAbilityStats RoarStats { get; set; }

// _chanceExtraCP[k]: probability-related weights for gaining an extra combo
// point, indexed by combo-point count; populated in the constructor.
private float[] _chanceExtraCP = new float[5];
// Captures all rotation inputs and precomputes the extra-combo-point
// probability table used when evaluating finishers.
public CatRotationCalculator(StatsCat stats, float duration, float cpPerCPG, bool maintainMangle,
    float berserkDuration, float attackSpeed, bool omenOfClarity, bool glyphOfShred, float avoidedAttacks,
    float chanceExtraCPPerHit, float cpgEnergyCostMultiplier, float clearcastOnBleedChance,
    CatAbilityStats meleeStats, CatAbilityStats mangleStats, CatAbilityStats shredStats, CatAbilityStats rakeStats,
    CatAbilityStats ripStats, CatAbilityStats biteStats, CatAbilityStats roarStats)
{
    Stats = stats;
    Duration = duration;
    CPPerCPG = cpPerCPG;
    MaintainMangle = maintainMangle;
    AttackSpeed = attackSpeed;
    OmenOfClarity = omenOfClarity;
    GlyphOfShred = glyphOfShred;
    AvoidedAttacks = avoidedAttacks;
    ChanceExtraCPPerHit = chanceExtraCPPerHit;
    CPGEnergyCostMultiplier = cpgEnergyCostMultiplier;
    ClearcastOnBleedChance = clearcastOnBleedChance;
    BerserkDuration = berserkDuration;
    MeleeStats = meleeStats;
    MangleStats = mangleStats;
    ShredStats = shredStats;
    RakeStats = rakeStats;
    RipStats = ripStats;
    BiteStats = biteStats;
    RoarStats = roarStats;
    //MangleDuration = mangleDuration;
    //RipDurationUptime = ripDurationUptime;
    //RipDuration = ripDuration;
    //RakeDuration = rakeDuration;
    //SavageRoarBonusDuration = savageRoarBonusDuration;
    //MeleeDamage = meleeDamage;
    //MangleDamage = mangleDamage;
    //ShredDamage = shredDamage;
    //RakeDamage = rakeDamage;
    //RipDamage = ripDamage;
    //BiteBaseDamage = biteBaseDamage;
    //BiteCPDamage = biteCPDamage;
    //MangleEnergy = mangleEnergy;
    //ShredEnergy = shredEnergy;
    //RakeEnergy = rakeEnergy;
    //RipEnergy = ripEnergy;
    //BiteEnergy = biteEnergy;
    //RoarEnergy = roarEnergy;

    // Precompute extra-combo-point chances: c = chance of an extra CP on a
    // hit, h = 1 - c. Each entry combines c/h powers for a given current CP
    // count (presumably the chance a finisher starts with an extra point at
    // that CP count — TODO confirm derivation against the commented-out
    // _chanceExactCP table below, which sums with these to 1).
    float c = chanceExtraCPPerHit, h = (1f - chanceExtraCPPerHit);
    _chanceExtraCP[0] = c;
    _chanceExtraCP[1] = c*h;
    _chanceExtraCP[2] = c*c+c*h*h;
    _chanceExtraCP[3] = 2*c*c*h+c*h*h*h;
    _chanceExtraCP[4] = c*c*c+3*c*c*h*h+c*h*h*h*h;
    //_chanceExactCP[0] = h;
    //_chanceExactCP[1] = c+h*h;
    //_chanceExactCP[2] = 2*c*h+h*h*h;
    //_chanceExactCP[3] = c*c+3*c*h*h+h*h*h*h;
    //_chanceExactCP[4] = 3*c*c*h+4*c*h*h*h+h*h*h*h*h;
    //float total0 = _chanceExactCP[0] + _chanceExtraCP[0];
    //float total1 = _chanceExactCP[1] + _chanceExtraCP[1];
    //float total2 = _chanceExactCP[2] + _chanceExtraCP[2];
    //float total3 = _chanceExactCP[3] + _chanceExtraCP[3];
    //float total4 = _chanceExactCP[4] + _chanceExtraCP[4];
    //ToString();
}
/// <summary>
/// Models a full fight of length <c>Duration</c> for one candidate feral cat rotation.
/// Works as a budget simulation: total energy and combo points for the fight are
/// estimated up front, then each maintained ability (Rake, Mangle, Savage Roar,
/// Rip/Ferocious Bite) draws down those budgets in priority order; leftover energy is
/// converted into extra combo-point generators. Finally, per-ability counts are turned
/// into damage totals.
/// NOTE(review): the accumulators (totalEnergyAvailable, totalCPAvailable, cpgCount)
/// are mutated in a fixed order; the sections below are NOT reorderable.
/// </summary>
/// <param name="useRake">Whether Rake is maintained as a bleed.</param>
/// <param name="useShred">Whether Shred (instead of Mangle) is the combo point generator.</param>
/// <param name="useRip">Whether Rip is used as the damage finisher.</param>
/// <param name="biteCP">Combo points per Ferocious Bite; 0 disables Bite usage.</param>
/// <param name="roarCP">Combo points at which Savage Roar is refreshed. Assumed &gt;= 1
/// (indexes _chanceExtraCP[roarCP - 1]) — TODO confirm callers never pass 0.</param>
/// <returns>A populated <see cref="CatRotationCalculation"/> describing this rotation.</returns>
public CatRotationCalculation GetRotationCalculations(bool useRake, bool useShred, bool useRip, int biteCP, int roarCP)
{
    // Base budget: 100 starting energy plus 10 energy/sec regeneration over the fight.
    float totalEnergyAvailable = 100f + (10f * Duration);
    // Tiger's Fury energy, once per (reduced) cooldown after the opening 10 seconds.
    totalEnergyAvailable += ((float)Math.Ceiling((Duration - 10f) / (30f - Stats.TigersFuryCooldownReduction)) * Stats.BonusEnergyOnTigersFury);
    if (BerserkDuration > 0)
        totalEnergyAvailable += (float)Math.Ceiling((Duration - 10f) / 180f ) * (BerserkDuration + 7f) * 10f; //Assume 70 energy when you activate Berserk
    if (OmenOfClarity)
    {
        float oocProcs = ((3.5f * (Duration / 60f)) / AttackSpeed) * (1f - AvoidedAttacks); //Counts all OOCs as being used on the CPG. Should be made more accurate than that, but that's close at least
        if (ClearcastOnBleedChance > 0)
        {
            // Rake ticks every 3s, Rip every 2s — combined tick rate over the fight.
            float dotTicks = (1f / 3f + 1f / 2f) * Duration;
            oocProcs += dotTicks * ClearcastOnBleedChance;
        }
        // Undo the CPG cost multiplier to value each free (clearcast) CPG at its raw energy cost.
        float cpgEnergyRaw = (useShred ? ShredStats.EnergyCost : MangleStats.EnergyCost) / CPGEnergyCostMultiplier;
        // A clearcast spent on an avoided attack only refunds 20% of the energy.
        totalEnergyAvailable += oocProcs * (cpgEnergyRaw * (1f - AvoidedAttacks) + cpgEnergyRaw * AvoidedAttacks * 0.2f);
    }
    float totalCPAvailable = 0f;
    // Average GCDs needed per successful special, accounting for avoided attacks.
    float averageGCD = 1f / (1f - AvoidedAttacks);
    // Glyph of Shred extends Rip by up to 6 seconds when Shred is used.
    float ripDurationUptime = RipStats.DurationUptime + (GlyphOfShred && useShred ? 6f : 0f);
    float ripDurationAverage = RipStats.DurationAverage + (GlyphOfShred && useShred ? 6f : 0f);
    // Finishers are always used at 5 CP; _chanceExtraCP[4] is the expected overflow CP.
    float averageFinisherCP = 5f + _chanceExtraCP[4];
    #region Melee
    float meleeCount = Duration / AttackSpeed;
    #endregion
    #region Rake
    float rakeCount = 0;
    float rakeTotalEnergy = 0;
    float rakeCP = 0;
    if (useRake)
    {
        //When maintaining Mangle, lose 2 GCDs at the start of the fight to Mangle, Roar.
        float durationRakeable = Duration - (MaintainMangle ? 2f * averageGCD : 0f);
        //Lose some time due to Rip/Rake conflicts
        float rakeRipConflict = (1f / ripDurationAverage) * 0.5f * averageGCD;
        rakeCount = durationRakeable / (RakeStats.DurationAverage + rakeRipConflict);
        rakeTotalEnergy = rakeCount * RakeStats.EnergyCost;
        rakeCP = rakeCount * CPPerCPG;
        totalCPAvailable += rakeCP;
        totalEnergyAvailable -= rakeTotalEnergy;
    }
    #endregion
    #region Mangle
    float mangleCount = 0f;
    float mangleTotalEnergy = 0f;
    float mangleCP = 0f;
    if (MaintainMangle)
    {
        //Lose some time due to Mangle/Rake and Mangle/Rip conflicts
        float mangleRakeConflict = (1f / RakeStats.DurationAverage) * 0.5f * averageGCD;
        float mangleRipConflict = (1f / ripDurationAverage) * 0.5f * averageGCD;
        mangleCount = Duration / (MangleStats.DurationAverage - mangleRakeConflict - mangleRipConflict);
        mangleTotalEnergy = mangleCount * MangleStats.EnergyCost;
        mangleCP = mangleCount * CPPerCPG;
        totalCPAvailable += mangleCP;
        totalEnergyAvailable -= mangleTotalEnergy;
    }
    #endregion
    #region Combo Point Generator
    float cpgCount = 0f;
    float cpgEnergy = useShred ? ShredStats.EnergyCost : MangleStats.EnergyCost;
    float shredCount = 0f;
    #endregion
    #region Savage Roar
    // Expected CP spent per Roar: chance of one overflow CP above the target roarCP.
    float averageRoarCP = ((float)roarCP + 1f) * _chanceExtraCP[roarCP - 1]
        + ((float)roarCP) * (1f - _chanceExtraCP[roarCP - 1]);
    //Lose some time due to Roar/Rake, Roar/Mangle, and Roar/Rip conflicts
    float roarRakeConflict = (1f / RakeStats.DurationAverage) * 0.5f * averageGCD;
    float roarMangleConflict = (1f / MangleStats.DurationAverage) * 0.5f * averageGCD;
    float roarRipConflict = (1f / ripDurationAverage) * 0.5f * (averageGCD * averageFinisherCP / CPPerCPG);
    // Roar base duration plus 5 seconds per combo point (capped at 5 CP).
    float roarDuration = RoarStats.DurationAverage + 5f * Math.Min(5f, averageRoarCP)
        - roarRakeConflict - roarMangleConflict - roarRipConflict;
    float roarCount = Duration / roarDuration;
    float roarTotalEnergy = roarCount * RoarStats.EnergyCost;
    float roarCPRequired = roarCount * averageRoarCP;
    // If Rake/Mangle CP income does not cover Roar upkeep, buy the difference with CPGs.
    if (totalCPAvailable < roarCPRequired)
    {
        float cpToGenerate = roarCPRequired - totalCPAvailable;
        float cpgToUse = cpToGenerate / CPPerCPG;
        cpgCount += cpgToUse;
        totalEnergyAvailable -= cpgToUse * cpgEnergy;
        totalCPAvailable += cpToGenerate;
    }
    totalCPAvailable -= roarCPRequired;
    totalEnergyAvailable -= roarTotalEnergy;
    #endregion
    #region Damage Finishers
    float ripCount = 0f;
    float biteCount = 0f;
    if (useRip)
    {
        #region Rip
        //Lose GCDs at the start of the fight to Mangle/Rake, Roar, and enough CPGs to get 5CPG.
        float durationRipable = Duration - 2f * averageGCD - (averageGCD * (averageFinisherCP / CPPerCPG));
        float ripCountMax = durationRipable / ripDurationAverage;
        // First spend CP already banked from Rake/Mangle upkeep...
        float ripsFromAvailableCP = Math.Min(ripCountMax, totalCPAvailable / averageFinisherCP);
        ripCount += ripsFromAvailableCP;
        totalCPAvailable -= averageFinisherCP * ripsFromAvailableCP;
        totalEnergyAvailable -= RipStats.EnergyCost * ripsFromAvailableCP;
        // ...then buy full CPG+Rip cycles with the remaining energy budget.
        float ripCycleEnergy = (averageFinisherCP / CPPerCPG) * cpgEnergy + RipStats.EnergyCost;
        float ripsFromNewCP = Math.Min(ripCountMax - ripsFromAvailableCP, totalEnergyAvailable / ripCycleEnergy);
        ripCount += ripsFromNewCP;
        cpgCount += (averageFinisherCP / CPPerCPG) * ripsFromNewCP;
        totalEnergyAvailable -= ripCycleEnergy * ripsFromNewCP;
        #endregion
    }
    if (biteCP > 0)
    {
        #region Ferocious Bite
        // Expected CP per Bite, including the chance of one overflow CP above biteCP.
        float averageBiteCP = ((float)biteCP + 1f) * _chanceExtraCP[biteCP - 1]
            + ((float)biteCP) * (1f - _chanceExtraCP[biteCP - 1]);
        // Bite consumes all remaining CP, then converts all remaining energy into
        // CPG+Bite cycles — both budgets are zeroed here.
        float bitesFromAvailableCP = totalCPAvailable / averageBiteCP;
        biteCount += bitesFromAvailableCP;
        totalCPAvailable = 0;
        totalEnergyAvailable -= BiteStats.EnergyCost * bitesFromAvailableCP;
        float biteCycleEnergy = (averageBiteCP / CPPerCPG) * cpgEnergy + BiteStats.EnergyCost;
        float bitesFromNewCP = totalEnergyAvailable / biteCycleEnergy;
        biteCount += bitesFromNewCP;
        cpgCount += bitesFromNewCP * (averageBiteCP / CPPerCPG);
        totalEnergyAvailable = 0f;
        #endregion
    }
    #endregion
    #region Extra Energy turned into Combo Point Generators
    if (totalEnergyAvailable > 0)
    {
        cpgCount += totalEnergyAvailable / cpgEnergy;
        totalEnergyAvailable = 0f;
    }
    #endregion
    #region Damage Totals
    // All CPGs bought above are attributed to the chosen generator ability.
    if (useShred) shredCount += cpgCount;
    else mangleCount += cpgCount;
    float meleeDamageTotal = meleeCount * MeleeStats.DamagePerSwing;
    float mangleDamageTotal = mangleCount * MangleStats.DamagePerSwing;
    float rakeDamageTotal = rakeCount * RakeStats.DamagePerSwing;
    float shredDamageTotal = shredCount * ShredStats.DamagePerSwing;
    // Rip damage is scaled by its uptime fraction relative to the 12s base duration.
    float ripDamageTotal = ripCount * RipStats.DamagePerSwing * (ripDurationUptime / 12f);
    float biteDamageTotal = biteCount * (BiteStats.DamagePerSwing + BiteStats.DamagePerSwingPerCP * biteCP);
    float damageTotal = meleeDamageTotal + mangleDamageTotal + rakeDamageTotal + shredDamageTotal + ripDamageTotal + biteDamageTotal;
    #endregion
    //StringBuilder rotationName = new StringBuilder();
    //if (MaintainMangle || !useShred) rotationName.Append("Mangle+");
    //if (useRake) rotationName.Append("Rake+");
    //if (useShred) rotationName.Append("Shred+");
    //if (useRip) rotationName.Append("Rip+");
    //if (biteCP>0) rotationName.AppendFormat("Bite{0}+", biteCP);
    //rotationName.Append("Roar" + roarCP.ToString());
    return new CatRotationCalculation()
    {
        //Name = rotationName.ToString(),
        DPS = damageTotal / Duration,
        TotalDamage = damageTotal,
        MeleeCount = meleeCount,
        MangleCount = mangleCount,
        RakeCount = rakeCount,
        ShredCount = shredCount,
        RipCount = ripCount,
        BiteCount = biteCount,
        RoarCount = roarCount,
        //MeleeDamageTotal = meleeDamageTotal,
        //MangleDamageTotal = mangleDamageTotal,
        //RakeDamageTotal = rakeDamageTotal,
        //ShredDamageTotal = shredDamageTotal,
        //RipDamageTotal = ripDamageTotal,
        //BiteDamageTotal = biteDamageTotal,
        RoarCP = roarCP,
        BiteCP = biteCP,
    };
    //List<string> rotationName = new List<string>();
    //if (MaintainMangle || !useShred) rotationName.Add("Mangle");
    //if (useShred) rotationName.Add("Shred");
    //if (useRip) rotationName.Add("Rip");
    //if (useFerociousBite) rotationName.Add("Bite");
    //rotationName.Add("Roar" + roarCP.ToString());
    //return new CatRotationCalculation()
    //{
    //    Name = string.Join(" + ", rotationName.ToArray()),
    //    DPS = damageTotal / Duration,
    //    MeleeDamageTotal = meleeDamageTotal,
    //    MangleDamageTotal = mangleDamageTotal,
    //    RakeDamageTotal = rakeDamageTotal,
    //    ShredDamageTotal = shredDamageTotal,
    //    RipDamageTotal = ripDamageTotal,
    //    BiteDamageTotal = biteDamageTotal,
    //    DamageTotal = damageTotal,
    //    RoarCP = roarCP,
    //};
}
/// <summary>
/// Result container for one evaluated cat rotation: total/average damage plus the
/// number of times each ability is used over the fight, and the combo-point settings
/// (RoarCP/BiteCP) that identify the rotation.
/// </summary>
public class CatRotationCalculation
{
    // Damage per second over the whole fight.
    public float DPS { get; set; }
    // Total damage dealt over the whole fight.
    public float TotalDamage { get; set; }
    //public Stats Stats { get; set; }
    //public float Duration { get; set; }
    //public float CPPerCPG { get; set; }
    //public bool MaintainMangle { get; set; }
    //public float MangleDuration { get; set; }
    //public float RipDuration { get; set; }
    //public float AttackSpeed { get; set; }
    //public bool OmenOfClarity { get; set; }
    //public float MeleeDamage { get; set; }
    //public float MangleDamage { get; set; }
    //public float ShredDamage { get; set; }
    //public float RakeDamage { get; set; }
    //public float RipDamage { get; set; }
    //public float BiteDamage { get; set; }
    //public float MangleEnergy { get; set; }
    //public float ShredEnergy { get; set; }
    //public float RakeEnergy { get; set; }
    //public float RipEnergy { get; set; }
    //public float BiteEnergy { get; set; }
    //public float RoarEnergy { get; set; }
    //public float MeleeDamageTotal { get; set; }
    //public float MangleDamageTotal { get; set; }
    //public float RakeDamageTotal { get; set; }
    //public float ShredDamageTotal { get; set; }
    //public float RipDamageTotal { get; set; }
    //public float BiteDamageTotal { get; set; }
    // Per-ability usage counts (fractional — expected values over the fight).
    public float MeleeCount { get; set; }
    public float MangleCount { get; set; }
    public float ShredCount { get; set; }
    public float RakeCount { get; set; }
    public float RipCount { get; set; }
    public float BiteCount { get; set; }
    public float RoarCount { get; set; }
    // Combo-point thresholds that identify this rotation.
    public int RoarCP { get; set; }
    public int BiteCP { get; set; }
    /// <summary>
    /// Renders the rotation as a short ability-code label ("Ma Ra Sh Ri FB5 Ro5")
    /// followed, after a '*' separator, by human-readable upkeep instructions.
    /// </summary>
    public override string ToString()
    {
        StringBuilder rotation = new StringBuilder();
        if (MangleCount > 0) rotation.Append("Ma ");
        if (RakeCount > 0) rotation.Append("Ra ");
        if (ShredCount > 0) rotation.Append("Sh ");
        if (RipCount > 0) rotation.Append("Ri ");
        if (BiteCount > 0) rotation.AppendFormat("FB{0} ", BiteCP);
        rotation.Append("Ro" + RoarCP.ToString());
        // The '*' deliberately separates the short label from the instruction text.
        rotation.AppendFormat("*Keep {0}cp Savage Roar up.\r\n", RoarCP);
        if (MangleCount > 0) rotation.Append("Keep Mangle up.\r\n");
        if (RakeCount > 0) rotation.Append("Keep Rake up.\r\n");
        if (RipCount > 0) rotation.Append("Keep 5cp Rip up.\r\n");
        if (BiteCount > 0) rotation.AppendFormat("Use {0}cp Ferocious Bites to spend extra combo points.\r\n", BiteCP);
        if (ShredCount > 0) rotation.Append("Use Shred for combo points.");
        else rotation.Append("Use Mangle for combo points.");
        return rotation.ToString();
    }
}
}
}
| Alacant/Rawr-RG | Rawr.Cat/CatRotationCalculator.cs | C# | apache-2.0 | 15,673 |
/*
* Copyright (C) 2013 ENTERTAILION LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.entertailion.android.slideshow.images;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import android.net.Uri;
import android.util.Log;
import com.entertailion.android.slideshow.utils.Utils;
public class PhotoNetImageLoader extends ImageLoader {
private static final String LOG_TAG = "PhotoNetImageLoader";
private static final int MAX_PAGES = 10;
/**
 * Creates a loader that scrapes photo.net gallery pages.
 *
 * @param sInstance the shared ImageManager that receives loaded image items
 * @param query the base photo.net gallery filter URL to scrape (paging
 *              parameters are appended per page in doRun)
 */
public PhotoNetImageLoader(ImageManager sInstance, String query) {
	super(sInstance, query);
}
/**
 * Scrapes up to MAX_PAGES of the photo.net gallery identified by {@code query},
 * extracting thumbnail/medium/large image URLs and feeding each result to
 * {@link #addItem} as an {@link ImageItem}. Stops early when a page yields no
 * usable images.
 *
 * Fixes: Jsoup's {@code attr()} returns the empty string — never null — for a
 * missing attribute, so the original {@code attr("alt") != null} check always
 * passed, clobbering the page title with "" and leaking one photo's alt text
 * into subsequent items. The title is now resolved per image.
 *
 * @throws Exception on network or parsing failure (propagated to the caller)
 */
@Override
public void doRun() throws Exception {
	Uri uri = Uri.parse(query);
	int page = 1;
	do {
		// Pages are addressed by a 12-image start_index plus a 1-based page number:
		// ...&start_index=0&page=1, ...&start_index=12&page=2, ...&start_index=24&page=3
		String url = query + "&start_index=" + ((page - 1) * 12) + "&page=" + page;
		String html = Utils.getCachedData(imageManager.getContext(), url, true);
		boolean found = false;
		// Scrape HTML for images.
		Document doc = Jsoup.parse(html);
		// http://jsoup.org/cookbook/extracting-data/selector-syntax
		Elements titles = doc.select("title");
		String pageTitle = null;
		if (titles != null && titles.size() > 0) {
			pageTitle = titles.first().text();
		}
		Elements images = doc.select(".trp_photo img");
		for (Element image : images) {
			String src = image.attr("src");
			Log.d(LOG_TAG, uri.getHost() + ": " + src);
			String imageUrl = null;
			// Thumbnails look like http://thumbs.photo.net/photo/16803875-sm.jpg;
			// -md.jpg and -lg.jpg variants may exist (the -lg one on gallery.photo.net).
			if (src.contains("-sm.jpg")) {
				String originalSrc = src;
				// Prefer the medium resolution for the thumbnail if available.
				String largerImage = originalSrc.replace("-sm.jpg", "-md.jpg");
				if (Utils.checkUrl(largerImage)) {
					src = largerImage;
				}
				// Only the large variant is accepted as the full-size image;
				// items without it are skipped below (presumably intentional — confirm).
				largerImage = originalSrc.replace("-sm.jpg", "-lg.jpg");
				if (Utils.checkUrl(largerImage)) {
					imageUrl = largerImage;
				}
			}
			if (imageUrl != null) {
				String pageUrl = url;
				Element parent = image.parent();
				if (parent.nodeName().equals("a")) {
					String href = parent.attr("href");
					if (href != null) {
						pageUrl = "http://" + uri.getHost() + href;
					}
				}
				doSleep();
				// attr() returns "" (never null) for a missing attribute; only use a
				// non-empty alt text, and keep it local so it cannot leak into later items.
				String itemTitle = pageTitle;
				String alt = image.attr("alt");
				if (alt != null && !alt.isEmpty()) {
					itemTitle = alt;
				}
				final ImageItem imageItem = new ImageItem(imageManager.getContext(), src, imageUrl, itemTitle, uri.getHost(), url, pageUrl, url);
				addItem(imageItem);
				found = true;
			}
		}
		if (!found) {
			break;
		}
	} while (++page < MAX_PAGES);
}
} | entertailion/Slideshow-for-GTV | src/com/entertailion/android/slideshow/images/PhotoNetImageLoader.java | Java | apache-2.0 | 3,499 |
/**
* Copyright 2016-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.pay.impl;
import com.amazon.pay.Config;
import com.amazon.pay.exceptions.AmazonClientException;
import com.amazon.pay.response.model.Environment;
import com.amazon.pay.types.CurrencyCode;
import com.amazon.pay.types.Key;
import com.amazon.pay.types.Region;
import java.util.Enumeration;
import java.util.Properties;
public class PayConfig implements Config {
private String accessKey;
private char[] secretKey;
private String merchantId;
private Region region;
private Environment environment;
private CurrencyCode currencyCode;
private String applicationName;
private String applicationVersion;
private String proxyHost;
private int proxyPort;
private String proxyUsername;
private String proxyPassword;
private boolean useAutoRetryOnThrottle;
private String overrideServiceURL;
private String overrideProfileURL;
/**
* Constructor for PayConfig - container that stores all configuration parameters for client
*
* Required parameters are merchantId, accessKey and secretKey.
* Specify them using
* withSellerId() and withAccessKeyId() and withSecretKey()
* Note that default currency code is USD and default region code is US
* You can override default values using
* withRegionCode() and withCurrencyCode()
*
* Default environment is Live mode.
* You can override default environment using
* withSandboxMode(true)
*
* Optional proxy parameters allow to connect via proxy using
* parameters like withProxyHost(), withProxyPort(), withProxyUserName(), withProxyPassword().
*
*/
public PayConfig() {
}
/**
 * Constructs a PayConfig from the given properties and validates that every
 * required setting (access key, secret key, merchant id, environment, region,
 * currency code) is present.
 *
 * @param prop configuration properties; names must match {@link Key} values
 *             (matched case-insensitively)
 * @throws IllegalArgumentException if the properties are null/empty, contain an
 *             unparseable value, or a required setting is missing
 */
public PayConfig(Properties prop) {
    loadConfigurationFromProperties(prop);
    checkIfRequriedPropertiesExist();
}
/**
* Returns the MerchantId from PayConfig
*
* @return merchantId
*/
@Override
public String getSellerId() {
return merchantId;
}
/**
*
* @param merchantId - Sets MerchantId/SellerId in PayConfig
*/
@Override
public void setSellerId(String merchantId) {
this.merchantId = merchantId;
}
/**
*
* @param merchantId - merchantId/sellerId is a unique identifier supplied by
* Amazon when you first created your account. This ID can be found
* in the Amazon Seller Central account.
*
* @return Returns updated PayConfig object
*/
public PayConfig withSellerId(String merchantId) {
this.merchantId = merchantId;
return this;
}
/**
* Returns AccessKey from PayConfig
*
* @return accessKeyId
*/
@Override
public String getAccessKey() {
return accessKey;
}
/**
*
* @param accessKey - Sets AccessKey in PayConfig
*/
@Override
public void setAccessKey(String accessKey) {
this.accessKey = accessKey;
}
/**
*
* @param accessKey - Sets AccessKey in PayConfig
*
* @return Returns updated PayConfig object
*/
public PayConfig withAccessKey(String accessKey) {
this.accessKey = accessKey;
return this;
}
/**
* Returns SecretKey from PayConfig
*
* @return secretAccessKey
*
*/
@Override
public char[] getSecretKey() {
return secretKey;
}
/**
* @deprecated(since = "3.7.0") This method is deprecated, instead use setSecretKey(char[] secretKey)
* @param secretKey - Sets SecretKey in PayConfig
*/
@Override
@Deprecated
public void setSecretKey(String secretKey) {
this.secretKey = secretKey.toCharArray();
}
/**
* @param secretKey - Sets SecretKey in PayConfig
*/
@Override
public void setSecretKey(char[] secretKey) {
this.secretKey = secretKey;
}
/**
* @deprecated(since = "3.7.0") This method is deprecated, instead use withSecretKey(char[] privateKey)
* @param secretKey - Sets SecretKey in PayConfig
* @return Returns updated PayConfig object
*/
@Deprecated
public PayConfig withSecretKey(String secretKey) {
this.secretKey = secretKey.toCharArray();
return this;
}
/**
* @param secretKey - Sets SecretKey in PayConfig
* @return Returns updated PayConfig object
*/
public PayConfig withSecretKey(char[] secretKey) {
this.secretKey = secretKey;
return this;
}
/**
* Returns region code from PayConfig
*
* @see com.amazon.pay.Config
*
* @return region
*/
@Override
public Region getRegion() {
return region;
}
/**
*
* @param region - Identifies region associated with Amazon Pay API operations.
*
*/
@Override
public void setRegion(Region region) {
this.region = region;
}
/**
*
* @param region - Identifies region associated with Amazon Pay API operations.
*
* @return Returns updated PayConfig object
*
*/
public PayConfig withRegion(Region region) {
this.region = region;
return this;
}
/**
* Returns the environment from PayConfig
*
* @return environment
*/
@Override
public Environment getEnvironment() {
return environment;
}
/**
*
* @param environment - Sets environment in PayConfig.
*/
@Override
public void setEnvironment(Environment environment) {
this.environment = environment;
}
/**
*
*
* @param isSandbox - If true, sets environment to SANDBOX mode.
*
* @return Returns updated PayConfig object
*/
public PayConfig withSandboxMode(boolean isSandbox) {
if (isSandbox)
this.environment = Environment.SANDBOX;
else
this.environment = Environment.LIVE;
return this;
}
/**
* Returns currencyCode in PayConfig
* @see com.amazon.pay.impl.PayConfig
*
* @return currencyCode
*/
@Override
public CurrencyCode getCurrencyCode() {
return currencyCode;
}
/**
*
* @param currencyCode - Sets currencyCode in PayConfig
*/
@Override
public void setCurrencyCode(CurrencyCode currencyCode) {
this.currencyCode = currencyCode;
}
/**
*
* @param currencyCode - Represents currency code to be used for all Amazon Pay API operations.
* Accepts three-digit currency code, such as USD (dollars), EUR (euros), GBP (pounds) or JPY (Japanese Yen).
* The currency code in ISO 4217 format.
*
* @return Returns updated PayConfig object
*/
public PayConfig withCurrencyCode(CurrencyCode currencyCode) {
this.currencyCode = currencyCode;
return this;
}
/**
* Returns the application name from PayConfig
*
* @return applicationName
*
*/
@Override
public String getApplicationName() {
return applicationName;
}
/**
* Sets Application Name in PayConfig
*
* @param applicationName - Sets application name
*/
@Override
public void setApplicationName(String applicationName) {
this.applicationName = applicationName;
}
/**
*
* @param applicationName - This method sets application name in PayConfig
*
* @return - Returns updated PayConfig object
*
*/
public PayConfig withApplicationName(String applicationName) {
this.applicationName = applicationName;
return this;
}
/**
* Returns the application version set in PayConfig
*
* @return applicationVersion
*
*/
@Override
public String getApplicationVersion() {
return applicationVersion;
}
/**
*
* @param applicationVersion -Sets Application Version in PayConfig
*/
@Override
public void setApplicationVersion(String applicationVersion) {
this.applicationVersion = applicationVersion;
}
/**
*
* @param applicationVersion - This method sets application version in PayConfig
*
* @return Returns updated PayConfig object
*/
public PayConfig withApplicationVersion(String applicationVersion) {
this.applicationVersion = applicationVersion;
return this;
}
/**
* Returns the proxy host set in the PayConfig
*
* @return proxyHost
*/
@Override
public String getProxyHost() {
return proxyHost;
}
/**
*
* @param proxyHost - Sets proxy host in PayConfig
*/
@Override
public void setProxyHost(String proxyHost) {
this.proxyHost = proxyHost;
}
/**
*
* @param proxyHost - This method sets proxy host in PayConfig
*
* @return Returns updated PayConfig object
*/
public PayConfig withProxyHost(String proxyHost) {
this.proxyHost = proxyHost;
return this;
}
/**
* Returns proxy port from PayConfig
*
* @return proxyPort
*/
@Override
public int getProxyPort() {
return proxyPort;
}
/**
*
* @param proxyPort - Sets proxy port in PayConfig
*/
@Override
public void setProxyPort(int proxyPort) {
this.proxyPort = proxyPort;
}
/**
*
* @param proxyPort - This method sets proxy port in PayConfig
*
* @return Returns updated PayConfig object
*/
public PayConfig withProxyPort(int proxyPort) {
this.proxyPort = proxyPort;
return this;
}
/**
* Returns proxy username from PayConfig
*
* @return proxyUsername
*/
@Override
public String getProxyUsername() {
return proxyUsername;
}
/**
*
* @param proxyUsername - Sets proxy username in PayConfig
*/
@Override
public void setProxyUsername(String proxyUsername) {
this.proxyUsername = proxyUsername;
}
/**
*
* @param proxyUsername - This methods sets proxy username in PayConfig
*
* @return Returns updated PayConfig object
*/
public PayConfig withProxyUsername(String proxyUsername) {
this.proxyUsername = proxyUsername;
return this;
}
/**
* Returns proxy password in PayConfig
*
* @return proxyPassword
*/
@Override
public String getProxyPassword() {
return proxyPassword;
}
/**
*
* @param proxyPassword - Set proxy password in PayConfig
*/
@Override
public void setProxyPassword(String proxyPassword) {
this.proxyPassword = proxyPassword;
}
/**
*
* @param proxyPassword - Sets proxy username in PayConfig.
*
* @return
* Returns updated PayConfig object
*/
public PayConfig withProxyPassword(String proxyPassword) {
this.proxyPassword = proxyPassword;
return this;
}
/**
* Returns true if Client retries on service exceptions that are either 500 internal server
* errors, 503 service unavailable errors, service throttling errors.
*
* @return useAutoRetryOnThrottle
*/
@Override
public boolean isUseAutoRetryOnThrottle() {
return useAutoRetryOnThrottle;
}
/**
* Sets autoRetryOnThrottle in PayConfig
* If set to true, client will retry on service exceptions that are either 500 internal server
* errors, 503 service unavailable errors, service throttling errors.
*
* @param useAutoRetryOnThrottle - argument that sets autoRetryOnThrottle in PayConfig
*/
@Override
public void setUseAutoRetryOnThrottle(boolean useAutoRetryOnThrottle) {
this.useAutoRetryOnThrottle = useAutoRetryOnThrottle;
}
/**
* Sets autoRetryOnThrottle in PayConfig
* If set to true, client will retry on service exceptions that are either 500 internal server
* errors, 503 service unavailable errors, service throttling errors.
*
* @param useAutoRetryOnThrottle - argument that sets autoRetryOnThrottle in PayConfig
*
* @return Returns updated PayConfig object
*/
public PayConfig withUseAutoRetryOnThrottle(boolean useAutoRetryOnThrottle) {
this.useAutoRetryOnThrottle = useAutoRetryOnThrottle;
return this;
}
/**
* Returns overridden MWS Service URL in PayConfig
*
* @return overrideServiceURL
*/
@Override
public String getOverrideServiceURL() {
return overrideServiceURL;
}
/**
* @param overrideServiceURL Sets MWS Service URL override in PayConfig
* This should only be used if you need to programmatically override the default MWS service endpoint
* provided by the SDK's mwsEndpointMappingsMap in com.amazon.pay.types.ServiceConstants.
*/
@Override
public void setOverrideServiceURL(String overrideServiceURL) {
this.overrideServiceURL = overrideServiceURL;
}
/**
* @param overrideServiceURL Sets MWS Service URL override in PayConfig
* This should only be used if you need to programmatically override the default MWS service endpoint
* provided by the SDK's mwsEndpointMappingsMap in com.amazon.pay.types.ServiceConstants.
*
* @return PayConfig
*/
public PayConfig withOverrideServiceURL(String overrideServiceURL) {
this.overrideServiceURL = overrideServiceURL;
return this;
}
/**
* Returns overridden LWA Profile URL in PayConfig
*
* @return overrideProfileURL
*/
@Override
public String getOverrideProfileURL() {
return overrideProfileURL;
}
/**
* @param overrideProfileURL Sets LWA Profile URL override in PayConfig
* This should only be used if you need to programmatically override the default LWA profile endpoint
* provided by the SDK's profileEndpointMappingsMap in com.amazon.pay.types.ServiceConstants.
*/
@Override
public void setOverrideProfileURL(String overrideProfileURL) {
this.overrideProfileURL = overrideProfileURL;
}
/**
* @param overrideProfileURL Sets LWA Profile URL override in PayConfig
* This should only be used if you need to programmatically override the default LWA profile endpoint
* provided by the SDK's profileEndpointMappingsMap in com.amazon.pay.types.ServiceConstants.
*
* @return PayConfig
*/
public PayConfig withOverrideProfileURL(String overrideProfileURL) {
this.overrideProfileURL = overrideProfileURL;
return this;
}
/**
 * Populates this config from the given properties. Each property name is
 * upper-cased and matched against the {@link Key} enum, then routed to the
 * corresponding setter. Enum-valued settings (environment, region, currency
 * code) are also parsed case-insensitively.
 *
 * @param prop source properties; must be non-null and non-empty
 * @return this PayConfig, for chaining
 * @throws IllegalArgumentException if prop is null/empty, a property name does
 *             not match any {@link Key}, or an enum/number value cannot be parsed
 *             (the offending key/value pair is included in the message)
 * @throws AmazonClientException if a name matches a {@link Key} that this
 *             switch does not handle (indicates a Key/switch mismatch)
 */
private PayConfig loadConfigurationFromProperties(Properties prop) {
    if (prop == null || prop.isEmpty())
        throw new IllegalArgumentException("Properties are empty, Need required propeties to proceed configuring amazon Pay client");
    Enumeration enumeration = prop.propertyNames();
    while (enumeration.hasMoreElements()) {
        String property = (String) enumeration.nextElement();
        try {
            // Key.valueOf throws IllegalArgumentException for unknown names;
            // it is caught and rewrapped with the key/value context below.
            switch (Key.valueOf(property.toUpperCase())) {
                case ACCESS_KEY:
                    this.setAccessKey(prop.getProperty(property));
                    break;
                case SECRET_KEY:
                    this.setSecretKey(prop.getProperty(property).toCharArray());
                    break;
                case MERCHANT_ID:
                    this.setSellerId(prop.getProperty(property));
                    break;
                case ENVIRONMENT:
                    Environment env = Environment.valueOf(prop.getProperty(property).toUpperCase());
                    this.setEnvironment(env);
                    break;
                case REGION:
                    Region reg = Region.valueOf((prop.getProperty(property)).toUpperCase());
                    this.setRegion(reg);
                    break;
                case CURRENCY_CODE:
                    CurrencyCode currency = CurrencyCode.valueOf((prop.getProperty(property)).toUpperCase());
                    this.setCurrencyCode(currency);
                    break;
                case PROXY_HOST:
                    String proxyHostProperty = prop.getProperty(property);
                    this.setProxyHost(proxyHostProperty);
                    break;
                case PROXY_PORT:
                    String proxyPortProperty = prop.getProperty(property);
                    // Only parse when present; NumberFormatException (an
                    // IllegalArgumentException) is rewrapped with context below.
                    if (proxyPortProperty != null && !proxyPortProperty.isEmpty())
                        this.setProxyPort(Integer.parseInt(proxyPortProperty));
                    break;
                case PROXY_USERNAME:
                    String proxyUsernameProperty = prop.getProperty(property);
                    this.setProxyUsername(proxyUsernameProperty);
                    break;
                case PROXY_PASSWORD:
                    String proxyPasswordProperty = prop.getProperty(property);
                    this.setProxyPassword(proxyPasswordProperty);
                    break;
                case APPLICATION_NAME:
                    String applicationNameProperty = prop.getProperty(property);
                    this.setApplicationName(applicationNameProperty);
                    break;
                case APPLICATION_VERSION:
                    String applicationVersionProperty = prop.getProperty(property);
                    this.setApplicationVersion(applicationVersionProperty);
                    break;
                case AUTO_RETRY_ON_THROTTLE:
                    String useAutoRetyOnThrottle = prop.getProperty(property);
                    this.setUseAutoRetryOnThrottle(Boolean.valueOf(useAutoRetyOnThrottle));
                    break;
                case OVERRIDE_SERVICE_URL:
                    String overrideServiceURL = prop.getProperty(property);
                    this.setOverrideServiceURL(overrideServiceURL);
                    break;
                case OVERRIDE_PROFILE_URL:
                    String overrideProfileURL = prop.getProperty(property);
                    this.setOverrideProfileURL(overrideProfileURL);
                    break;
                default:
                    throw new AmazonClientException(("Client error, unable to set client configuration property [Key: "
                            + property +", Value: " + prop.getProperty(property) + "]. " ));
            }
        } catch(IllegalArgumentException e) {
            throw new IllegalArgumentException("Unable to process client configuration parameter [Key: "
                    + property +", Value: " + prop.getProperty(property) + "]. " , e);
        }
    }
    return this;
}
/**
 * Helper method to check if required values (access key, secret key, merchant
 * id, environment, region, currency code) are set.
 *
 * NOTE(review): the method name retains its historical misspelling
 * ("Requried"); it is private but renaming would break the constructor call
 * site, so it is kept as-is. The boolean return is always {@code true} —
 * missing values are reported by throwing instead.
 *
 * @return true when all required properties are present
 * @throws IllegalArgumentException If required values are missing.
 */
private boolean checkIfRequriedPropertiesExist() {
    // generateException always throws, so the else-if chain reports the first
    // missing property in this fixed priority order.
    if (this.accessKey == null)
        generateException(Key.ACCESS_KEY);
    else if (this.secretKey == null || this.secretKey.length == 0)
        generateException(Key.SECRET_KEY);
    else if (this.merchantId == null)
        generateException(Key.MERCHANT_ID);
    else if (this.environment == null)
        generateException(Key.ENVIRONMENT);
    else if (this.region == null)
        generateException(Key.REGION);
    else if (this.currencyCode == null)
        generateException(Key.CURRENCY_CODE);
    return true;
}
/**
 * Fails fast with an IllegalArgumentException naming the required
 * configuration property that was not provided.
 *
 * @param propertyKey the required configuration key that is missing
 */
private void generateException(Key propertyKey) {
    final String message = propertyKey.toString()
            + " property is not set, this is a required property for Amazon Pay client configuration";
    throw new IllegalArgumentException(message);
}
/**
 * The string representation of configuration parameters.
 *
 * Security fix: the proxy password was previously emitted in cleartext, which
 * leaks the credential into any log that prints this config. It is now masked
 * ("***" when set, null when unset). The secret key is already omitted.
 *
 * @return Returns the string representation of configuration parameters
 */
@Override
public String toString() {
    return "PayConfig{" +
            "accessKeyId=" + accessKey +
            ", sellerId=" + merchantId +
            ", region=" + region +
            ", environment=" + environment +
            ", currencyCode=" + currencyCode +
            ", applicationName=" + applicationName +
            ", applicationVersion=" + applicationVersion +
            ", proxyHost=" + proxyHost +
            ", proxyPort=" + proxyPort +
            ", proxyUsername=" + proxyUsername +
            // Never print the real proxy password.
            ", proxyPassword=" + (proxyPassword == null ? null : "***") +
            ", useAutoRetryOnThrottle=" + useAutoRetryOnThrottle +
            ", overrideServiceURL=" + overrideServiceURL +
            ", overrideProfileURL=" + overrideProfileURL +
            "}";
}
}
| amzn/login-and-pay-with-amazon-sdk-java | src/com/amazon/pay/impl/PayConfig.java | Java | apache-2.0 | 21,709 |
package io.dropwizard.jersey.jackson;
import com.fasterxml.jackson.annotation.JsonIgnoreType;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Objects;
import com.google.common.reflect.TypeToken;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import com.sun.jersey.core.util.StringKeyObjectValueIgnoreCaseMultivaluedMap;
import io.dropwizard.jackson.Jackson;
import io.dropwizard.validation.ConstraintViolations;
import io.dropwizard.validation.Validated;
import org.hibernate.validator.constraints.NotEmpty;
import org.junit.Before;
import org.junit.Test;
import javax.validation.ConstraintViolationException;
import javax.validation.Valid;
import javax.validation.Validation;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.failBecauseExceptionWasNotThrown;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assume.assumeThat;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
// TODO: 4/24/13 <coda> -- move JacksonMessageBodyProviderTest to JerseyTest
@SuppressWarnings("unchecked")
public class JacksonMessageBodyProviderTest {
    // Shared empty annotation array for readFrom/writeTo calls that carry no annotations.
    private static final Annotation[] NONE = new Annotation[0];
public static class Example {
@Min(0)
@JsonProperty
int id;
@Override
public int hashCode() {
return id;
}
@Override
public boolean equals(Object obj) {
return Objects.equal(this.id, obj);
}
}
    // List wrapper used to exercise validation of nested (embedded) objects:
    // the list itself must be non-empty and each element is cascade-validated.
    public static class ListExample {
        @NotEmpty
        @Valid
        @JsonProperty
        List<Example> examples;
    }

    // Marker interfaces used as Bean Validation groups for partial validation.
    public interface Partial1{}
    public interface Partial2{}

    // Bean whose constraints are split across the two validation groups above,
    // so the tests can enforce only a subset of the constraints at a time.
    public static class PartialExample {
        @Min(value = 0, groups = Partial1.class)
        @JsonProperty
        int id;

        @NotNull(groups = Partial2.class)
        @JsonProperty
        String text;
    }
    // Type-level Jackson annotation: instances of this type must be skipped
    // by the provider for both reading and writing.
    @JsonIgnoreType
    public static interface Ignorable {
    }

    // Overrides the inherited @JsonIgnoreType, making the subtype
    // readable/writable again even though its supertype is ignored.
    @JsonIgnoreType(false)
    public static interface NonIgnorable extends Ignorable {
    }
    // Spy so tests can observe interactions with the underlying ObjectMapper.
    private final ObjectMapper mapper = spy(Jackson.newObjectMapper());

    // Provider under test, wired with the default Bean Validation validator.
    private final JacksonMessageBodyProvider provider =
            new JacksonMessageBodyProvider(mapper,
                                           Validation.buildDefaultValidatorFactory().getValidator());

    @Before
    public void setUp() throws Exception {
        // The expected constraint-violation messages asserted below are English
        // text, so the tests only run when the default locale is English.
        assumeThat(Locale.getDefault().getLanguage(), is("en"));
    }
    // --- Capability checks: what the provider claims it can read/write ---

    @Test
    public void readsDeserializableTypes() throws Exception {
        assertThat(provider.isReadable(Example.class, null, null, null))
                .isTrue();
    }

    @Test
    public void writesSerializableTypes() throws Exception {
        assertThat(provider.isWriteable(Example.class, null, null, null))
                .isTrue();
    }

    @Test
    public void doesNotWriteIgnoredTypes() throws Exception {
        assertThat(provider.isWriteable(Ignorable.class, null, null, null))
                .isFalse();
    }

    @Test
    public void writesUnIgnoredTypes() throws Exception {
        assertThat(provider.isWriteable(NonIgnorable.class, null, null, null))
                .isTrue();
    }

    @Test
    public void doesNotReadIgnoredTypes() throws Exception {
        assertThat(provider.isReadable(Ignorable.class, null, null, null))
                .isFalse();
    }

    @Test
    public void readsUnIgnoredTypes() throws Exception {
        assertThat(provider.isReadable(NonIgnorable.class, null, null, null))
                .isTrue();
    }

    @Test
    public void isChunked() throws Exception {
        // -1 tells Jersey the entity size is unknown (chunked transfer).
        assertThat(provider.getSize(null, null, null, null, null))
                .isEqualTo(-1);
    }
    @Test
    public void deserializesRequestEntities() throws Exception {
        // Plain deserialization with no validation annotations present.
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1}".getBytes());
        final Class<?> klass = Example.class;
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             Example.class,
                                             NONE,
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(Example.class);
        assertThat(((Example) obj).id)
                .isEqualTo(1);
    }

    @Test
    public void returnsPartialValidatedRequestEntities() throws Exception {
        // @Validated with both groups: every constraint applies and is satisfied.
        final Validated valid = mock(Validated.class);
        doReturn(Validated.class).when(valid).annotationType();
        when(valid.value()).thenReturn(new Class<?>[]{Partial1.class, Partial2.class});
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1,\"text\":\"hello Cemo\"}".getBytes());
        final Class<?> klass = PartialExample.class;
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             PartialExample.class,
                                             new Annotation[]{valid},
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(PartialExample.class);
        assertThat(((PartialExample) obj).id)
                .isEqualTo(1);
    }
    @Test
    public void returnsPartialValidatedByGroupRequestEntities() throws Exception {
        // Only Partial1 is requested, so Partial2's @NotNull(text) must not fire.
        final Validated valid = mock(Validated.class);
        doReturn(Validated.class).when(valid).annotationType();
        when(valid.value()).thenReturn(new Class<?>[]{Partial1.class});
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1}".getBytes());
        final Class<?> klass = PartialExample.class;
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             PartialExample.class,
                                             new Annotation[]{valid},
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(PartialExample.class);
        assertThat(((PartialExample) obj).id)
                .isEqualTo(1);
    }

    @Test
    public void throwsAnInvalidEntityExceptionForPartialValidatedRequestEntities() throws Exception {
        // Both groups requested but "text" is missing, so Partial2's @NotNull fails.
        final Validated valid = mock(Validated.class);
        doReturn(Validated.class).when(valid).annotationType();
        when(valid.value()).thenReturn(new Class<?>[]{Partial1.class, Partial2.class});
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1}".getBytes());
        try {
            final Class<?> klass = PartialExample.class;
            provider.readFrom((Class<Object>) klass,
                              PartialExample.class,
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch(ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .containsOnly("text may not be null (was null)");
        }
    }
    @Test
    public void returnsValidatedRequestEntities() throws Exception {
        // @Valid triggers default-group validation, which passes for id=1.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":1}".getBytes());
        final Class<?> klass = Example.class;
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             Example.class,
                                             new Annotation[]{ valid },
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(Example.class);
        assertThat(((Example) obj).id)
                .isEqualTo(1);
    }

    @Test
    public void throwsAnInvalidEntityExceptionForInvalidRequestEntities() throws Exception {
        // id=-1 violates @Min(0); the provider must surface the violation.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":-1}".getBytes());
        try {
            final Class<?> klass = Example.class;
            provider.readFrom((Class<Object>) klass,
                              Example.class,
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch (ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .containsOnly("id must be greater than or equal to 0 (was -1)");
        }
    }
    @Test
    public void throwsAJsonProcessingExceptionForMalformedRequestEntities() throws Exception {
        // Malformed JSON must propagate as a JsonProcessingException.
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"id\":-1d".getBytes());
        try {
            final Class<?> klass = Example.class;
            provider.readFrom((Class<Object>) klass,
                              Example.class,
                              NONE,
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(WebApplicationException.class);
        } catch (JsonProcessingException e) {
            assertThat(e.getMessage())
                    .startsWith("Unexpected character ('d' (code 100)): " +
                                "was expecting comma to separate OBJECT entries\n");
        }
    }

    @Test
    public void serializesResponseEntities() throws Exception {
        // Round-trip check of writeTo: a bean serializes to compact JSON.
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        final Example example = new Example();
        example.id = 500;
        provider.writeTo(example,
                         Example.class,
                         Example.class,
                         NONE,
                         MediaType.APPLICATION_JSON_TYPE,
                         new StringKeyObjectValueIgnoreCaseMultivaluedMap(),
                         output);
        assertThat(output.toString())
                .isEqualTo("{\"id\":500}");
    }

    @Test(expected = ConstraintViolationException.class)
    public void throwsAConstraintViolationExceptionForEmptyRequestEntities() throws Exception {
        // A null entity stream combined with @Valid must be rejected.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final Class<?> klass = Example.class;
        provider.readFrom((Class<Object>) klass,
                          Example.class,
                          new Annotation[]{valid},
                          MediaType.APPLICATION_JSON_TYPE,
                          new MultivaluedMapImpl(),
                          null);
    }
    @Test
    public void returnsValidatedArrayRequestEntities() throws Exception {
        // Arrays of validated beans deserialize element-by-element.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("[{\"id\":1}, {\"id\":2}]".getBytes());
        final Class<?> klass = Example[].class;
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             Example[].class,
                                             new Annotation[]{ valid },
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(Example[].class);
        assertThat(((Example[]) obj)[0].id)
                .isEqualTo(1);
        assertThat(((Example[]) obj)[1].id)
                .isEqualTo(2);
    }

    // The next three tests delegate to testValidatedCollectionType, varying
    // only the concrete collection interface requested.
    @Test
    public void returnsValidatedCollectionRequestEntities() throws Exception {
        testValidatedCollectionType(Collection.class,
                                    new TypeToken<Collection<Example>>() {}.getType());
    }

    @Test
    public void returnsValidatedSetRequestEntities() throws Exception {
        testValidatedCollectionType(Set.class,
                                    new TypeToken<Set<Example>>() {}.getType());
    }

    @Test
    public void returnsValidatedListRequestEntities() throws Exception {
        testValidatedCollectionType(List.class,
                                    new TypeToken<List<Example>>() {}.getType());
    }

    @Test
    public void returnsValidatedMapRequestEntities() throws Exception {
        // Map values are validated and retrievable by their JSON keys.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"one\": {\"id\":1}, \"two\": {\"id\":2}}".getBytes());
        final Class<?> klass = Map.class;
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             new TypeToken<Map<Object, Example>>() {}.getType(),
                                             new Annotation[]{ valid },
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(Map.class);
        Map<Object, Example> map = (Map<Object, Example>) obj;
        assertThat(map.get("one").id).isEqualTo(1);
        assertThat(map.get("two").id).isEqualTo(2);
    }
    // Shared driver: deserializes a two-element JSON array into the given
    // collection type and checks that both elements survived with their ids.
    private void testValidatedCollectionType(Class<?> klass, Type type) throws IOException {
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("[{\"id\":1}, {\"id\":2}]".getBytes());
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             type,
                                             new Annotation[]{ valid },
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(klass);
        Iterator<Example> iterator = ((Iterable<Example>)obj).iterator();
        assertThat(iterator.next().id).isEqualTo(1);
        assertThat(iterator.next().id).isEqualTo(2);
    }
    @Test
    public void throwsAnInvalidEntityExceptionForInvalidCollectionRequestEntities() throws Exception {
        // Every invalid element of a Collection must be reported.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("[{\"id\":-1}, {\"id\":-2}]".getBytes());
        try {
            final Class<?> klass = Example.class;
            provider.readFrom((Class<Object>) klass,
                              new TypeToken<Collection<Example>>() {}.getType(),
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch (ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .contains("id must be greater than or equal to 0 (was -1)",
                              "id must be greater than or equal to 0 (was -2)");
        }
    }

    @Test
    public void throwsASingleInvalidEntityExceptionForInvalidCollectionRequestEntities() throws Exception {
        // A mix of valid and invalid elements reports only the invalid one.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("[{\"id\":1}, {\"id\":-2}]".getBytes());
        try {
            final Class<?> klass = Example.class;
            provider.readFrom((Class<Object>) klass,
                              new TypeToken<Collection<Example>>() {}.getType(),
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch (ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .contains("id must be greater than or equal to 0 (was -2)");
        }
    }

    @Test
    public void throwsAnInvalidEntityExceptionForInvalidSetRequestEntities() throws Exception {
        // Same as the Collection case, but requested as a Set.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("[{\"id\":-1}, {\"id\":-2}]".getBytes());
        try {
            final Class<?> klass = Example.class;
            provider.readFrom((Class<Object>) klass,
                              new TypeToken<Set<Example>>() {}.getType(),
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch (ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .contains("id must be greater than or equal to 0 (was -1)",
                              "id must be greater than or equal to 0 (was -2)");
        }
    }

    @Test
    public void throwsAnInvalidEntityExceptionForInvalidListRequestEntities() throws Exception {
        // Same as the Collection case, but requested as a List; the violation
        // set must contain exactly the two expected messages.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("[{\"id\":-1}, {\"id\":-2}]".getBytes());
        try {
            final Class<?> klass = Example.class;
            provider.readFrom((Class<Object>) klass,
                              new TypeToken<List<Example>>() {}.getType(),
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch (ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .containsOnly("id must be greater than or equal to 0 (was -1)",
                                  "id must be greater than or equal to 0 (was -2)");
        }
    }
    @Test
    public void throwsAnInvalidEntityExceptionForInvalidMapRequestEntities() throws Exception {
        // Invalid Map values must be reported just like collection elements.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity = new ByteArrayInputStream("{\"one\": {\"id\":-1}, \"two\": {\"id\":-2}}".getBytes());
        try {
            final Class<?> klass = Example.class;
            provider.readFrom((Class<Object>) klass,
                              new TypeToken<Map<Object, Example>>() {}.getType(),
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch (ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .contains("id must be greater than or equal to 0 (was -1)",
                              "id must be greater than or equal to 0 (was -2)");
        }
    }

    @Test
    public void returnsValidatedEmbeddedListRequestEntities() throws IOException {
        // Cascaded validation: the outer list's elements contain their own
        // validated inner lists.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity =
                new ByteArrayInputStream("[ {\"examples\": [ {\"id\":1 } ] } ]".getBytes());
        Class<?> klass = List.class;
        final Object obj = provider.readFrom((Class<Object>) klass,
                                             new TypeToken<List<ListExample>>() {}.getType(),
                                             new Annotation[]{ valid },
                                             MediaType.APPLICATION_JSON_TYPE,
                                             new MultivaluedMapImpl(),
                                             entity);
        assertThat(obj)
                .isInstanceOf(klass);
        Iterator<ListExample> iterator = ((Iterable<ListExample>)obj).iterator();
        assertThat(iterator.next().examples.get(0).id).isEqualTo(1);
    }

    @Test
    public void throwsAnInvalidEntityExceptionForInvalidEmbeddedListRequestEntities() throws Exception {
        // The second element has no "examples" list, violating @NotEmpty.
        final Annotation valid = mock(Annotation.class);
        doReturn(Valid.class).when(valid).annotationType();
        final ByteArrayInputStream entity =
                new ByteArrayInputStream("[ {\"examples\": [ {\"id\":1 } ] }, { } ]".getBytes());
        try {
            final Class<?> klass = List.class;
            provider.readFrom((Class<Object>) klass,
                              new TypeToken<List<ListExample>>() {}.getType(),
                              new Annotation[]{ valid },
                              MediaType.APPLICATION_JSON_TYPE,
                              new MultivaluedMapImpl(),
                              entity);
            failBecauseExceptionWasNotThrown(ConstraintViolationException.class);
        } catch (ConstraintViolationException e) {
            assertThat(ConstraintViolations.formatUntyped(e.getConstraintViolations()))
                    .containsOnly("examples may not be empty (was null)");
        }
    }
}
| ptomli/dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/jackson/JacksonMessageBodyProviderTest.java | Java | apache-2.0 | 22,466 |
package com.gf;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class SpringbootLoggingApplication {

    /**
     * Application entry point: builds the Spring Boot application around this
     * configuration class and starts it with the supplied arguments.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        new SpringApplication(SpringbootLoggingApplication.class).run(args);
    }

}
| gitHubAction/hello-world | springboot-logging/src/main/java/com/gf/SpringbootLoggingApplication.java | Java | apache-2.0 | 320 |
#include <string>
#include <cstring>
#include <cstdio>
using namespace std;
namespace femm
{
// Return a pointer to the text immediately following the first '=' in c.
// If the string contains no '=', a pointer to its terminating NUL is
// returned instead, so the result is always a valid (possibly empty) C string.
char* StripKey(char *c)
{
    char *eq = strchr(c, '=');
    if (eq != NULL)
        return eq + 1;
    // No key/value separator: point at the end of the string.
    return c + strlen(c);
}
// Read the leading number token of t into *f and return a pointer to the
// start of the next token (or to the terminating NUL when the number was the
// last token).  Tokens are separated by tabs, commas, spaces and newlines.
// Returns NULL when t is NULL, empty, or contains only separators; *f is
// left untouched in that case.
char *ParseDbl(char *t, double *f)
{
    if (t==NULL) return NULL;

    static const char delims[] = "\t, \n";
    const size_t len = strlen(t);
    if (len==0) return NULL;

    // Locate the first token; if the whole string is separators, give up.
    const size_t first = strspn(t, delims);
    if (first == len) return NULL;

    // Skip past that token and any separators that follow it to find where
    // the caller should continue parsing.
    const size_t tokenEnd = first + strcspn(t + first, delims);
    const size_t next = (tokenEnd == len)
        ? len
        : tokenEnd + strspn(t + tokenEnd, delims);

    // Mirrors the historic behavior: parse from the start of the buffer and
    // ignore the sscanf result.
    sscanf(t,"%lf",f);
    return t + next;
}
// Read the leading integer token of t into *f (via "%i", so "0x" hex and
// leading-zero octal prefixes are honored) and return a pointer to the start
// of the next token (or to the terminating NUL when the number was the last
// token).  Tokens are separated by tabs, commas, spaces and newlines.
// Returns NULL when t is NULL, empty, or contains only separators; *f is
// left untouched in that case.
char *ParseInt(char *t, int *f)
{
    if (t==NULL) return NULL;

    static const char delims[] = "\t, \n";
    const size_t len = strlen(t);
    if (len==0) return NULL;

    // Locate the first token; if the whole string is separators, give up.
    const size_t first = strspn(t, delims);
    if (first == len) return NULL;

    // Skip past that token and any separators that follow it to find where
    // the caller should continue parsing.
    const size_t tokenEnd = first + strcspn(t + first, delims);
    const size_t next = (tokenEnd == len)
        ? len
        : tokenEnd + strspn(t + tokenEnd, delims);

    // Mirrors the historic behavior: parse from the start of the buffer and
    // ignore the sscanf result.
    sscanf(t,"%i",f);
    return t + next;
}
// Copy the text between the first pair of double quotes in t into *s and
// return a pointer just past the closing quote.  Returns NULL when t is
// NULL; returns t itself (leaving *s unmodified) when t is empty or does
// not contain two quotes.
char *ParseString(char *t, std::string *s)
{
    if (t==NULL) return NULL;
    if (std::strlen(t)==0) return t;

    // Find the opening and closing quotes; bail out (without touching *s)
    // if either is missing.
    char *open = std::strchr(t, '\"');
    if (open == NULL) return t;
    char *close = std::strchr(open + 1, '\"');
    if (close == NULL) return t;

    s->assign(open + 1, close);
    return close + 1;
}
// Default handler for warning messages: write the text verbatim to stdout.
// Callers are responsible for supplying any trailing newline.
void PrintWarningMsg(const char* message)
{
    fputs(message, stdout);
}
}
| Bacchustaf/SyRe_MSc | xfemm-code-310/mfemm/pfemm/libfemm/fparse.cpp | C++ | apache-2.0 | 2,321 |
/*
* Copyright (c) 2007, 2008, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 6627364 6627366
* @summary Synthesize important classes if they are missing from the (boot)classpath
*/
import java.io.*;
import java.util.*;
public class Main
{
    // Directory containing this test's source files (set by the test harness).
    File testSrc = new File(System.getProperty("test.src"));

    public static void main(String[] args) throws Exception {
        new Main().run();
    }

    // Drives the whole test: one compilation against the standard
    // bootclasspath, then several compilations against an empty bootclasspath
    // where individual "important" system classes are withheld so the compiler
    // must synthesize them.  Fails if any compilation returned a nonzero rc.
    public void run() throws Exception {
        // compile with standard bootclasspath
        compile(true, "Test.java");

        // compile with various missing system classes
        List<String> base_files = Arrays.asList(
            "Boolean.java",
            "Byte.java",
            "Character.java",
            "Integer.java",
            "Long.java",
            "Number.java",
            "Object.java",
            "Short.java",
            "Void.java"
        );
        List<String> extra_files = Arrays.asList(
            "Double.java",
            "Float.java",
            "Cloneable.java",
            "Serializable.java"
        );
        List<String> files = new ArrayList<String>();
        files.addAll(base_files);
        files.add("Test.java");
        compile(false, files);
        // Re-run once per extra file, each time omitting exactly that file.
        for (String f: extra_files) {
            files = new ArrayList<String>();
            files.addAll(base_files);
            files.addAll(extra_files);
            files.remove(f);
            files.add("Test.java");
            compile(false, files);
        }

        if (errors > 0)
            throw new Exception(errors + " errors occurred");
    }

    // Varargs convenience overload.
    void compile(boolean stdBootClassPath, String... files) {
        compile(stdBootClassPath, Arrays.asList(files));
    }

    // Invokes the compiler on the given source files (resolved against
    // testSrc), optionally replacing the bootclasspath with an empty
    // directory; increments the error count on a nonzero return code.
    void compile(boolean stdBootClassPath, List<String> files) {
        File empty = new File("empty");
        empty.mkdirs();
        List<String> args = new ArrayList<String>();
        args.add("-classpath");
        args.add("empty");
        if (!stdBootClassPath) {
            args.add("-bootclasspath");
            args.add("empty");
        }
        args.add("-d");
        args.add(".");
        for (String f: files)
            args.add(new File(testSrc, f).getPath());
        System.out.println("Compile: " + args);
        StringWriter out = new StringWriter();
        int rc = sbql4j8.com.sun.tools.javac.Main.compile(args.toArray(new String[args.size()]),
                                                          new PrintWriter(out));
        System.out.println(out.toString());
        System.out.println("result: " + rc);
        System.out.println();
        if (rc != 0)
            errors++;
    }

    // Number of compilations that returned a nonzero result.
    private int errors;
}
| emil-wcislo/sbql4j8 | sbql4j8/src/test/openjdk/tools/javac/synthesize/Main.java | Java | apache-2.0 | 3,656 |
package com.java110.api.listener.task;
import com.alibaba.fastjson.JSONObject;
import com.java110.api.listener.AbstractServiceApiListener;
import com.java110.core.annotation.Java110Listener;
import com.java110.core.context.DataFlowContext;
import com.java110.core.event.service.api.ServiceDataFlowEvent;
import com.java110.intf.job.ITaskInnerServiceSMO;
import com.java110.intf.job.ITaskAttrInnerServiceSMO;
import com.java110.dto.task.TaskDto;
import com.java110.dto.taskAttr.TaskAttrDto;
import com.java110.vo.ResultVo;
import com.java110.utils.constant.ServiceCodeTaskConstant;
import com.java110.utils.util.BeanConvertUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import java.util.ArrayList;
import java.util.List;
/**
 * Listener that services the "list tasks" API: it runs a paged query for
 * tasks and attaches each task's attribute records before responding.
 */
@Java110Listener("listTasksListener")
public class ListTasksListener extends AbstractServiceApiListener {

    @Autowired
    private ITaskInnerServiceSMO taskInnerServiceSMOImpl;

    @Autowired
    private ITaskAttrInnerServiceSMO taskAttrInnerServiceSMOImpl;

    @Override
    public String getServiceCode() {
        return ServiceCodeTaskConstant.LIST_TASKS;
    }

    @Override
    public HttpMethod getHttpMethod() {
        return HttpMethod.GET;
    }

    @Override
    public int getOrder() {
        return DEFAULT_ORDER;
    }

    public ITaskInnerServiceSMO getTaskInnerServiceSMOImpl() {
        return taskInnerServiceSMOImpl;
    }

    public void setTaskInnerServiceSMOImpl(ITaskInnerServiceSMO taskInnerServiceSMOImpl) {
        this.taskInnerServiceSMOImpl = taskInnerServiceSMOImpl;
    }

    @Override
    protected void validate(ServiceDataFlowEvent event, JSONObject reqJson) {
        // Only the paging parameters are mandatory for this query.
        super.validatePageInfo(reqJson);
    }

    @Override
    protected void doSoService(ServiceDataFlowEvent event, DataFlowContext context, JSONObject reqJson) {
        // Build the query condition straight from the request parameters.
        TaskDto condition = BeanConvertUtil.covertBean(reqJson, TaskDto.class);

        int total = taskInnerServiceSMOImpl.queryTasksCount(condition);

        List<TaskDto> tasks;
        if (total < 1) {
            tasks = new ArrayList<>();
        } else {
            tasks = taskInnerServiceSMOImpl.queryTasks(condition);
            freshTaskAttr(tasks);
        }

        // Page count = ceil(total / rows-per-page).
        int totalPage = (int) Math.ceil((double) total / (double) reqJson.getInteger("row"));
        ResultVo resultVo = new ResultVo(totalPage, total, tasks);

        context.setResponseEntity(new ResponseEntity<String>(resultVo.toString(), HttpStatus.OK));
    }

    /**
     * Loads the attribute records of every task in the list and attaches
     * them to the corresponding DTO.
     *
     * @param tasks tasks returned by the paged query
     */
    private void freshTaskAttr(List<TaskDto> tasks) {
        for (TaskDto task : tasks) {
            TaskAttrDto attrQuery = new TaskAttrDto();
            attrQuery.setTaskId(task.getTaskId());
            task.setTaskAttr(taskAttrInnerServiceSMOImpl.queryTaskAttrs(attrQuery));
        }
    }
}
| java110/MicroCommunity | service-api/src/main/java/com/java110/api/listener/task/ListTasksListener.java | Java | apache-2.0 | 3,133 |
//===--- Lexer.cpp - Swift Language Lexer ---------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See http://swift.org/LICENSE.txt for license information
// See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// This file implements the Lexer and Token interfaces.
//
//===----------------------------------------------------------------------===//
#include "swift/Parse/Lexer.h"
#include "swift/AST/DiagnosticsParse.h"
#include "swift/AST/Identifier.h"
#include "swift/Basic/Fallthrough.h"
#include "swift/Basic/LangOptions.h"
#include "swift/Basic/SourceManager.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/ADT/Twine.h"
// FIXME: Figure out if this can be migrated to LLVM.
#include "clang/Basic/CharInfo.h"
using namespace swift;
// clang::isIdentifierHead and clang::isIdentifierBody are deliberately not in
// this list as a reminder that they are using C rules for identifiers.
// (Admittedly these are the same as Swift's right now.)
using clang::isAlphanumeric;
using clang::isDigit;
using clang::isHexDigit;
using clang::isHorizontalWhitespace;
using clang::isPrintable;
using clang::isWhitespace;
//===----------------------------------------------------------------------===//
// UTF8 Validation/Encoding/Decoding helper functions
//===----------------------------------------------------------------------===//
/// EncodeToUTF8 - Encode the specified code point into a UTF8 stream.  Return
/// true if it is an erroneous code point.
///
/// The encoded bytes are appended to \p Result.  Note that for rejected code
/// points (UTF-16 surrogates, the U+FDD0..U+FDEF noncharacters, values above
/// U+10FFFF, or values too wide to encode) the function may return true after
/// having already appended the lead byte to \p Result.
static bool EncodeToUTF8(unsigned CharValue,
                         SmallVectorImpl<char> &Result) {
  assert(CharValue >= 0x80 && "Single-byte encoding should be already handled");
  // Number of bits in the value, ignoring leading zeros.
  unsigned NumBits = 32-llvm::countLeadingZeros(CharValue);

  // Handle the leading byte, based on the number of bits in the value.
  unsigned NumTrailingBytes;
  if (NumBits <= 5+6) {
    // Encoding is 0x110aaaaa 10bbbbbb
    Result.push_back(char(0xC0 | (CharValue >> 6)));
    NumTrailingBytes = 1;
  } else if (NumBits <= 4+6+6) {
    // Encoding is 0x1110aaaa 10bbbbbb 10cccccc
    Result.push_back(char(0xE0 | (CharValue >> (6+6))));
    NumTrailingBytes = 2;

    // UTF-16 surrogate pair values are not valid code points.
    if (CharValue >= 0xD800 && CharValue <= 0xDFFF)
      return true;
    // U+FDD0...U+FDEF are also reserved
    if (CharValue >= 0xFDD0 && CharValue <= 0xFDEF)
      return true;
  } else if (NumBits <= 3+6+6+6) {
    // Encoding is 0x11110aaa 10bbbbbb 10cccccc 10dddddd
    Result.push_back(char(0xF0 | (CharValue >> (6+6+6))));
    NumTrailingBytes = 3;
    // Reject over-large code points.  These cannot be encoded as UTF-16
    // surrogate pairs, so UTF-32 doesn't allow them.
    if (CharValue > 0x10FFFF)
      return true;
  } else {
    return true;  // UTF8 can encode these, but they aren't valid code points.
  }

  // Emit all of the trailing bytes (each carries 6 payload bits, 0b10XXXXXX).
  while (NumTrailingBytes--)
    Result.push_back(char(0x80 | (0x3F & (CharValue >> (NumTrailingBytes*6)))));
  return false;
}
/// CLO8 - Return the number of leading ones in the specified 8-bit value.
static unsigned CLO8(unsigned char C) {
  // Walk the bits from the most significant end until we hit a zero.
  unsigned Count = 0;
  for (unsigned char Mask = 0x80; Mask != 0 && (C & Mask); Mask >>= 1)
    ++Count;
  return Count;
}
/// isStartOfUTF8Character - Return true if this isn't a UTF8 continuation
/// character, which will be of the form 0b10XXXXXX
static bool isStartOfUTF8Character(unsigned char C) {
  // Continuation bytes are exactly those in [0x80, 0xC0).
  return !(C >= 0x80 && C < 0xC0);
}
/// validateUTF8CharacterAndAdvance - Given a pointer to the starting byte of a
/// UTF8 character, validate it and advance the lexer past it.  This returns the
/// encoded character or ~0U if the encoding is invalid.
///
/// On malformed input, Ptr is still advanced past at least the lead byte (and
/// past any continuation bytes that were consumed) before ~0U is returned, so
/// callers always make forward progress.
static uint32_t validateUTF8CharacterAndAdvance(const char *&Ptr,
                                                const char *End) {
  if (Ptr >= End)
    return ~0U;

  unsigned char CurByte = *Ptr++;
  // ASCII fast path: a single byte below 0x80 is its own code point.
  if (CurByte < 0x80)
    return CurByte;

  // Read the number of high bits set, which indicates the number of bytes in
  // the character.
  unsigned EncodedBytes = CLO8(CurByte);

  // If this is 0b10XXXXXX, then it is a continuation character.
  if (EncodedBytes == 1 ||
      // If the number of encoded bytes is > 4, then this is an invalid
      // character in the range of 0xF5 and above.  These would start an
      // encoding for something that couldn't be represented with UTF16
      // digraphs, so Unicode rejects them.
      EncodedBytes > 4) {
    // Skip until we get the start of another character.  This is guaranteed to
    // at least stop at the nul at the end of the buffer.
    while (Ptr < End && !isStartOfUTF8Character(*Ptr))
      ++Ptr;
    return ~0U;
  }

  // Drop the high bits indicating the # bytes of the result.
  unsigned CharValue = (unsigned char)(CurByte << EncodedBytes) >> EncodedBytes;

  // Read and validate the continuation bytes.
  for (unsigned i = 1; i != EncodedBytes; ++i) {
    if (Ptr >= End)
      return ~0U;
    CurByte = *Ptr;
    // If the high bit isn't set or the second bit isn't clear, then this is not
    // a continuation byte!
    if (CurByte < 0x80 || CurByte >= 0xC0) return ~0U;

    // Accumulate our result.
    CharValue <<= 6;
    CharValue |= CurByte & 0x3F;
    ++Ptr;
  }

  // UTF-16 surrogate pair values are not valid code points.
  if (CharValue >= 0xD800 && CharValue <= 0xDFFF)
    return ~0U;

  // If we got here, we read the appropriate number of accumulated bytes.
  // Verify that the encoding was actually minimal.
  // Number of bits in the value, ignoring leading zeros.
  unsigned NumBits = 32-llvm::countLeadingZeros(CharValue);

  if (NumBits <= 5+6)
    return EncodedBytes == 2 ? CharValue : ~0U;
  if (NumBits <= 4+6+6)
    return EncodedBytes == 3 ? CharValue : ~0U;
  return EncodedBytes == 4 ? CharValue : ~0U;
}
//===----------------------------------------------------------------------===//
// Setup and Helper Methods
//===----------------------------------------------------------------------===//
/// Construct a lexer over the entire contents of buffer \p BufferID, also
/// recording the code-completion pointer when that buffer is the source
/// manager's designated code-completion buffer.
Lexer::Lexer(const LangOptions &Options,
             const SourceManager &SM, DiagnosticEngine *Diags,
             unsigned BufferID, bool InSILMode,
             CommentRetentionMode RetainComments)
  : LangOpts(Options), SourceMgr(SM), Diags(Diags), BufferID(BufferID),
    InSILMode(InSILMode), RetainComments(RetainComments) {
  // Initialize buffer pointers.
  StringRef contents = SM.extractText(SM.getRangeForBuffer(BufferID));
  BufferStart = contents.data();
  BufferEnd = contents.data() + contents.size();
  CurPtr = BufferStart;

  // Initialize code completion.
  if (BufferID == SM.getCodeCompletionBufferID()) {
    const char *Ptr = BufferStart + SM.getCodeCompletionOffset();
    // Ignore a completion offset that falls outside this buffer.
    if (Ptr >= BufferStart && Ptr <= BufferEnd)
      CodeCompletionPtr = Ptr;
  }
}
/// Lex the first token of the buffer into NextToken.  Must be called exactly
/// once, before any token is handed out (NextToken must still be unset).
void Lexer::primeLexer() {
  assert(NextToken.is(tok::NUM_TOKENS));
  lexImpl();
  assert((NextToken.isAtStartOfLine() || CurPtr != BufferStart) &&
         "The token should be at the beginning of the line, "
         "or we should be lexing from the middle of the buffer");
}
/// Restrict this lexer to the slice of the parent's buffer between
/// \p BeginState and \p EndState.  Both states must refer to the same buffer
/// as this lexer.
void Lexer::initSubLexer(Lexer &Parent, State BeginState, State EndState) {
  assert(BufferID == SourceMgr.findBufferContainingLoc(BeginState.Loc) &&
         "state for the wrong buffer");
  assert(BufferID == SourceMgr.findBufferContainingLoc(EndState.Loc) &&
         "state for the wrong buffer");

  // If the parent lexer should stop prematurely, and the ArtificialEOF
  // position is in this subrange, then we should stop at that point, too.
  const char *BeginStatePtr = getBufferPtrForSourceLoc(BeginState.Loc);
  const char *EndStatePtr = getBufferPtrForSourceLoc(EndState.Loc);
  if (Parent.ArtificialEOF &&
      Parent.ArtificialEOF >= BeginStatePtr &&
      Parent.ArtificialEOF <= EndStatePtr) {
    ArtificialEOF = Parent.ArtificialEOF;
  } else
    ArtificialEOF = EndStatePtr;

  primeLexer();
  // Jump back to the requested starting position after priming.
  restoreState(BeginState);
}
/// Emit \p Diag at the source location corresponding to \p Loc.  When no
/// DiagnosticEngine is attached there is nowhere to report to, so an inert
/// in-flight diagnostic is returned instead.
InFlightDiagnostic Lexer::diagnose(const char *Loc, Diagnostic Diag) {
  if (!Diags)
    return InFlightDiagnostic();
  return Diags->diagnose(getSourceLoc(Loc), Diag);
}
/// Return the token that starts at \p Loc by running a throwaway sub-lexer
/// over the same buffer, positioned at that location.
Token Lexer::getTokenAt(SourceLoc Loc) {
  assert(BufferID == static_cast<unsigned>(
                         SourceMgr.findBufferContainingLoc(Loc)) &&
         "location from the wrong buffer");

  Lexer SubLexer(LangOpts, SourceMgr, BufferID, Diags, InSILMode,
                 CommentRetentionMode::None);
  SubLexer.restoreState(State(Loc));
  Token Tok;
  SubLexer.lex(Tok);
  return Tok;
}
/// Form the token with kind \p Kind spanning [TokStart, CurPtr) into
/// NextToken, attaching preceding-comment length when comment retention
/// requests it.
void Lexer::formToken(tok Kind, const char *TokStart) {
  assert(CurPtr >= BufferStart &&
         CurPtr <= BufferEnd && "Current pointer out of range!");
  // When we are lexing a subrange from the middle of a file buffer, we will
  // run past the end of the range, but will stay within the file.  Check if
  // we are past the imaginary EOF, and synthesize a tok::eof in this case.
  if (Kind != tok::eof && ArtificialEOF && TokStart >= ArtificialEOF) {
    Kind = tok::eof;
  }
  unsigned CommentLength = 0;
  // LastCommentBlockStart/SeenComment are maintained by the main lex loop.
  if (RetainComments == CommentRetentionMode::AttachToNextToken && SeenComment)
    CommentLength = TokStart - LastCommentBlockStart;
  NextToken.setToken(Kind, StringRef(TokStart, CurPtr-TokStart),
                     CommentLength);
}
/// Compute a lexer State positioned so that re-lexing starts cleanly at the
/// token beginning at \p Loc, by backing up over horizontal whitespace to
/// the preceding newline (so at-start-of-line is computed correctly).
Lexer::State Lexer::getStateForBeginningOfTokenLoc(SourceLoc Loc) const {
  const char *Ptr = getBufferPtrForSourceLoc(Loc);
  // Skip whitespace backwards until we hit a newline. This is needed to
  // correctly lex the token if it is at the beginning of the line.
  while (Ptr >= BufferStart + 1) {
    char C = Ptr[-1];
    if (C == ' ' || C == '\t') {
      Ptr--;
      continue;
    }
    if (C == 0) {
      // A NUL character can be either whitespace we diagnose or a code
      // completion token.
      if (Ptr - 1 == CodeCompletionPtr)
        break;
      Ptr--;
      continue;
    }
    if (C == '\n' || C == '\r') {
      // Stop just past the newline so the token is seen at start-of-line.
      Ptr--;
      break;
    }
    break;
  }
  return State(SourceLoc(llvm::SMLoc::getFromPointer(Ptr)));
}
//===----------------------------------------------------------------------===//
// Lexer Subroutines
//===----------------------------------------------------------------------===//
/// Diagnose an embedded NUL character at \p Ptr, with a fix-it that removes
/// it. Does nothing when no diagnostic engine is available.
static void diagnoseEmbeddedNul(DiagnosticEngine *Diags, const char *Ptr) {
  assert(Ptr && "invalid source location");
  assert(*Ptr == '\0' && "not an embedded null");
  if (Diags) {
    SourceLoc NulLoc = Lexer::getSourceLoc(Ptr);
    Diags->diagnose(NulLoc, diag::lex_nul_character)
      .fixItRemoveChars(NulLoc, Lexer::getSourceLoc(Ptr + 1));
  }
}
/// Advance CurPtr past the end of the current line, consuming the newline
/// and marking the next token as being at start-of-line. Validates UTF-8
/// and diagnoses embedded NULs along the way.
void Lexer::skipToEndOfLine() {
  while (1) {
    switch (*CurPtr++) {
    case '\n':
    case '\r':
      NextToken.setAtStartOfLine(true);
      return;  // If we found the end of the line, return.
    default:
      // If this is a "high" UTF-8 character, validate it.
      if ((signed char)(CurPtr[-1]) < 0) {
        --CurPtr;
        const char *CharStart = CurPtr;
        if (validateUTF8CharacterAndAdvance(CurPtr, BufferEnd) == ~0U)
          diagnose(CharStart, diag::lex_invalid_utf8);
      }
      break;   // Otherwise, eat other characters.
    case 0:
      // If this is a random nul character in the middle of a buffer, skip it as
      // whitespace.
      if (CurPtr-1 != BufferEnd) {
        diagnoseEmbeddedNul(Diags, CurPtr-1);
        break;
      }
      // Otherwise, the last line of the file does not have a newline.
      // Back up onto the NUL terminator so EOF is lexed next.
      --CurPtr;
      return;
    }
  }
}
/// Skip a "//" line comment; CurPtr ends just past the newline (or at EOF).
void Lexer::skipSlashSlashComment() {
  assert(CurPtr[-1] == '/' && CurPtr[0] == '/' && "Not a // comment");
  skipToEndOfLine();
}
/// Skip a "#!" hashbang line; only valid at the very start of the buffer.
void Lexer::skipHashbang() {
  assert(CurPtr == BufferStart && CurPtr[0] == '#' && CurPtr[1] == '!' &&
         "Not a hashbang");
  skipToEndOfLine();
}
/// skipSlashStarComment - /**/ comments are skipped (treated as whitespace).
/// Note that (unlike in C) block comments can be nested. On an unterminated
/// comment, diagnoses with a fix-it inserting the missing terminators and
/// leaves CurPtr at EOF.
void Lexer::skipSlashStarComment() {
  const char *StartPtr = CurPtr-1;
  assert(CurPtr[-1] == '/' && CurPtr[0] == '*' && "Not a /* comment");
  // Make sure to advance over the * so that we don't incorrectly handle /*/ as
  // the beginning and end of the comment.
  ++CurPtr;
  // /**/ comments can be nested, keep track of how deep we've gone.
  unsigned Depth = 1;
  while (1) {
    switch (*CurPtr++) {
    case '*':
      // Check for a '*/'
      if (*CurPtr == '/') {
        ++CurPtr;
        if (--Depth == 0)
          return;
      }
      break;
    case '/':
      // Check for a '/*'
      if (*CurPtr == '*') {
        ++CurPtr;
        ++Depth;
      }
      break;
    case '\n':
    case '\r':
      // Newlines inside the comment still affect start-of-line tracking.
      NextToken.setAtStartOfLine(true);
      break;
    default:
      // If this is a "high" UTF-8 character, validate it.
      if ((signed char)(CurPtr[-1]) < 0) {
        --CurPtr;
        const char *CharStart = CurPtr;
        if (validateUTF8CharacterAndAdvance(CurPtr, BufferEnd) == ~0U)
          diagnose(CharStart, diag::lex_invalid_utf8);
      }
      break;   // Otherwise, eat other characters.
    case 0:
      // If this is a random nul character in the middle of a buffer, skip it as
      // whitespace.
      if (CurPtr-1 != BufferEnd) {
        diagnoseEmbeddedNul(Diags, CurPtr-1);
        break;
      }
      // Otherwise, we have an unterminated /* comment.
      --CurPtr;
      // Count how many levels deep we are: one "*/" per unclosed nesting level.
      llvm::SmallString<8> Terminator("*/");
      while (--Depth != 0)
        Terminator += "*/";
      // Point the fix-it at the end of the last line, not past the newline.
      const char *EOL = (CurPtr[-1] == '\n') ? (CurPtr - 1) : CurPtr;
      diagnose(EOL, diag::lex_unterminated_block_comment)
        .fixItInsert(getSourceLoc(EOL), Terminator);
      diagnose(StartPtr, diag::lex_comment_start);
      return;
    }
  }
}
/// Is \p c a code point that may appear after the first character of an
/// identifier? ASCII defers to Clang; everything else is checked against the
/// N1518 Annex X.1 table of allowed ranges.
static bool isValidIdentifierContinuationCodePoint(uint32_t c) {
  if (c < 0x80)
    return clang::isIdentifierBody(c, /*dollar*/true);

  // N1518: Recommendations for extended identifier characters for C and C++
  // Proposed Annex X.1: Ranges of characters allowed
  // (single allowed code points are stored as degenerate one-element ranges).
  static const uint32_t AllowedRanges[][2] = {
    {0x00A8, 0x00A8}, {0x00AA, 0x00AA}, {0x00AD, 0x00AD}, {0x00AF, 0x00AF},
    {0x00B2, 0x00B5}, {0x00B7, 0x00BA}, {0x00BC, 0x00BE}, {0x00C0, 0x00D6},
    {0x00D8, 0x00F6}, {0x00F8, 0x00FF},

    {0x0100, 0x167F}, {0x1681, 0x180D}, {0x180F, 0x1FFF},

    {0x200B, 0x200D}, {0x202A, 0x202E}, {0x203F, 0x2040}, {0x2054, 0x2054},
    {0x2060, 0x206F},

    {0x2070, 0x218F}, {0x2460, 0x24FF}, {0x2776, 0x2793}, {0x2C00, 0x2DFF},
    {0x2E80, 0x2FFF},

    {0x3004, 0x3007}, {0x3021, 0x302F}, {0x3031, 0x303F},

    {0x3040, 0xD7FF},

    {0xF900, 0xFD3D}, {0xFD40, 0xFDCF}, {0xFDF0, 0xFE44}, {0xFE47, 0xFFF8},

    {0x10000, 0x1FFFD}, {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
    {0x40000, 0x4FFFD}, {0x50000, 0x5FFFD}, {0x60000, 0x6FFFD},
    {0x70000, 0x7FFFD}, {0x80000, 0x8FFFD}, {0x90000, 0x9FFFD},
    {0xA0000, 0xAFFFD}, {0xB0000, 0xBFFFD}, {0xC0000, 0xCFFFD},
    {0xD0000, 0xDFFFD}, {0xE0000, 0xEFFFD},
  };

  for (const auto &Range : AllowedRanges)
    if (c >= Range[0] && c <= Range[1])
      return true;
  return false;
}
/// Is \p c a code point that may begin an identifier? It must be a valid
/// continuation code point, must not be an ASCII digit or '$', and must not
/// fall in the N1518 Annex X.2 combining-character ranges.
static bool isValidIdentifierStartCodePoint(uint32_t c) {
  if (!isValidIdentifierContinuationCodePoint(c))
    return false;
  // ASCII digits and '$' may continue an identifier but never start one.
  if (c < 0x80 && (isDigit(c) || c == '$'))
    return false;
  // N1518: Recommendations for extended identifier characters for C and C++
  // Proposed Annex X.2: Ranges of characters disallowed initially
  return !((c >= 0x0300 && c <= 0x036F) ||
           (c >= 0x1DC0 && c <= 0x1DFF) ||
           (c >= 0x20D0 && c <= 0x20FF) ||
           (c >= 0xFE20 && c <= 0xFE2F));
}
/// Decode one UTF-8 code point ahead of \p ptr and, if it is well-formed and
/// satisfies \p predicate, commit the advance and return true. On malformed
/// UTF-8 or a predicate failure, \p ptr is left untouched.
static bool advanceIf(char const *&ptr, char const *end,
                      bool (*predicate)(uint32_t)) {
  char const *lookahead = ptr;
  uint32_t codePoint = validateUTF8CharacterAndAdvance(lookahead, end);
  // Malformed UTF-8 (~0U) never matches any predicate.
  if (codePoint == ~0U || !predicate(codePoint))
    return false;
  ptr = lookahead;
  return true;
}
/// Advance \p ptr over a code point that may begin an identifier.
static bool advanceIfValidStartOfIdentifier(char const *&ptr,
                                            char const *end) {
  return advanceIf(ptr, end, isValidIdentifierStartCodePoint);
}
/// Advance \p ptr over a code point that may continue an identifier.
static bool advanceIfValidContinuationOfIdentifier(char const *&ptr,
                                                   char const *end) {
  return advanceIf(ptr, end, isValidIdentifierContinuationCodePoint);
}
/// Advance \p ptr over a code point that may begin an operator.
static bool advanceIfValidStartOfOperator(char const *&ptr,
                                          char const *end) {
  return advanceIf(ptr, end, Identifier::isOperatorStartCodePoint);
}
/// Advance \p ptr over a code point that may continue an operator.
static bool advanceIfValidContinuationOfOperator(char const *&ptr,
                                                 char const *end) {
  return advanceIf(ptr, end, Identifier::isOperatorContinuationCodePoint);
}
/// Determines whether \p string is a valid (non-operator, non-escaped)
/// identifier: a valid start code point followed only by valid continuation
/// code points, with the entire string consumed.
bool Lexer::isIdentifier(StringRef string) {
  if (string.empty()) return false;
  char const *cursor = string.data();
  char const *limit = string.end();
  // The first code point has stricter rules than the rest.
  if (!advanceIfValidStartOfIdentifier(cursor, limit))
    return false;
  // Consume continuation code points until one fails or input runs out.
  while (cursor < limit &&
         advanceIfValidContinuationOfIdentifier(cursor, limit)) {
  }
  return cursor == limit;
}
/// \brief Determines if the given string is a valid operator identifier,
/// without escaping characters: a valid operator-start code point followed
/// only by operator-continuation code points, consuming the whole string.
bool Lexer::isOperator(StringRef string) {
  if (string.empty()) return false;
  char const *cursor = string.data();
  char const *limit = string.end();
  if (!advanceIfValidStartOfOperator(cursor, limit))
    return false;
  // Consume continuation code points until one fails or input runs out.
  while (cursor < limit &&
         advanceIfValidContinuationOfOperator(cursor, limit)) {
  }
  return cursor == limit;
}
/// Map identifier text to its token kind: a language keyword or
/// tok::identifier. SIL-only keywords lex as plain identifiers unless
/// \p InSILMode is set.
tok Lexer::kindOfIdentifier(StringRef Str, bool InSILMode) {
  // Tokens.def expands one .Case per KEYWORD entry.
  tok Kind = llvm::StringSwitch<tok>(Str)
#define KEYWORD(kw) \
    .Case(#kw, tok::kw_##kw)
#include "swift/Parse/Tokens.def"
    .Default(tok::identifier);
  // These keywords are only active in SIL mode.
  if ((Kind == tok::kw_sil || Kind == tok::kw_sil_stage ||
       Kind == tok::kw_sil_vtable || Kind == tok::kw_sil_global ||
       Kind == tok::kw_sil_witness_table || Kind == tok::kw_sil_coverage_map ||
       Kind == tok::kw_undef) &&
      !InSILMode)
    Kind = tok::identifier;
  return Kind;
}
/// lexIdentifier - Match [a-zA-Z_][a-zA-Z_$0-9]* (plus the extended Unicode
/// identifier ranges) and form either a keyword or identifier token.
void Lexer::lexIdentifier() {
  const char *TokStart = CurPtr-1;
  // Rewind so the UTF-8-aware helper re-reads the first code point.
  CurPtr = TokStart;
  bool didStart = advanceIfValidStartOfIdentifier(CurPtr, BufferEnd);
  assert(didStart && "Unexpected start");
  (void) didStart;
  // Lex [a-zA-Z_$0-9[[:XID_Continue:]]]*
  while (advanceIfValidContinuationOfIdentifier(CurPtr, BufferEnd));
  tok Kind = kindOfIdentifier(StringRef(TokStart, CurPtr-TokStart), InSILMode);
  return formToken(Kind, TokStart);
}
/// Is the operator beginning at \p tokBegin "left-bound", i.e. directly
/// preceded by a character that can end an expression? Whitespace, opening
/// delimiters, expression separators, NUL, and start-of-buffer all leave the
/// operator unbound on the left.
static bool isLeftBound(const char *tokBegin, const char *bufferBegin) {
  // The first character in the file is not left-bound.
  if (tokBegin == bufferBegin)
    return false;
  char prev = tokBegin[-1];
  return !(prev == ' ' || prev == '\r' || prev == '\n' || prev == '\t' ||
           prev == '(' || prev == '[' || prev == '{' ||
           prev == ',' || prev == ';' || prev == ':' ||
           prev == '\0');
}
/// Is the operator ending at \p tokEnd (actually one past the end)
/// "right-bound", i.e. directly followed by a character that can begin an
/// expression? A following '.' binds right only when the operator is not
/// already left-bound, so "x^.y" parses '^' as postfix while "^.y" parses it
/// as prefix.
static bool isRightBound(const char *tokEnd, bool isLeftBound) {
  char next = *tokEnd;
  // Whitespace, closing delimiters, separators, and NUL/EOF unbind the right.
  if (next == ' ' || next == '\r' || next == '\n' || next == '\t' ||
      next == ')' || next == ']' || next == '}' ||
      next == ',' || next == ';' || next == ':' ||
      next == '\0')
    return false;
  if (next == '.')
    return !isLeftBound;
  return true;
}
/// lexOperatorIdentifier - Match identifiers formed out of punctuation.
/// Classifies the result as binary/prefix/postfix from the surrounding
/// characters and special-cases reserved punctuators ('=', '&', '.', '?',
/// "->", "*/") and comment starts embedded in an operator.
void Lexer::lexOperatorIdentifier() {
  const char *TokStart = CurPtr-1;
  // Rewind so the UTF-8-aware helper re-reads the first code point.
  CurPtr = TokStart;
  bool didStart = advanceIfValidStartOfOperator(CurPtr, BufferEnd);
  assert(didStart && "unexpected operator start");
  (void) didStart;
  do {
    if (CurPtr != BufferEnd && InSILBody &&
        (*CurPtr == '!' || *CurPtr == '?'))
      // When parsing SIL body, '!' and '?' are special token and can't be
      // in the middle of an operator.
      break;
    // '.' cannot appear in the middle of an operator unless the operator
    // started with a '.'.
    if (*CurPtr == '.' && *TokStart != '.')
      break;
  } while (advanceIfValidContinuationOfOperator(CurPtr, BufferEnd));
  // Decide between the binary, prefix, and postfix cases.
  // It's binary if either both sides are bound or both sides are not bound.
  // Otherwise, it's postfix if left-bound and prefix if right-bound.
  bool leftBound = isLeftBound(TokStart, BufferStart);
  bool rightBound = isRightBound(CurPtr, leftBound);
  // Match various reserved words.
  if (CurPtr-TokStart == 1) {
    switch (TokStart[0]) {
    case '=':
      // Unary '=' is an error; suggest a space on the unbound side.
      if (leftBound != rightBound) {
        auto d = diagnose(TokStart, diag::lex_unary_equal);
        if (leftBound)
          d.fixItInsert(getSourceLoc(TokStart), " ");
        else
          d.fixItInsert(getSourceLoc(TokStart+1), " ");
      }
      // always emit 'tok::equal' to avoid trickle down parse errors
      return formToken(tok::equal, TokStart);
    case '&':
      if (leftBound == rightBound || leftBound)
        break;
      return formToken(tok::amp_prefix, TokStart);
    case '.':
      if (leftBound == rightBound)
        return formToken(tok::period, TokStart);
      if (rightBound)
        return formToken(tok::period_prefix, TokStart);
      // A postfix '.' (left-bound only) is reserved.
      diagnose(TokStart, diag::lex_unary_postfix_dot_is_reserved);
      // always emit 'tok::period' to avoid trickle down parse errors
      return formToken(tok::period, TokStart);
    case '?':
      if (leftBound)
        return formToken(tok::question_postfix, TokStart);
      return formToken(tok::question_infix, TokStart);
    }
  } else if (CurPtr-TokStart == 2) {
    switch ((TokStart[0] << 8) | TokStart[1]) {
    case ('-' << 8) | '>': // ->
      return formToken(tok::arrow, TokStart);
    case ('*' << 8) | '/': // */
      diagnose(TokStart, diag::lex_unexpected_block_comment_end);
      return formToken(tok::unknown, TokStart);
    }
  } else {
    // If there is a "//" in the middle of an identifier token, it starts
    // a single-line comment.
    auto Pos = StringRef(TokStart, CurPtr-TokStart).find("//");
    if (Pos != StringRef::npos)
      CurPtr = TokStart+Pos;
    // If there is a "/*" in the middle of an identifier token, it starts
    // a multi-line comment.
    Pos = StringRef(TokStart, CurPtr-TokStart).find("/*");
    if (Pos != StringRef::npos)
      CurPtr = TokStart+Pos;
    // Verify there is no "*/" in the middle of the identifier token, we reject
    // it as potentially ending a block comment.
    Pos = StringRef(TokStart, CurPtr-TokStart).find("*/");
    if (Pos != StringRef::npos) {
      diagnose(TokStart+Pos, diag::lex_unexpected_block_comment_end);
      return formToken(tok::unknown, TokStart);
    }
  }
  if (leftBound == rightBound)
    return formToken(leftBound ? tok::oper_binary_unspaced :
                                 tok::oper_binary_spaced, TokStart);
  return formToken(leftBound ? tok::oper_postfix : tok::oper_prefix, TokStart);
}
/// lexDollarIdent - Match $[0-9a-zA-Z_$]*. '$' + digits is a dollarident
/// (anonymous closure argument); any other use is diagnosed unless dollar
/// identifiers are explicitly enabled (debugger persistent bindings).
void Lexer::lexDollarIdent() {
  const char *tokStart = CurPtr-1;
  assert(*tokStart == '$');
  // In a SIL function body, '$' is a token by itself.
  if (InSILBody)
    return formToken(tok::sil_dollar, tokStart);
  bool isAllDigits = true;
  for (;; ++CurPtr) {
    if (isDigit(*CurPtr)) {
      // continue
    } else if (clang::isIdentifierHead(*CurPtr, /*dollar*/true)) {
      isAllDigits = false;
      // continue
    } else {
      break;
    }
  }
  // It's always an error to see a standalone $, and we reserve
  // $nonNumeric for persistent bindings in the debugger.
  if (CurPtr == tokStart + 1 || !isAllDigits) {
    if (!isAllDigits && !LangOpts.EnableDollarIdentifiers)
      diagnose(tokStart, diag::expected_dollar_numeric);
    // Even if we diagnose, we go ahead and form an identifier token,
    // in part to ensure that the basic behavior of the lexer is
    // independent of language mode.
    return formToken(tok::identifier, tokStart);
  } else {
    return formToken(tok::dollarident, tokStart);
  }
}
/// Lex a hexadecimal literal:
///   integer:  0x[0-9a-fA-F][0-9a-fA-F_]*
///   float:    0x hex-digits (\.[0-9a-fA-F][0-9a-fA-F_]*)? [pP][+-]?[0-9][0-9_]*
/// On entry CurPtr points at the 'x' of a "0x" prefix. On malformed input,
/// consumes trailing identifier-like characters and forms tok::unknown.
void Lexer::lexHexNumber() {
  // We assume we're starting from the 'x' in a '0x...' floating-point literal.
  assert(*CurPtr == 'x' && "not a hex literal");
  const char *TokStart = CurPtr-1;
  assert(*TokStart == '0' && "not a hex literal");
  // 0x[0-9a-fA-F][0-9a-fA-F_]*
  ++CurPtr;
  if (!isHexDigit(*CurPtr)) {
    diagnose(CurPtr, diag::lex_expected_digit_in_int_literal);
    // Eat the rest of the identifier-like garbage so lexing resynchronizes.
    while (advanceIfValidContinuationOfIdentifier(CurPtr, BufferEnd));
    return formToken(tok::unknown, TokStart);
  }
  while (isHexDigit(*CurPtr) || *CurPtr == '_')
    ++CurPtr;
  if (CurPtr - TokStart == 2) {
    diagnose(CurPtr, diag::lex_expected_digit_in_int_literal);
    while (advanceIfValidContinuationOfIdentifier(CurPtr, BufferEnd));
    return formToken(tok::unknown, TokStart);
  }
  // Without a '.' or binary exponent this is a plain hex integer.
  if (*CurPtr != '.' && *CurPtr != 'p' && *CurPtr != 'P')
    return formToken(tok::integer_literal, TokStart);
  // (\.[0-9A-Fa-f][0-9A-Fa-f_]*)?
  if (*CurPtr == '.') {
    ++CurPtr;
    // If the character after the '.' is not a digit, assume we have an int
    // literal followed by a dot expression.
    if (!isHexDigit(*CurPtr)) {
      --CurPtr;
      return formToken(tok::integer_literal, TokStart);
    }
    while (isHexDigit(*CurPtr) || *CurPtr == '_')
      ++CurPtr;
    // A hex float with a fraction *requires* a binary exponent.
    if (*CurPtr != 'p' && *CurPtr != 'P') {
      diagnose(CurPtr, diag::lex_expected_binary_exponent_in_hex_float_literal);
      return formToken(tok::unknown, TokStart);
    }
  }
  // [pP][+-]?[0-9][0-9_]*
  // NOTE: parenthesized explicitly. The previous form
  //   *CurPtr == 'p' || *CurPtr == 'P' && "..."
  // relied on '&&' binding tighter than '||' and only checked intent because
  // the message string is always truthy (and it trips -Wparentheses).
  assert((*CurPtr == 'p' || *CurPtr == 'P') && "not at a hex float exponent?!");
  ++CurPtr;
  if (*CurPtr == '+' || *CurPtr == '-')
    ++CurPtr;  // Eat the sign.
  if (!isDigit(*CurPtr)) {
    diagnose(CurPtr, diag::lex_expected_digit_in_fp_exponent);
    return formToken(tok::unknown, TokStart);
  }
  while (isDigit(*CurPtr) || *CurPtr == '_')
    ++CurPtr;
  return formToken(tok::floating_literal, TokStart);
}
/// lexNumber:
///   integer_literal  ::= [0-9][0-9_]*
///   integer_literal  ::= 0x[0-9a-fA-F][0-9a-fA-F_]*
///   integer_literal  ::= 0o[0-7][0-7_]*
///   integer_literal  ::= 0b[01][01_]*
///   floating_literal ::= [0-9][0-9]_*\.[0-9][0-9_]*
///   floating_literal ::= [0-9][0-9]*\.[0-9][0-9_]*[eE][+-]?[0-9][0-9_]*
///   floating_literal ::= [0-9][0-9_]*[eE][+-]?[0-9][0-9_]*
///   floating_literal ::= 0x[0-9A-Fa-f][0-9A-Fa-f_]*
///                          (\.[0-9A-Fa-f][0-9A-Fa-f_]*)?[pP][+-]?[0-9][0-9_]*
void Lexer::lexNumber() {
  const char *TokStart = CurPtr-1;
  assert((isDigit(*TokStart) || *TokStart == '.') && "Unexpected start");
  // Shared error path: diagnose, skip trailing identifier-like junk, and
  // form an unknown token so the parser can resynchronize.
  auto expected_digit = [&](const char *loc, Diag<> msg) {
    diagnose(loc, msg);
    while (advanceIfValidContinuationOfIdentifier(CurPtr, BufferEnd));
    return formToken(tok::unknown, TokStart);
  };
  if (*TokStart == '0' && *CurPtr == 'x')
    return lexHexNumber();
  if (*TokStart == '0' && *CurPtr == 'o') {
    // 0o[0-7][0-7_]*
    ++CurPtr;
    if (*CurPtr < '0' || *CurPtr > '7')
      return expected_digit(CurPtr, diag::lex_expected_digit_in_int_literal);
    while ((*CurPtr >= '0' && *CurPtr <= '7') || *CurPtr == '_')
      ++CurPtr;
    if (CurPtr - TokStart == 2)
      return expected_digit(CurPtr, diag::lex_expected_digit_in_int_literal);
    return formToken(tok::integer_literal, TokStart);
  }
  if (*TokStart == '0' && *CurPtr == 'b') {
    // 0b[01][01_]*
    ++CurPtr;
    if (*CurPtr != '0' && *CurPtr != '1')
      return expected_digit(CurPtr, diag::lex_expected_digit_in_int_literal);
    while (*CurPtr == '0' || *CurPtr == '1' || *CurPtr == '_')
      ++CurPtr;
    if (CurPtr - TokStart == 2)
      return expected_digit(CurPtr, diag::lex_expected_digit_in_int_literal);
    return formToken(tok::integer_literal, TokStart);
  }
  // Handle a leading [0-9]+, lexing an integer or falling through if we have a
  // floating point value.
  while (isDigit(*CurPtr) || *CurPtr == '_')
    ++CurPtr;
  // Lex things like 4.x as '4' followed by a tok::period.
  if (*CurPtr == '.') {
    // NextToken is the soon to be previous token
    // Therefore: x.0.1 is sub-tuple access, not x.float_literal
    if (!isDigit(CurPtr[1]) || NextToken.is(tok::period))
      return formToken(tok::integer_literal, TokStart);
  } else {
    // Floating literals must have '.', 'e', or 'E' after digits.  If it is
    // something else, then this is the end of the token.
    if (*CurPtr != 'e' && *CurPtr != 'E') {
      // An identifier character directly after the digits (e.g. "12abc")
      // makes the whole thing malformed.
      char const *tmp = CurPtr;
      if (advanceIfValidContinuationOfIdentifier(CurPtr, BufferEnd))
        return expected_digit(tmp, diag::lex_expected_digit_in_int_literal);
      return formToken(tok::integer_literal, TokStart);
    }
  }
  // Lex decimal point.
  if (*CurPtr == '.') {
    ++CurPtr;
    // Lex any digits after the decimal point.
    while (isDigit(*CurPtr) || *CurPtr == '_')
      ++CurPtr;
  }
  // Lex exponent.
  if (*CurPtr == 'e' || *CurPtr == 'E') {
    ++CurPtr;  // Eat the 'e'
    if (*CurPtr == '+' || *CurPtr == '-')
      ++CurPtr;  // Eat the sign.
    if (!isDigit(*CurPtr))
      return expected_digit(CurPtr, diag::lex_expected_digit_in_fp_exponent);
    while (isDigit(*CurPtr) || *CurPtr == '_')
      ++CurPtr;
  }
  return formToken(tok::floating_literal, TokStart);
}
/// unicode_character_escape ::= [\]u{hex+}
/// hex ::= [0-9a-fA-F]
///
/// On entry CurPtr points at the '{'. Returns the decoded scalar value, or
/// ~1U on a malformed escape (missing '}' or not 1-8 hex digits). \p Diags
/// is a Lexer used purely for diagnostics; pass nullptr to stay silent.
unsigned Lexer::lexUnicodeEscape(const char *&CurPtr, Lexer *Diags) {
  assert(CurPtr[0] == '{' && "Invalid unicode escape");
  ++CurPtr;
  const char *DigitStart = CurPtr;
  unsigned NumDigits = 0;
  for (; isHexDigit(CurPtr[0]); ++NumDigits)
    ++CurPtr;
  if (CurPtr[0] != '}') {
    if (Diags)
      Diags->diagnose(CurPtr, diag::lex_invalid_u_escape_rbrace);
    return ~1U;
  }
  ++CurPtr;
  // 1 to 8 hex digits covers the full Unicode code space.
  if (NumDigits < 1 || NumDigits > 8) {
    if (Diags)
      Diags->diagnose(CurPtr, diag::lex_invalid_u_escape);
    return ~1U;
  }
  unsigned CharValue = 0;
  // Cannot fail: the digits were validated as hex above.
  StringRef(DigitStart, NumDigits).getAsInteger(16, CharValue);
  return CharValue;
}
/// lexCharacter - Read a character and return its UTF32 code.  If this is the
/// end of enclosing string/character sequence (i.e. the character is equal to
/// 'StopQuote'), this returns ~0U and leaves 'CurPtr' pointing to the terminal
/// quote.  If this is a malformed character sequence, it emits a diagnostic
/// (when EmitDiagnostics is true) and returns ~1U.
///
///   character_escape  ::= [\][\] | [\]t | [\]n | [\]r | [\]" | [\]' | [\]0
///   character_escape  ::= unicode_character_escape
unsigned Lexer::lexCharacter(const char *&CurPtr, char StopQuote,
                             bool EmitDiagnostics) {
  const char *CharStart = CurPtr;
  switch (*CurPtr++) {
  default: {// Normal characters are part of the string.
    // If this is a "high" UTF-8 character, validate it.
    if ((signed char)(CurPtr[-1]) >= 0) {
      // Plain ASCII: accept, but diagnose unprintable control characters.
      if (isPrintable(CurPtr[-1]) == 0)
        if (EmitDiagnostics)
          diagnose(CharStart, diag::lex_unprintable_ascii_character)
;
      return CurPtr[-1];
    }
    --CurPtr;
    unsigned CharValue = validateUTF8CharacterAndAdvance(CurPtr, BufferEnd);
    if (CharValue != ~0U) return CharValue;
    if (EmitDiagnostics)
      diagnose(CharStart, diag::lex_invalid_utf8);
    return ~1U;
  }
  case '"':
  case '\'':
    // If we found a closing quote character, we're done.
    if (CurPtr[-1] == StopQuote) {
      --CurPtr;
      return ~0U;
    }
    // Otherwise, this is just a character.
    return CurPtr[-1];
  case 0:
    // An embedded NUL (not at EOF) is treated as an ordinary character.
    if (CurPtr-1 != BufferEnd) {
      if (EmitDiagnostics)
        diagnose(CurPtr-1, diag::lex_nul_character)
;
      return CurPtr[-1];
    }
    // Move the pointer back to EOF.
    --CurPtr;
    SWIFT_FALLTHROUGH;
  case '\n':  // String literals cannot have \n or \r in them.
  case '\r':
    if (EmitDiagnostics)
      diagnose(CurPtr-1, diag::lex_unterminated_string);
    return ~1U;
  case '\\':  // Escapes.
    break;
  }
  unsigned CharValue = 0;
  // Escape processing.  We already ate the "\".
  switch (*CurPtr) {
  default:  // Invalid escape.
    if (EmitDiagnostics)
      diagnose(CurPtr, diag::lex_invalid_escape);
    // If this looks like a plausible escape character, recover as though this
    // is an invalid escape.
    if (isAlphanumeric(*CurPtr)) ++CurPtr;
    return ~1U;
  // Simple single-character escapes.
  case '0': ++CurPtr; return '\0';
  case 'n': ++CurPtr; return '\n';
  case 'r': ++CurPtr; return '\r';
  case 't': ++CurPtr; return '\t';
  case '"': ++CurPtr; return '"';
  case '\'': ++CurPtr; return '\'';
  case '\\': ++CurPtr; return '\\';
  case 'u': {  //  \u HEX HEX HEX HEX
    ++CurPtr;
    if (*CurPtr != '{') {
      if (EmitDiagnostics)
        diagnose(CurPtr-1, diag::lex_unicode_escape_braces);
      return ~1U;
    }
    CharValue = lexUnicodeEscape(CurPtr, EmitDiagnostics ? this : nullptr);
    if (CharValue == ~1U) return ~1U;
    break;
  }
  }
  // Check to see if the encoding is valid.
  llvm::SmallString<64> TempString;
  if (CharValue >= 0x80 && EncodeToUTF8(CharValue, TempString)) {
    if (EmitDiagnostics)
      diagnose(CharStart, diag::lex_invalid_unicode_scalar);
    return ~1U;
  }
  return CharValue;
}
/// skipToEndOfInterpolatedExpression - Given the first character after a \(
/// sequence in a string literal (the start of an interpolated expression),
/// scan forward to the end of the interpolated expression and return the end.
/// On success, the returned pointer will point to the ')' at the end of the
/// interpolated expression.  On failure, it will point to the first character
/// that cannot be lexed as part of the interpolated expression; this character
/// will never be ')'.
///
/// This function performs brace and quote matching, keeping a stack of
/// outstanding delimiters as it scans the string.
static const char *skipToEndOfInterpolatedExpression(const char *CurPtr,
                                                     const char *EndPtr,
                                                     DiagnosticEngine *Diags) {
  llvm::SmallVector<char, 4> OpenDelimiters;
  // True when the innermost open delimiter is a quote, i.e. the scanner is
  // currently inside a nested string literal.
  auto inStringLiteral = [&]() {
    return !OpenDelimiters.empty() &&
           (OpenDelimiters.back() == '"' || OpenDelimiters.back() == '\'');
  };
  while (true) {
    // This is a simple scanner, capable of recognizing nested parentheses and
    // string literals but not much else.  The implications of this include not
    // being able to break an expression over multiple lines in an interpolated
    // string.  This limitation allows us to recover from common errors though.
    //
    // On success scanning the expression body, the real lexer will be used to
    // relex the body when parsing the expressions.  We let it diagnose any
    // issues with malformed tokens or other problems.
    switch (*CurPtr++) {
    // String literals in general cannot be split across multiple lines;
    // interpolated ones are no exception.
    case '\n':
    case '\r':
      // Will be diagnosed as an unterminated string literal.
      return CurPtr-1;
    case '"':
    case '\'':
      if (inStringLiteral()) {
        // Is it the closing quote?
        if (OpenDelimiters.back() == CurPtr[-1]) {
          OpenDelimiters.pop_back();
        }
        // Otherwise it's an ordinary character; treat it normally.
      } else {
        OpenDelimiters.push_back(CurPtr[-1]);
      }
      continue;
    case '\\':
      // Escapes only matter inside a nested string literal.
      if (inStringLiteral()) {
        char escapedChar = *CurPtr++;
        switch (escapedChar) {
        case '(':
          // Entering a recursive interpolated expression
          OpenDelimiters.push_back('(');
          continue;
        case '\n': case '\r': case 0:
          // Don't jump over newline/EOF due to preceding backslash!
          return CurPtr-1;
        default:
          continue;
        }
      }
      continue;
    case 0:
      // If we hit EOF, we fail.
      if (CurPtr-1 == EndPtr) {
        if (Diags)
          Diags->diagnose(Lexer::getSourceLoc(CurPtr-1),
                          diag::lex_unterminated_string);
        return CurPtr-1;
      }
      continue;
    // Paren nesting deeper to support "foo = \((a+b)-(c*d)) bar".
    case '(':
      if (!inStringLiteral()) {
        OpenDelimiters.push_back('(');
      }
      continue;
    case ')':
      if (OpenDelimiters.empty()) {
        // No outstanding open delimiters; we're done.
        return CurPtr-1;
      } else if (OpenDelimiters.back() == '(') {
        // Pop the matching bracket and keep going.
        OpenDelimiters.pop_back();
        continue;
      } else {
        // It's a right parenthesis in a string literal.
        assert(inStringLiteral());
        continue;
      }
    default:
      // Normal token character.
      continue;
    }
  }
}
/// lexStringLiteral:
///   string_literal ::= ["]([^"\\\n\r]|character_escape)*["]
///
/// Handles interpolation segments (\(...)), unterminated strings, and emits
/// a fix-it converting single-quoted literals to double-quoted form.
void Lexer::lexStringLiteral() {
  const char *TokStart = CurPtr-1;
  assert((*TokStart == '"' || *TokStart == '\'') && "Unexpected start");
  // NOTE: We only allow single-quote string literals so we can emit useful
  // diagnostics about changing them to double quotes.
  bool wasErroneous = false;
  while (true) {
    if (*CurPtr == '\\' && *(CurPtr + 1) == '(') {
      // Consume tokens until we hit the corresponding ')'.
      CurPtr += 2;
      const char *EndPtr =
          skipToEndOfInterpolatedExpression(CurPtr, BufferEnd, Diags);
      if (*EndPtr == ')') {
        // Successfully scanned the body of the expression literal.
        CurPtr = EndPtr+1;
      } else {
        CurPtr = EndPtr;
        wasErroneous = true;
      }
      continue;
    }
    // String literals cannot have \n or \r in them.
    if (*CurPtr == '\r' || *CurPtr == '\n' || CurPtr == BufferEnd) {
      diagnose(TokStart, diag::lex_unterminated_string);
      return formToken(tok::unknown, TokStart);
    }
    unsigned CharValue = lexCharacter(CurPtr, *TokStart, true);
    wasErroneous |= CharValue == ~1U;
    // If this is the end of string, we are done.  If it is a normal character
    // or an already-diagnosed error, just munch it.
    if (CharValue == ~0U) {
      CurPtr++;
      if (wasErroneous)
        return formToken(tok::unknown, TokStart);
      if (*TokStart == '\'') {
        // Complain about single-quote string and suggest replacement with
        // double-quoted equivalent.
        StringRef orig(TokStart, CurPtr - TokStart);
        llvm::SmallString<32> replacement;
        replacement += '"';
        // Rewrite the body: un-escape \' and escape bare ".
        std::string str = orig.slice(1, orig.size() - 1).str();
        std::string quot = "\"";
        size_t pos = 0;
        while (pos != str.length()) {
          if (str.at(pos) == '\\') {
            if (str.at(pos + 1) == '\'') {
                // Un-escape escaped single quotes.
                str.replace(pos, 2, "'");
                ++pos;
            } else {
                // Skip over escaped characters.
                pos += 2;
            }
          } else if (str.at(pos) == '"') {
            str.replace(pos, 1, "\\\"");
            // Advance past the newly added ["\""].
            pos += 2;
          } else {
            ++pos;
          }
        }
        replacement += StringRef(str);
        replacement += '"';
        diagnose(TokStart, diag::lex_single_quote_string)
          .fixItReplaceChars(getSourceLoc(TokStart), getSourceLoc(CurPtr),
                             replacement);
      }
      return formToken(tok::string_literal, TokStart);
    }
  }
}
/// We found an opening curly quote in the source file.  Scan ahead until we
/// find and end-curly-quote (or straight one).  If we find what looks to be a
/// string literal, diagnose the problem and return a pointer to the end of the
/// entire string literal.  This helps us avoid parsing the body of the string
/// as program tokens, which will only lead to massive confusion.
///
/// Returns nullptr when no plausible end-of-string is found (interpolation,
/// end of line, or malformed UTF-8).
const char *Lexer::findEndOfCurlyQuoteStringLiteral(const char *Body) {
  while (true) {
    // Don't bother with string interpolations.
    if (*Body == '\\' && *(Body + 1) == '(')
      return nullptr;
    // We didn't find the end of the string literal if we ran to end of line.
    if (*Body == '\r' || *Body == '\n' || Body == BufferEnd)
      return nullptr;
    // Get the next character.
    const char *CharStart = Body;
    unsigned CharValue = lexCharacter(Body, '\0', false);
    // If the character was incorrectly encoded, give up.
    if (CharValue == ~1U) return nullptr;
    // If we found a straight-quote, then we're done.  Just return the spot
    // to continue.
    if (CharValue == '"')
      return Body;
    // If we found an ending curly quote (common since this thing started with
    // an opening curly quote) diagnose it with a fixit and then return.
    // 0x201D is U+201D RIGHT DOUBLE QUOTATION MARK.
    if (CharValue == 0x0000201D) {
      diagnose(CharStart, diag::lex_invalid_curly_quote)
        .fixItReplaceChars(getSourceLoc(CharStart), getSourceLoc(Body), "\"");
      return Body;
    }
    // Otherwise, keep scanning.
  }
}
/// lexEscapedIdentifier:
///   identifier ::= '`' identifier '`'
///
/// If it doesn't match this production, the leading ` is a punctuator.
void Lexer::lexEscapedIdentifier() {
  assert(CurPtr[-1] == '`' && "Unexpected start of escaped identifier");
  const char *Quote = CurPtr-1;
  // Check whether we have an identifier followed by another backtick, in which
  // case this is an escaped identifier.
  const char *IdentifierStart = CurPtr;
  if (advanceIfValidStartOfIdentifier(CurPtr, BufferEnd)) {
    // Keep continuing the identifier.
    while (advanceIfValidContinuationOfIdentifier(CurPtr, BufferEnd));
    // If we have the terminating "`", it's an escaped identifier.
    if (*CurPtr == '`') {
      ++CurPtr;
      // The token spans the backticks; the escaped flag lets clients know
      // to strip them when reading the identifier's text.
      formToken(tok::identifier, Quote);
      NextToken.setEscapedIdentifier(true);
      return;
    }
  }
  // The backtick is punctuation.
  CurPtr = IdentifierStart;
  formToken(tok::backtick, Quote);
}
/// Try to lex an editor placeholder of the form <#...#> (on one line).
/// A well-formed placeholder is diagnosed and lexed as an identifier so the
/// rest of the pipeline can proceed; otherwise the '<' lexes as an operator.
void Lexer::tryLexEditorPlaceholder() {
  assert(CurPtr[-1] == '<' && CurPtr[0] == '#');
  const char *TokStart = CurPtr-1;
  for (const char *Ptr = CurPtr+1; Ptr < BufferEnd-1; ++Ptr) {
    if (*Ptr == '\n')
      break;
    // A nested "<#" means the first one was not a placeholder after all.
    if (Ptr[0] == '<' && Ptr[1] == '#')
      break;
    if (Ptr[0] == '#' && Ptr[1] == '>') {
      // Found it. Flag it as error (or warning, if in playground mode) for the
      // rest of the compiler pipeline and lex it as an identifier.
      if (LangOpts.Playground) {
        diagnose(TokStart, diag::lex_editor_placeholder_in_playground);
      } else {
        diagnose(TokStart, diag::lex_editor_placeholder);
      }
      CurPtr = Ptr+2;
      formToken(tok::identifier, TokStart);
      return;
    }
  }
  // Not a well-formed placeholder.
  lexOperatorIdentifier();
}
/// Decode the escape sequences in a literal string segment \p Bytes into its
/// runtime byte content. Returns \p Bytes itself when no escapes were found;
/// otherwise returns a view of \p TempString holding the decoded bytes.
/// Invalid escapes (already diagnosed by the lexer) are silently dropped.
StringRef Lexer::getEncodedStringSegment(StringRef Bytes,
                                         SmallVectorImpl<char> &TempString) {
  TempString.clear();
  // Note that it is always safe to read one over the end of "Bytes" because
  // we know that there is a terminating " character.  Use BytesPtr to avoid a
  // range check subscripting on the StringRef.
  const char *BytesPtr = Bytes.begin();
  while (BytesPtr != Bytes.end()) {
    char CurChar = *BytesPtr++;
    if (CurChar != '\\') {
      TempString.push_back(CurChar);
      continue;
    }
    // Invalid escapes are accepted by the lexer but diagnosed as an error.  We
    // just ignore them here.
    unsigned CharValue = 0; // Unicode character value for \x, \u, \U.
    switch (*BytesPtr++) {
    default:
      continue;   // Invalid escape, ignore it.
    // Simple single-character escapes.
    case '0': TempString.push_back('\0'); continue;
    case 'n': TempString.push_back('\n'); continue;
    case 'r': TempString.push_back('\r'); continue;
    case 't': TempString.push_back('\t'); continue;
    case '"': TempString.push_back('"'); continue;
    case '\'': TempString.push_back('\''); continue;
    case '\\': TempString.push_back('\\'); continue;
    // String interpolation.
    case '(':
      // Interpolations must be split out by getStringLiteralSegments first.
      llvm_unreachable("string contained interpolated segments");
    // Unicode escapes of various lengths.
    case 'u':  //  \u HEX HEX HEX HEX
      if (BytesPtr[0] != '{')
        continue;  // Ignore invalid escapes.
      CharValue = lexUnicodeEscape(BytesPtr, /*no diagnostics*/nullptr);
      // Ignore invalid escapes.
      if (CharValue == ~1U) continue;
      break;
    }
    if (CharValue < 0x80)
      TempString.push_back(CharValue);
    else
      EncodeToUTF8(CharValue, TempString);
  }
  // If we didn't escape or reprocess anything, then we don't need to use the
  // temporary string, just point to the original one.  We know that this
  // is safe because unescaped strings are always shorter than their escaped
  // forms (in a valid string).
  if (TempString.size() == Bytes.size()) {
    TempString.clear();
    return Bytes;
  }
  return StringRef(TempString.begin(), TempString.size());
}
/// Split a string_literal token into alternating literal and
/// interpolated-expression segments.
///
/// \param Str the string_literal token to decompose.
/// \param Segments receives the resulting segments, in source order; the
///        list always ends with a (possibly empty) literal segment.
/// \param Diags diagnostic engine forwarded to interpolation scanning, or
///        null to suppress diagnostics.
void Lexer::getStringLiteralSegments(
    const Token &Str,
    SmallVectorImpl<StringSegment> &Segments,
    DiagnosticEngine *Diags) {
  assert(Str.is(tok::string_literal));
  // Get the bytes behind the string literal, dropping the double quotes.
  StringRef Bytes = Str.getText().drop_front().drop_back();
  // Note that it is always safe to read one over the end of "Bytes" because
  // we know that there is a terminating " character. Use BytesPtr to avoid a
  // range check subscripting on the StringRef.
  const char *SegmentStartPtr = Bytes.begin();
  const char *BytesPtr = SegmentStartPtr;
  // FIXME: Use SSE to scan for '\'.
  while (BytesPtr != Bytes.end()) {
    char CurChar = *BytesPtr++;
    if (CurChar != '\\')
      continue;
    if (*BytesPtr++ != '(')
      continue;
    // String interpolation.
    // Push the current segment; the "-2" excludes the "\(" just consumed.
    Segments.push_back(
        StringSegment::getLiteral(getSourceLoc(SegmentStartPtr),
                                  BytesPtr-SegmentStartPtr-2));
    // Find the closing ')'.
    const char *End = skipToEndOfInterpolatedExpression(BytesPtr,
                                                        Str.getText().end(),
                                                        Diags);
    assert(*End == ')' && "invalid string literal interpolations should"
           " not be returned as string literals");
    ++End;
    // Add an expression segment. It starts at the '(' (BytesPtr-1) and the
    // "+1" extends the length to cover the closing ')'.
    Segments.push_back(
        StringSegment::getExpr(getSourceLoc(BytesPtr-1), End-BytesPtr+1));
    // Reset the beginning of the segment to the string that remains to be
    // consumed.
    SegmentStartPtr = BytesPtr = End;
  }
  // Trailing literal text (possibly empty) after the last interpolation.
  Segments.push_back(
      StringSegment::getLiteral(getSourceLoc(SegmentStartPtr),
                                Bytes.end()-SegmentStartPtr));
}
//===----------------------------------------------------------------------===//
// Main Lexer Loop
//===----------------------------------------------------------------------===//
/// Lex the next token out of the buffer and stash it in NextToken.
///
/// This is the main dispatch loop of the lexer: it skips whitespace and
/// comments (restarting via the Restart label), then switches on the first
/// character of the token to the appropriate specialized lexing routine.
void Lexer::lexImpl() {
  assert(CurPtr >= BufferStart &&
         CurPtr <= BufferEnd && "Current pointer out of range!");
  NextToken.setAtStartOfLine(CurPtr == BufferStart);
  // Remember where we started so that we can find the comment range.
  LastCommentBlockStart = CurPtr;
  SeenComment = false;
Restart:
  // Remember the start of the token so we can form the text range.
  const char *TokStart = CurPtr;
  switch (*CurPtr++) {
  default: {
    // Not a single-byte token start: try multi-byte identifier/operator
    // starts, then fall into error recovery for disallowed characters.
    char const *tmp = CurPtr-1;
    if (advanceIfValidStartOfIdentifier(tmp, BufferEnd))
      return lexIdentifier();
    if (advanceIfValidStartOfOperator(tmp, BufferEnd))
      return lexOperatorIdentifier();
    if (advanceIfValidContinuationOfIdentifier(tmp, BufferEnd)) {
      // If this is a valid identifier continuation, but not a valid identifier
      // start, attempt to recover by eating more continuation characters.
      diagnose(CurPtr-1, diag::lex_invalid_identifier_start_character);
      while (advanceIfValidContinuationOfIdentifier(tmp, BufferEnd));
    } else {
      // This character isn't allowed in Swift source.
      uint32_t codepoint = validateUTF8CharacterAndAdvance(tmp, BufferEnd);
      if (codepoint == ~0U) {
        diagnose(CurPtr-1, diag::lex_invalid_utf8)
          .fixItReplaceChars(getSourceLoc(CurPtr-1), getSourceLoc(tmp), " ");
        CurPtr = tmp;
        goto Restart; // Skip presumed whitespace.
      } else if (codepoint == 0x0000201D) {
        // If this is an end curly quote, just diagnose it with a fixit hint.
        diagnose(CurPtr-1, diag::lex_invalid_curly_quote)
          .fixItReplaceChars(getSourceLoc(CurPtr-1), getSourceLoc(tmp), "\"");
      } else if (codepoint == 0x0000201C) {
        auto endPtr = tmp;
        // If this is a start curly quote, do a fuzzy match of a string literal
        // to improve recovery.
        if (auto tmp2 = findEndOfCurlyQuoteStringLiteral(tmp))
          tmp = tmp2;
        // Note, we intentionally diagnose the end quote before the start quote,
        // so that the IDE suggests fixing the end quote before the start quote.
        // This, in turn, works better with our error recovery because we won't
        // diagnose an end curly quote in the middle of a straight quoted
        // literal.
        diagnose(CurPtr-1, diag::lex_invalid_curly_quote)
          .fixItReplaceChars(getSourceLoc(CurPtr-1), getSourceLoc(endPtr),"\"");
      } else {
        diagnose(CurPtr-1, diag::lex_invalid_character)
          .fixItReplaceChars(getSourceLoc(CurPtr-1), getSourceLoc(tmp), " ");
        CurPtr = tmp;
        goto Restart; // Skip presumed whitespace.
      }
    }
    CurPtr = tmp;
    return formToken(tok::unknown, TokStart);
  }
  case '\n':
  case '\r':
    NextToken.setAtStartOfLine(true);
    goto Restart; // Skip whitespace.
  case ' ':
  case '\t':
  case '\f':
  case '\v':
    goto Restart; // Skip whitespace.
  case -1:
  case -2:
    // 0xFF/0xFE as signed chars: diagnosed as a UTF-16 byte-order marker.
    diagnose(CurPtr-1, diag::lex_utf16_bom_marker);
    CurPtr = BufferEnd;
    return formToken(tok::unknown, TokStart);
  case 0:
    // A nul at the code-completion pointer is the completion sentinel.
    if (CurPtr-1 == CodeCompletionPtr)
      return formToken(tok::code_complete, TokStart);
    // If this is a random nul character in the middle of a buffer, skip it as
    // whitespace.
    if (CurPtr-1 != BufferEnd) {
      diagnoseEmbeddedNul(Diags, CurPtr-1);
      goto Restart;
    }
    // Otherwise, this is the real end of the buffer. Put CurPtr back into
    // buffer bounds.
    CurPtr--;
    // Return EOF.
    return formToken(tok::eof, TokStart);
  case '@': return formToken(tok::at_sign, TokStart);
  case '{': return formToken(tok::l_brace, TokStart);
  case '[': {
    if (*CurPtr == '#') { // [#
      CurPtr++;
      return formToken(tok::l_square_lit, TokStart);
    }
    return formToken(tok::l_square, TokStart);
  }
  case '(': return formToken(tok::l_paren, TokStart);
  case '}': return formToken(tok::r_brace, TokStart);
  case ']': return formToken(tok::r_square, TokStart);
  case ')':
    return formToken(tok::r_paren, TokStart);
  case ',': return formToken(tok::comma, TokStart);
  case ';': return formToken(tok::semi, TokStart);
  case ':': return formToken(tok::colon, TokStart);
  case '#': {
    // Note: CurPtr now points just past the '#', so TokStart+1 == CurPtr and
    // getSubstring(TokStart + 1, n) names the n characters at CurPtr[0..n-1].
    if (*CurPtr == ']') { // #]
      CurPtr++;
      return formToken(tok::r_square_lit, TokStart);
    }
    if (getSubstring(TokStart + 1, 2).equals("if") &&
        isWhitespace(CurPtr[2])) {
      CurPtr += 2;
      return formToken(tok::pound_if, TokStart);
    }
    if (getSubstring(TokStart + 1, 4).equals("else") &&
        isWhitespace(CurPtr[4])) {
      CurPtr += 4;
      return formToken(tok::pound_else, TokStart);
    }
    if (getSubstring(TokStart + 1, 6).equals("elseif") &&
        isWhitespace(CurPtr[6])) {
      CurPtr += 6;
      return formToken(tok::pound_elseif, TokStart);
    }
    if (getSubstring(TokStart + 1, 5).equals("endif") &&
        (isWhitespace(CurPtr[5]) || CurPtr[5] == '\0')) {
      CurPtr += 5;
      return formToken(tok::pound_endif, TokStart);
    }
    if (getSubstring(TokStart + 1, 4).equals("line") &&
        isWhitespace(CurPtr[4])) {
      CurPtr += 4;
      return formToken(tok::pound_line, TokStart);
    }
    if (getSubstring(TokStart + 1, 9).equals("available")) {
      CurPtr += 9;
      return formToken(tok::pound_available, TokStart);
    }
    // Allow a hashbang #! line at the beginning of the file.
    if (CurPtr - 1 == BufferStart && *CurPtr == '!') {
      CurPtr--;
      if (BufferID != SourceMgr.getHashbangBufferID())
        diagnose(CurPtr, diag::lex_hashbang_not_allowed);
      skipHashbang();
      goto Restart;
    }
    return formToken(tok::pound, TokStart);
  }
  // Operator characters.
  case '/':
    if (CurPtr[0] == '/') { // "//"
      skipSlashSlashComment();
      SeenComment = true;
      if (isKeepingComments())
        return formToken(tok::comment, TokStart);
      goto Restart;
    }
    if (CurPtr[0] == '*') { // "/*"
      skipSlashStarComment();
      SeenComment = true;
      if (isKeepingComments())
        return formToken(tok::comment, TokStart);
      goto Restart;
    }
    return lexOperatorIdentifier();
  case '%':
    // Lex %[0-9a-zA-Z_]+ as a local SIL value
    if (InSILBody && clang::isIdentifierBody(CurPtr[0])) {
      do {
        ++CurPtr;
      } while (clang::isIdentifierBody(CurPtr[0]));
      return formToken(tok::sil_local_name, TokStart);
    }
    return lexOperatorIdentifier();
  case '!':
    if (InSILBody)
      return formToken(tok::sil_exclamation, TokStart);
    // Left-bound '!' (no space before it) is the postfix force operator.
    if (isLeftBound(TokStart, BufferStart))
      return formToken(tok::exclaim_postfix, TokStart);
    return lexOperatorIdentifier();
  case '?':
    if (isLeftBound(TokStart, BufferStart))
      return formToken(tok::question_postfix, TokStart);
    return lexOperatorIdentifier();
  case '<':
    if (CurPtr[0] == '#')
      return tryLexEditorPlaceholder();
    SWIFT_FALLTHROUGH;
  case '=': case '-': case '+': case '*': case '>':
  case '&': case '|': case '^': case '~': case '.':
    return lexOperatorIdentifier();
  case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G':
  case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N':
  case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U':
  case 'V': case 'W': case 'X': case 'Y': case 'Z':
  case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g':
  case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n':
  case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u':
  case 'v': case 'w': case 'x': case 'y': case 'z':
  case '_':
    return lexIdentifier();
  case '$':
    return lexDollarIdent();
  case '0': case '1': case '2': case '3': case '4':
  case '5': case '6': case '7': case '8': case '9':
    return lexNumber();
  case '"':
  case '\'':
    return lexStringLiteral();
  case '`':
    return lexEscapedIdentifier();
  }
}
/// Re-lex and return the token that begins at \p Loc, or an empty Token when
/// the location is invalid or not inside any known buffer.
Token Lexer::getTokenAtLocation(const SourceManager &SM, SourceLoc Loc) {
  // An invalid location cannot name a token.
  if (!Loc.isValid())
    return Token();

  // Locate the buffer holding this location; bail out if none does.
  int BufID = SM.findBufferContainingLoc(Loc);
  if (BufID < 0)
    return Token();

  // Use fake language options; language options only affect validity
  // and the exact token produced.
  LangOptions DummyLangOpts;

  // Here we return comments as tokens because either the caller skipped
  // comments and normally we won't be at the beginning of a comment token
  // (making this option irrelevant), or the caller lexed comments and
  // we need to lex just the comment token.
  Lexer Relexer(DummyLangOpts, SM, BufID, nullptr, /*InSILMode=*/ false,
                CommentRetentionMode::ReturnAsTokens);
  Relexer.restoreState(State(Loc));
  return Relexer.peekNextToken();
}
/// Return the location just past the end of the token starting at \p Loc,
/// or an invalid location when no token can be found there.
SourceLoc Lexer::getLocForEndOfToken(const SourceManager &SM, SourceLoc Loc) {
  // The end of a token is its start advanced by the token's length.
  Token Tok = getTokenAtLocation(SM, Loc);
  return Loc.getAdvancedLocOrInvalid(Tok.getLength());
}
/// Find the start location of the token containing \p Offset by re-lexing
/// the sub-range [BufferStart, BufferEnd) of the given buffer.
///
/// When the offset lands inside an interpolated expression of a string
/// literal, the function recurses once into that segment (flagged by
/// \p InInterpolatedString) to re-lex its contents as ordinary code.
/// If the offset points into whitespace, the offset's own location is
/// returned unchanged.
static SourceLoc getLocForStartOfTokenInBuf(SourceManager &SM,
                                            unsigned BufferID,
                                            unsigned Offset,
                                            unsigned BufferStart,
                                            unsigned BufferEnd,
                                            bool InInterpolatedString) {
  // Use fake language options; language options only affect validity
  // and the exact token produced.
  LangOptions FakeLangOptions;
  Lexer L(FakeLangOptions, SM, BufferID, nullptr, /*InSILMode=*/false,
          CommentRetentionMode::None, BufferStart, BufferEnd);
  // Lex tokens until we find the token that contains the source location.
  Token Tok;
  do {
    L.lex(Tok);
    unsigned TokOffs = SM.getLocOffsetInBuffer(Tok.getLoc(), BufferID);
    if (TokOffs > Offset) {
      // We ended up skipping over the source location entirely, which means
      // that it points into whitespace. We are done here.
      break;
    }
    if (Offset < TokOffs+Tok.getLength()) {
      // Current token encompasses our source location.
      if (Tok.is(tok::string_literal)) {
        assert(!InInterpolatedString);
        SmallVector<Lexer::StringSegment, 4> Segments;
        Lexer::getStringLiteralSegments(Tok, Segments, /*Diags=*/0);
        for (auto &Seg : Segments) {
          unsigned SegOffs = SM.getLocOffsetInBuffer(Seg.Loc, BufferID);
          unsigned SegEnd = SegOffs+Seg.Length;
          if (SegOffs > Offset)
            break;
          // If the offset is inside an interpolated expr segment, re-lex.
          if (Seg.Kind == Lexer::StringSegment::Expr && Offset < SegEnd)
            return getLocForStartOfTokenInBuf(SM, BufferID, Offset,
                                              /*BufferStart=*/SegOffs,
                                              /*BufferEnd=*/SegEnd,
                                              /*InInterpolatedString=*/true);
        }
      }
      return Tok.getLoc();
    }
  } while (Tok.isNot(tok::eof));
  // We've passed our source location; just return the original source location.
  return SM.getLocForOffset(BufferID, Offset);
}
/// Walk backwards from \p current to the first character of the line it is
/// on; returns \p bufStart when no earlier line terminator exists.
static const char *findStartOfLine(const char *bufStart, const char *current) {
  for (; current != bufStart; --current) {
    // The character following a '\n' or '\r' is the first column of a line.
    if (current[0] == '\n' || current[0] == '\r')
      return current + 1;
  }
  return current;
}
/// Map a buffer offset to the start location of the token containing it.
/// Offsets pointing into whitespace map to themselves; out-of-range offsets
/// yield an invalid location.
SourceLoc Lexer::getLocForStartOfToken(SourceManager &SM, unsigned BufferID,
                                       unsigned Offset) {
  StringRef Buffer = SM.extractText(SM.getRangeForBuffer(BufferID));
  const char *BufStart = Buffer.data();
  if (Offset > Buffer.size())
    return SourceLoc();

  const char *Ptr = BufStart + Offset;
  // If it points to whitespace return the SourceLoc for it.
  switch (Ptr[0]) {
  case '\n': case '\r': case ' ': case '\t':
    return SM.getLocForOffset(BufferID, Offset);
  default:
    break;
  }

  // Back up from the current location until we hit the beginning of a line
  // (or the buffer). We'll relex from that point.
  const char *LineStart = findStartOfLine(BufStart, Ptr);
  return getLocForStartOfTokenInBuf(SM, BufferID, Offset,
                                    /*BufferStart=*/LineStart - BufStart,
                                    /*BufferEnd=*/Buffer.size(),
                                    /*InInterpolatedString=*/false);
}
/// Return the location of the first character of the line containing \p Loc.
SourceLoc Lexer::getLocForStartOfLine(SourceManager &SM, SourceLoc Loc) {
  // Don't try to do anything with an invalid location.
  if (Loc.isInvalid())
    return Loc;

  // Locate the buffer that holds this location.
  int BufID = SM.findBufferContainingLoc(Loc);
  if (BufID < 0)
    return SourceLoc();

  // Scan backwards from the location's offset to the line start.
  StringRef Text = SM.extractText(SM.getRangeForBuffer(BufID));
  const char *TextStart = Text.data();
  const char *Pos = TextStart + SM.getLocOffsetInBuffer(Loc, BufID);
  return getSourceLoc(findStartOfLine(TextStart, Pos));
}
/// Return the location just past the end of the line containing \p Loc,
/// found by re-lexing from \p Loc and skipping to the end of the line.
SourceLoc Lexer::getLocForEndOfLine(SourceManager &SM, SourceLoc Loc) {
  // Don't try to do anything with an invalid location.
  if (Loc.isInvalid())
    return Loc;

  // Locate the buffer that holds this location.
  int BufID = SM.findBufferContainingLoc(Loc);
  if (BufID < 0)
    return SourceLoc();

  // Use fake language options; language options only affect validity
  // and the exact token produced.
  LangOptions DummyLangOpts;

  // Here we return comments as tokens because either the caller skipped
  // comments and normally we won't be at the beginning of a comment token
  // (making this option irrelevant), or the caller lexed comments and
  // we need to lex just the comment token.
  Lexer Relexer(DummyLangOpts, SM, BufID, nullptr, /*InSILMode=*/ false,
                CommentRetentionMode::ReturnAsTokens);
  Relexer.restoreState(State(Loc));
  Relexer.skipToEndOfLine();
  return getSourceLoc(Relexer.CurPtr);
}
/// Return the leading horizontal whitespace of the line containing \p Loc,
/// or an empty string for an invalid/unknown location.
StringRef Lexer::getIndentationForLine(SourceManager &SM, SourceLoc Loc) {
  // Don't try to do anything with an invalid location.
  if (Loc.isInvalid())
    return "";

  // Locate the buffer that holds this location.
  int BufID = SM.findBufferContainingLoc(Loc);
  if (BufID < 0)
    return "";

  StringRef Text = SM.extractText(SM.getRangeForBuffer(BufID));
  const char *TextStart = Text.data();
  unsigned Offset = SM.getLocOffsetInBuffer(Loc, BufID);
  const char *LineStart = findStartOfLine(TextStart, TextStart + Offset);

  // Advance over spaces/tabs from the line start to measure the indentation.
  const char *Cursor = LineStart;
  while (*Cursor && isHorizontalWhitespace(*Cursor))
    ++Cursor;
  return StringRef(LineStart, Cursor - LineStart);
}
| MukeshKumarS/Swift | lib/Parse/Lexer.cpp | C++ | apache-2.0 | 63,217 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.workspaces.model;
import javax.annotation.Generated;
/**
 * <p>
 * This operation is not supported.
 * </p>
 * <p>
 * Thrown by WorkSpaces API calls that are not available for the requested
 * resource or configuration; extends the service's base exception so callers
 * can catch it alongside other WorkSpaces errors.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class OperationNotSupportedException extends com.amazonaws.services.workspaces.model.AmazonWorkspacesException {
    // Fixed serialization id; this exception carries no state of its own.
    private static final long serialVersionUID = 1L;
    /**
     * Constructs a new OperationNotSupportedException with the specified error message.
     *
     * @param message
     *        Describes the error encountered.
     */
    public OperationNotSupportedException(String message) {
        super(message);
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-workspaces/src/main/java/com/amazonaws/services/workspaces/model/OperationNotSupportedException.java | Java | apache-2.0 | 1,230 |
/*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
package com.spectralogic.ds3client.commands.spectrads3;
import com.spectralogic.ds3client.networking.HttpVerb;
import com.spectralogic.ds3client.commands.interfaces.AbstractRequest;
import com.spectralogic.ds3client.models.DataIsolationLevel;
import java.lang.Integer;
import com.spectralogic.ds3client.models.DataPersistenceRuleType;
import java.util.UUID;
import com.google.common.net.UrlEscapers;
/**
 * Request object for modifying an existing data persistence rule via the
 * Spectra S3 REST API. Issues a PUT against
 * {@code /_rest_/data_persistence_rule/<id>}; the optional fields set through
 * the fluent {@code with*} methods are sent as query parameters.
 */
public class ModifyDataPersistenceRuleSpectraS3Request extends AbstractRequest {
    // Variables
    // Path parameter: id of the rule being modified (required, immutable).
    private final String dataPersistenceRuleId;
    // Optional query parameters; only sent when set via the with* methods.
    private DataIsolationLevel isolationLevel;
    private Integer minimumDaysToRetain;
    private DataPersistenceRuleType type;
    // Constructor
    public ModifyDataPersistenceRuleSpectraS3Request(final UUID dataPersistenceRuleId) {
        this.dataPersistenceRuleId = dataPersistenceRuleId.toString();
    }
    public ModifyDataPersistenceRuleSpectraS3Request(final String dataPersistenceRuleId) {
        this.dataPersistenceRuleId = dataPersistenceRuleId;
    }
    /** Sets the isolation_level query parameter and returns this for chaining. */
    public ModifyDataPersistenceRuleSpectraS3Request withIsolationLevel(final DataIsolationLevel isolationLevel) {
        this.isolationLevel = isolationLevel;
        this.updateQueryParam("isolation_level", isolationLevel);
        return this;
    }
    /** Sets the minimum_days_to_retain query parameter and returns this for chaining. */
    public ModifyDataPersistenceRuleSpectraS3Request withMinimumDaysToRetain(final Integer minimumDaysToRetain) {
        this.minimumDaysToRetain = minimumDaysToRetain;
        this.updateQueryParam("minimum_days_to_retain", minimumDaysToRetain);
        return this;
    }
    /** Sets the type query parameter and returns this for chaining. */
    public ModifyDataPersistenceRuleSpectraS3Request withType(final DataPersistenceRuleType type) {
        this.type = type;
        this.updateQueryParam("type", type);
        return this;
    }
    @Override
    public HttpVerb getVerb() {
        return HttpVerb.PUT;
    }
    @Override
    public String getPath() {
        return "/_rest_/data_persistence_rule/" + dataPersistenceRuleId;
    }
    public String getDataPersistenceRuleId() {
        return this.dataPersistenceRuleId;
    }
    public DataIsolationLevel getIsolationLevel() {
        return this.isolationLevel;
    }
    public Integer getMinimumDaysToRetain() {
        return this.minimumDaysToRetain;
    }
    public DataPersistenceRuleType getType() {
        return this.type;
    }
} | DenverM80/ds3_java_sdk | ds3-sdk/src/main/java/com/spectralogic/ds3client/commands/spectrads3/ModifyDataPersistenceRuleSpectraS3Request.java | Java | apache-2.0 | 3,184 |
package org.andersonkmi.data;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Calendar;
/**
 * Data-access object for USUARIO rows. Provides lookups by login and id,
 * plus updates of the last-access timestamp and of the user's name/login.
 * Connections are obtained from {@link BaseDAO#getConnection()} and closed
 * in finally blocks; SQL failures are logged to stderr and swallowed,
 * matching the original best-effort contract (lookups return null on error).
 */
public class UsuarioDAO extends BaseDAO {
    public UsuarioDAO() {
        super();
    }

    /**
     * Finds a user by its login.
     *
     * @param login the login to search for.
     * @return the matching {@link Usuario}, or {@code null} if none exists
     *         or a database error occurred.
     */
    public Usuario findByLogin(String login) {
        Usuario usuario = null;
        String statement = "SELECT ID, LOGIN, NOME, PASSWORD, LAST_ACCESS FROM USUARIO WHERE LOGIN = ?";
        Connection connection = null;
        try {
            connection = getConnection();
            PreparedStatement query = connection.prepareStatement(statement);
            query.setString(1, login);
            ResultSet rs = query.executeQuery();
            while(rs.next()) {
                usuario = new Usuario();
                usuario.setId(rs.getInt("ID"));
                usuario.setName(rs.getString("NOME"));
                usuario.setLogin(login);
                usuario.setPassword(rs.getString("PASSWORD"));
                // LAST_ACCESS is nullable: only populate it when present.
                Timestamp lastAccessTs = rs.getTimestamp("LAST_ACCESS");
                if(lastAccessTs != null) {
                    Calendar lastAccess = Calendar.getInstance();
                    lastAccess.setTimeInMillis(lastAccessTs.getTime());
                    usuario.setLastAccess(lastAccess);
                }
            }
            rs.close();
        } catch (SQLException exception) {
            StringBuffer buffer = new StringBuffer();
            buffer.append("An error has occurred when searching for the user = '").append(login).append("'");
            System.err.println(buffer.toString() + " - " + exception.getMessage());
        } finally {
            // Closing the connection also releases its statements/result sets.
            if(connection != null) {
                try {
                    connection.close();
                } catch (SQLException exception) {}
            }
        }
        return usuario;
    }

    /**
     * Finds a user by its primary key.
     *
     * @param id the user id to search for.
     * @return the matching {@link Usuario}, or {@code null} if none exists
     *         or a database error occurred.
     */
    public Usuario findById(Integer id) {
        Usuario usuario = null;
        String statement = "SELECT ID, LOGIN, NOME, PASSWORD, LAST_ACCESS FROM USUARIO WHERE ID = ?";
        Connection connection = null;
        try {
            connection = getConnection();
            PreparedStatement query = connection.prepareStatement(statement);
            query.setInt(1, id);
            ResultSet rs = query.executeQuery();
            while(rs.next()) {
                usuario = new Usuario();
                usuario.setId(rs.getInt("ID"));
                usuario.setName(rs.getString("NOME"));
                usuario.setLogin(rs.getString("LOGIN"));
                usuario.setPassword(rs.getString("PASSWORD"));
                // LAST_ACCESS is nullable: only populate it when present.
                Timestamp lastAccessTs = rs.getTimestamp("LAST_ACCESS");
                if(lastAccessTs != null) {
                    Calendar lastAccess = Calendar.getInstance();
                    lastAccess.setTimeInMillis(lastAccessTs.getTime());
                    usuario.setLastAccess(lastAccess);
                }
            }
            rs.close();
        } catch (SQLException exception) {
            StringBuffer buffer = new StringBuffer();
            buffer.append("An error has occurred when searching for the user = '").append(id).append("'");
            System.err.println(buffer.toString() + " - " + exception.getMessage());
        } finally {
            if(connection != null) {
                try {
                    connection.close();
                } catch (SQLException exception) {}
            }
        }
        return usuario;
    }

    /**
     * Stamps the user's LAST_ACCESS column with the current time.
     *
     * @param user the user whose last access is being recorded; only its id
     *             is read.
     */
    public void updateLastAccess(Usuario user) {
        String updateStatement = "UPDATE USUARIO SET LAST_ACCESS = ? WHERE ID = ?";
        Connection connection = null;
        try {
            connection = getConnection();
            PreparedStatement statement = connection.prepareStatement(updateStatement);
            statement.setTimestamp(1, new Timestamp(Calendar.getInstance().getTimeInMillis()));
            statement.setInt(2, user.getId());
            statement.executeUpdate();
        } catch (SQLException exception) {
            StringBuffer buffer = new StringBuffer();
            buffer.append("An error has occurred when updating last access for user = '").append(user.getLogin()).append("'");
            System.err.println(buffer.toString() + " - " + exception.getMessage());
        } finally {
            try {
                if(connection != null) {
                    connection.close();
                }
            } catch (SQLException exception) {}
        }
    }

    /**
     * Persists the user's name and login.
     *
     * @param user the user to update; its id selects the row, and its name
     *             and login are written.
     */
    public void updateUser(Usuario user) {
        String updateStatement = "UPDATE USUARIO SET NOME = ?, LOGIN = ? WHERE ID = ?";
        Connection connection = null;
        try {
            connection = getConnection();
            PreparedStatement statement = connection.prepareStatement(updateStatement);
            // Bind parameters to match the SQL: NOME, LOGIN, then the key.
            // (Previously this bound a Timestamp to NOME and the id to LOGIN,
            // leaving the WHERE placeholder unbound - a copy-paste bug from
            // updateLastAccess.)
            statement.setString(1, user.getName());
            statement.setString(2, user.getLogin());
            statement.setInt(3, user.getId());
            statement.executeUpdate();
        } catch (SQLException exception) {
            StringBuffer buffer = new StringBuffer();
            buffer.append("An error has occurred when updating user = '").append(user.getLogin()).append("'");
            System.err.println(buffer.toString() + " - " + exception.getMessage());
        } finally {
            try {
                if(connection != null) {
                    connection.close();
                }
            } catch (SQLException exception) {}
        }
    }
}
| andersonkmi/bluemix | src/main/java/org/andersonkmi/data/UsuarioDAO.java | Java | apache-2.0 | 4,522 |