code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
Copyright 2014-2016 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apple.uikit;
import apple.NSObject;
import apple.foundation.NSArray;
import apple.foundation.NSCoder;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.protocol.NSCopying;
import apple.foundation.protocol.NSMutableCopying;
import apple.foundation.protocol.NSSecureCoding;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NFloat;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.ProtocolClassMethod;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
/**
 * NSParagraphStyle
 *
 * <p>Generated Multi-OS Engine (MOE) binding for UIKit's {@code NSParagraphStyle}
 * Objective-C class. Every method is a native stub dispatched through the
 * selector named in its {@code @Selector} annotation; there is no Java-side
 * logic in this class.</p>
 */
@Generated
@Library("UIKit")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class NSParagraphStyle extends NSObject implements NSCopying, NSMutableCopying, NSSecureCoding {
    static {
        // Register the generated native bindings with the MOE runtime at class load.
        NatJ.register();
    }
    @Generated
    protected NSParagraphStyle(Pointer peer) {
        super(peer);
    }
    // --- Objective-C class (static) methods inherited from NSObject ---
    @Generated
    @Selector("accessInstanceVariablesDirectly")
    public static native boolean accessInstanceVariablesDirectly();
    @Generated
    @Owned
    @Selector("alloc")
    public static native NSParagraphStyle alloc();
    @Owned
    @Generated
    @Selector("allocWithZone:")
    public static native NSParagraphStyle allocWithZone(VoidPtr zone);
    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);
    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);
    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();
    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();
    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();
    /**
     * This class property returns a shared and cached NSParagraphStyle instance with the default style settings, with same value as the result of [[NSParagraphStyle alloc] init].
     */
    @Generated
    @Selector("defaultParagraphStyle")
    public static native NSParagraphStyle defaultParagraphStyle();
    /**
     * languageName is in ISO lang region format
     */
    @Generated
    @Selector("defaultWritingDirectionForLanguage:")
    @NInt
    public static native long defaultWritingDirectionForLanguage(String languageName);
    @Generated
    @Selector("description")
    public static native String description_static();
    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();
    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);
    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);
    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
    @Generated
    @Owned
    @Selector("new")
    public static native NSParagraphStyle new_objc();
    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);
    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);
    @Generated
    @Selector("setVersion:")
    public static native void setVersion_static(@NInt long aVersion);
    @Generated
    @Selector("superclass")
    public static native Class superclass_static();
    @Generated
    @Selector("supportsSecureCoding")
    public static native boolean supportsSecureCoding();
    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
    // --- Instance properties and methods of NSParagraphStyle ---
    @Generated
    @Selector("alignment")
    @NInt
    public native long alignment();
    /**
     * Tightens inter-character spacing in attempt to fit lines wider than the available space if the line break mode is one of the truncation modes before starting to truncate. NO by default. The maximum amount of tightening performed is determined by the system based on contexts such as font, line width, etc.
     */
    @Generated
    @Selector("allowsDefaultTighteningForTruncation")
    public native boolean allowsDefaultTighteningForTruncation();
    @Generated
    @Selector("baseWritingDirection")
    @NInt
    public native long baseWritingDirection();
    @Generated
    @Owned
    @Selector("copyWithZone:")
    @MappedReturn(ObjCObjectMapper.class)
    public native Object copyWithZone(VoidPtr zone);
    /**
     * The default tab interval used for locations beyond the last element in tabStops
     */
    @Generated
    @Selector("defaultTabInterval")
    @NFloat
    public native double defaultTabInterval();
    @Generated
    @Selector("encodeWithCoder:")
    public native void encodeWithCoder(NSCoder coder);
    /**
     * Distance from margin to edge appropriate for text direction
     */
    @Generated
    @Selector("firstLineHeadIndent")
    @NFloat
    public native double firstLineHeadIndent();
    /**
     * Distance from margin to front edge of paragraph
     */
    @Generated
    @Selector("headIndent")
    @NFloat
    public native double headIndent();
    /**
     * Specifies the threshold for hyphenation. Valid values lie between 0.0 and 1.0 inclusive. Hyphenation will be attempted when the ratio of the text width as broken without hyphenation to the width of the line fragment is less than the hyphenation factor. When this takes on its default value of 0.0, the layout manager's hyphenation factor is used instead. When both are 0.0, hyphenation is disabled.
     */
    @Generated
    @Selector("hyphenationFactor")
    public native float hyphenationFactor();
    @Generated
    @Selector("init")
    public native NSParagraphStyle init();
    @Generated
    @Selector("initWithCoder:")
    public native NSParagraphStyle initWithCoder(NSCoder coder);
    @Generated
    @Selector("lineBreakMode")
    @NInt
    public native long lineBreakMode();
    /**
     * Natural line height is multiplied by this factor (if positive) before being constrained by minimum and maximum line height.
     */
    @Generated
    @Selector("lineHeightMultiple")
    @NFloat
    public native double lineHeightMultiple();
    /**
     * "Leading": distance between the bottom of one line fragment and top of next (applied between lines in the same container). This value is included in the line fragment heights in layout manager.
     */
    @Generated
    @Selector("lineSpacing")
    @NFloat
    public native double lineSpacing();
    /**
     * 0 implies no maximum.
     */
    @Generated
    @Selector("maximumLineHeight")
    @NFloat
    public native double maximumLineHeight();
    /**
     * Line height is the distance from bottom of descenders to top of ascenders; basically the line fragment height. Does not include lineSpacing (which is added after this computation).
     */
    @Generated
    @Selector("minimumLineHeight")
    @NFloat
    public native double minimumLineHeight();
    @Owned
    @Generated
    @Selector("mutableCopyWithZone:")
    @MappedReturn(ObjCObjectMapper.class)
    public native Object mutableCopyWithZone(VoidPtr zone);
    /**
     * Distance between the bottom of this paragraph and top of next (or the beginning of its paragraphSpacingBefore, if any).
     */
    @Generated
    @Selector("paragraphSpacing")
    @NFloat
    public native double paragraphSpacing();
    /**
     * Distance between the bottom of the previous paragraph (or the end of its paragraphSpacing, if any) and the top of this paragraph.
     */
    @Generated
    @Selector("paragraphSpacingBefore")
    @NFloat
    public native double paragraphSpacingBefore();
    // Instance-side bridge for the NSSecureCoding protocol class method.
    @Generated
    @ProtocolClassMethod("supportsSecureCoding")
    public boolean _supportsSecureCoding() {
        return supportsSecureCoding();
    }
    /**
     * An array of NSTextTabs. Contents should be ordered by location. The default value is an array of 12 left-aligned tabs at 28pt interval
     */
    @Generated
    @Selector("tabStops")
    public native NSArray<? extends NSTextTab> tabStops();
    /**
     * Distance from margin to back edge of paragraph; if negative or 0, from other margin
     */
    @Generated
    @Selector("tailIndent")
    @NFloat
    public native double tailIndent();
    /**
     * Specifies the line break strategies that may be used for laying out the paragraph. The default value is NSLineBreakStrategyNone.
     */
    @Generated
    @Selector("lineBreakStrategy")
    @NUInt
    public native long lineBreakStrategy();
    /**
     * A property controlling the hyphenation behavior for the paragraph associated with the paragraph style. The exact hyphenation logic is dynamically determined by the layout context such as language, platform, etc. When YES, it affects the return value from -hyphenationFactor when the property is set to 0.0.
     */
    @Generated
    @Selector("usesDefaultHyphenation")
    public native boolean usesDefaultHyphenation();
}
| multi-os-engine/moe-core | moe.apple/moe.platform.ios/src/main/java/apple/uikit/NSParagraphStyle.java | Java | apache-2.0 | 11,103 |
/*
* Copyright 2004-2011 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.seasar.dao.node;
import org.seasar.dao.CommandContext;
import org.seasar.dao.IllegalBoolExpressionRuntimeException;
import org.seasar.framework.util.OgnlUtil;
/**
 * Node representing a conditional (IF) block of a dynamic SQL statement.
 * The OGNL expression is parsed once at construction time and evaluated
 * against the {@link CommandContext} on each {@link #accept(CommandContext)}.
 *
 * @author higa
 */
public class IfNode extends ContainerNode {

    /** Raw OGNL boolean expression source, as written in the SQL comment. */
    private final String expression;

    /** Pre-parsed OGNL expression, compiled once in the constructor. */
    private final Object parsedExpression;

    /** Optional ELSE branch, visited when the expression evaluates to false. */
    private ElseNode elseNode;

    /**
     * Creates an IF node and eagerly parses its expression.
     *
     * @param expression OGNL expression expected to evaluate to a Boolean
     */
    public IfNode(String expression) {
        this.expression = expression;
        this.parsedExpression = OgnlUtil.parseExpression(expression);
    }

    /**
     * @return the raw OGNL expression source
     */
    public String getExpression() {
        return expression;
    }

    /**
     * @return the ELSE branch, or {@code null} if none was set
     */
    public ElseNode getElseNode() {
        return elseNode;
    }

    public void setElseNode(ElseNode elseNode) {
        this.elseNode = elseNode;
    }

    /**
     * Evaluates the expression; visits this node's children when true,
     * otherwise the ELSE branch (if any). Marks the context enabled in
     * either visited case.
     *
     * @throws IllegalBoolExpressionRuntimeException if the expression does
     *         not evaluate to a Boolean
     * @see org.seasar.dao.Node#accept(org.seasar.dao.QueryContext)
     */
    public void accept(CommandContext ctx) {
        Object result = OgnlUtil.getValue(parsedExpression, ctx);
        // Guard clause: anything other than a Boolean is a caller error.
        if (!(result instanceof Boolean)) {
            throw new IllegalBoolExpressionRuntimeException(expression);
        }
        if (((Boolean) result).booleanValue()) {
            super.accept(ctx);
            ctx.setEnabled(true);
        } else if (elseNode != null) {
            elseNode.accept(ctx);
            ctx.setEnabled(true);
        }
    }
}
| seasarorg/s2dao | s2-dao/src/main/java/org/seasar/dao/node/IfNode.java | Java | apache-2.0 | 2,029 |
package uk.ac.ox.zoo.seeg.abraid.mp.common.service.workflow.support.extent;
import org.junit.Test;
import uk.ac.ox.zoo.seeg.abraid.mp.common.domain.DiseaseExtentClass;
import uk.ac.ox.zoo.seeg.abraid.mp.common.domain.DiseaseOccurrence;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for DiseaseExtentGenerationOutputData.
 * Copyright (c) 2015 University of Oxford
 */
public class DiseaseExtentGenerationOutputDataTest {
    @Test
    public void constructorBindsFieldsCorrectly() {
        // Arrange: three distinct map instances so identity checks are meaningful.
        Map<Integer, DiseaseExtentClass> classesByGaulCode = new HashMap<>();
        Map<Integer, Integer> countsByGaulCode = new HashMap<>();
        Map<Integer, Collection<DiseaseOccurrence>> latestByGaulCode = new HashMap<>();

        // Act
        DiseaseExtentGenerationOutputData output = new DiseaseExtentGenerationOutputData(
                classesByGaulCode,
                countsByGaulCode,
                latestByGaulCode);

        // Assert: the constructor must store the exact same instances, not copies.
        assertThat(output.getDiseaseExtentClassByGaulCode()).isSameAs(classesByGaulCode);
        assertThat(output.getOccurrenceCounts()).isSameAs(countsByGaulCode);
        assertThat(output.getLatestOccurrencesByGaulCode()).isSameAs(latestByGaulCode);
    }
}
| SEEG-Oxford/ABRAID-MP | src/Common/test/uk/ac/ox/zoo/seeg/abraid/mp/common/service/workflow/support/extent/DiseaseExtentGenerationOutputDataTest.java | Java | apache-2.0 | 1,345 |
/**
* Copyright (c) 2013-2019 Contributors to the Eclipse Foundation
*
* <p> See the NOTICE file distributed with this work for additional information regarding copyright
* ownership. All rights reserved. This program and the accompanying materials are made available
* under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
* available at http://www.apache.org/licenses/LICENSE-2.0.txt
*/
package org.locationtech.geowave.mapreduce;
import org.apache.hadoop.io.Writable;
import org.locationtech.geowave.core.store.api.DataTypeAdapter;
/**
 * This is an interface that extends data adapter to allow map reduce jobs to easily convert hadoop
 * writable objects to and from the geowave native representation of the objects. This allow for
 * generally applicable map reduce jobs to be written using base classes for the mapper that can
 * handle translations.
 *
 * @param <T> the native type
 * @param <W> the writable type
 */
public interface HadoopDataAdapter<T, W extends Writable> extends DataTypeAdapter<T> {
  /**
   * Creates a serializer that translates between this adapter's native type
   * {@code T} and its Hadoop {@link Writable} representation {@code W}.
   *
   * @return a serializer for converting native objects to/from writables
   */
  public HadoopWritableSerializer<T, W> createWritableSerializer();
}
| spohnan/geowave | core/mapreduce/src/main/java/org/locationtech/geowave/mapreduce/HadoopDataAdapter.java | Java | apache-2.0 | 1,132 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer;
import com.google.android.exoplayer.util.Util;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Encapsulates the information describing the format of media data, be it audio
 * or video.
 *
 * <p>Instances are effectively immutable apart from the max video dimensions,
 * which may be set after construction via {@link #setMaxVideoDimensions(int, int)}.
 * Fields that do not apply to a given media type hold {@link #NO_VALUE}.</p>
 */
public class MediaFormat
{
	public static final int NO_VALUE = -1;
	public final String mimeType;
	public final int maxInputSize;
	public final int width;
	public final int height;
	public final int channelCount;
	public final int sampleRate;
	private int maxWidth;
	private int maxHeight;
	public final List<byte[]> initializationData;
	// Lazy-initialized hashcode (0 means "not yet computed").
	private int hashCode;
	// Possibly-lazy-initialized framework media format.
	private android.media.MediaFormat frameworkMediaFormat;

	/** Wraps a framework {@link android.media.MediaFormat} (API 16+). */
	@TargetApi(16)
	public static MediaFormat createFromFrameworkMediaFormatV16(android.media.MediaFormat format)
	{
		return new MediaFormat(format);
	}

	/** Creates a video format; audio-only fields are set to {@link #NO_VALUE}. */
	public static MediaFormat createVideoFormat(String mimeType, int maxInputSize, int width, int height, List<byte[]> initializationData)
	{
		return new MediaFormat(mimeType, maxInputSize, width, height, NO_VALUE, NO_VALUE, initializationData);
	}

	/** Creates an audio format; video-only fields are set to {@link #NO_VALUE}. */
	public static MediaFormat createAudioFormat(String mimeType, int maxInputSize, int channelCount, int sampleRate, List<byte[]> initializationData)
	{
		return new MediaFormat(mimeType, maxInputSize, NO_VALUE, NO_VALUE, channelCount, sampleRate, initializationData);
	}

	@TargetApi(16)
	private MediaFormat(android.media.MediaFormat format)
	{
		this.frameworkMediaFormat = format;
		mimeType = format.getString(android.media.MediaFormat.KEY_MIME);
		maxInputSize = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_MAX_INPUT_SIZE);
		width = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_WIDTH);
		height = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_HEIGHT);
		channelCount = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_CHANNEL_COUNT);
		sampleRate = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_SAMPLE_RATE);
		initializationData = new ArrayList<byte[]>();
		// Codec-specific data buffers are stored under keys "csd-0", "csd-1", ...
		for (int i = 0; format.containsKey("csd-" + i); i++)
		{
			ByteBuffer buffer = format.getByteBuffer("csd-" + i);
			byte[] data = new byte[buffer.limit()];
			buffer.get(data);
			initializationData.add(data);
			buffer.flip();
		}
		maxWidth = NO_VALUE;
		maxHeight = NO_VALUE;
	}

	private MediaFormat(String mimeType, int maxInputSize, int width, int height, int channelCount, int sampleRate, List<byte[]> initializationData)
	{
		this.mimeType = mimeType;
		this.maxInputSize = maxInputSize;
		this.width = width;
		this.height = height;
		this.channelCount = channelCount;
		this.sampleRate = sampleRate;
		this.initializationData = initializationData == null ? Collections.<byte[]> emptyList() : initializationData;
		maxWidth = NO_VALUE;
		maxHeight = NO_VALUE;
	}

	/**
	 * Sets maximum video dimensions, also propagating them to a previously
	 * created framework format if one exists.
	 */
	public void setMaxVideoDimensions(int maxWidth, int maxHeight)
	{
		this.maxWidth = maxWidth;
		this.maxHeight = maxHeight;
		if (frameworkMediaFormat != null)
		{
			maybeSetMaxDimensionsV16(frameworkMediaFormat);
		}
	}

	public int getMaxVideoWidth()
	{
		return maxWidth;
	}

	public int getMaxVideoHeight()
	{
		return maxHeight;
	}

	@Override
	public int hashCode()
	{
		if (hashCode == 0)
		{
			int result = 17;
			// Bug fix: the ternary must be parenthesized. Without parentheses,
			// `31 * result + mimeType` was evaluated first (as string
			// concatenation), the null comparison was always false, and the
			// accumulated hash was replaced by mimeType.hashCode() alone.
			result = 31 * result + (mimeType == null ? 0 : mimeType.hashCode());
			result = 31 * result + maxInputSize;
			result = 31 * result + width;
			result = 31 * result + height;
			result = 31 * result + maxWidth;
			result = 31 * result + maxHeight;
			result = 31 * result + channelCount;
			result = 31 * result + sampleRate;
			for (int i = 0; i < initializationData.size(); i++)
			{
				result = 31 * result + Arrays.hashCode(initializationData.get(i));
			}
			hashCode = result;
		}
		return hashCode;
	}

	@Override
	public boolean equals(Object obj)
	{
		if (this == obj)
		{
			return true;
		}
		if (obj == null || getClass() != obj.getClass())
		{
			return false;
		}
		return equalsInternal((MediaFormat) obj, false);
	}

	/**
	 * Equality check that can optionally ignore the max video dimensions,
	 * which are mutable and not always relevant to format identity.
	 */
	public boolean equals(MediaFormat other, boolean ignoreMaxDimensions)
	{
		if (this == other)
		{
			return true;
		}
		if (other == null)
		{
			return false;
		}
		return equalsInternal(other, ignoreMaxDimensions);
	}

	private boolean equalsInternal(MediaFormat other, boolean ignoreMaxDimensions)
	{
		if (maxInputSize != other.maxInputSize || width != other.width || height != other.height || (!ignoreMaxDimensions && (maxWidth != other.maxWidth || maxHeight != other.maxHeight)) || channelCount != other.channelCount
				|| sampleRate != other.sampleRate || !Util.areEqual(mimeType, other.mimeType) || initializationData.size() != other.initializationData.size())
		{
			return false;
		}
		for (int i = 0; i < initializationData.size(); i++)
		{
			if (!Arrays.equals(initializationData.get(i), other.initializationData.get(i)))
			{
				return false;
			}
		}
		return true;
	}

	@Override
	public String toString()
	{
		return "MediaFormat(" + mimeType + ", " + maxInputSize + ", " + width + ", " + height + ", " + channelCount + ", " + sampleRate + ", " + maxWidth + ", " + maxHeight + ")";
	}

	/**
	 * @return A {@link MediaFormat} representation of this format.
	 */
	@TargetApi(16)
	public final android.media.MediaFormat getFrameworkMediaFormatV16()
	{
		if (frameworkMediaFormat == null)
		{
			android.media.MediaFormat format = new android.media.MediaFormat();
			format.setString(android.media.MediaFormat.KEY_MIME, mimeType);
			maybeSetIntegerV16(format, android.media.MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
			maybeSetIntegerV16(format, android.media.MediaFormat.KEY_WIDTH, width);
			maybeSetIntegerV16(format, android.media.MediaFormat.KEY_HEIGHT, height);
			maybeSetIntegerV16(format, android.media.MediaFormat.KEY_CHANNEL_COUNT, channelCount);
			maybeSetIntegerV16(format, android.media.MediaFormat.KEY_SAMPLE_RATE, sampleRate);
			for (int i = 0; i < initializationData.size(); i++)
			{
				format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
			}
			maybeSetMaxDimensionsV16(format);
			frameworkMediaFormat = format;
		}
		return frameworkMediaFormat;
	}

	@SuppressLint("InlinedApi")
	@TargetApi(16)
	private final void maybeSetMaxDimensionsV16(android.media.MediaFormat format)
	{
		maybeSetIntegerV16(format, android.media.MediaFormat.KEY_MAX_WIDTH, maxWidth);
		maybeSetIntegerV16(format, android.media.MediaFormat.KEY_MAX_HEIGHT, maxHeight);
	}

	// Only writes the key when the value is meaningful (not NO_VALUE).
	@TargetApi(16)
	private static final void maybeSetIntegerV16(android.media.MediaFormat format, String key, int value)
	{
		if (value != NO_VALUE)
		{
			format.setInteger(key, value);
		}
	}

	@TargetApi(16)
	private static final int getOptionalIntegerV16(android.media.MediaFormat format, String key)
	{
		return format.containsKey(key) ? format.getInteger(key) : NO_VALUE;
	}
}
| summerpulse/amlexo | src/com/google/android/exoplayer/MediaFormat.java | Java | apache-2.0 | 8,647 |
/**
* Copyright 2010 Wealthfront Inc. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.kaching.platform.testing;
import static java.lang.annotation.ElementType.TYPE;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Under the regime of {@link LessIOSecurityManager}, only classes annotated
 * with this annotation may use the DNS system to resolve hostnames or IP
 * addresses.
 *
 * <p>Annotating a class with {@link AllowNetworkMulticast},
 * {@link AllowNetworkListen}, or {@link AllowNetworkAccess} implies permission
 * to use the DNS system as described above, so those annotations do not need
 * to be combined with this one.
 */
@Retention(RUNTIME)
@Target(TYPE)
public @interface AllowDNSResolution {
}
| wealthfront/kawala | kawala-testing/src/main/java/com/kaching/platform/testing/AllowDNSResolution.java | Java | apache-2.0 | 1,246 |
package com.google.api.ads.dfp.jaxws.v201511;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for SwiffyConversionError.Reason.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="SwiffyConversionError.Reason">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="SERVER_ERROR"/>
* <enumeration value="INVALID_FLASH_FILE"/>
* <enumeration value="UNSUPPORTED_FLASH"/>
* <enumeration value="UNKNOWN"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "SwiffyConversionError.Reason")
@XmlEnum
public enum SwiffyConversionErrorReason {

    /**
     * Indicates the Swiffy service has an internal error that prevents the flash
     * asset being converted.
     */
    SERVER_ERROR,

    /**
     * Indicates the uploaded flash asset is not a valid flash file.
     */
    INVALID_FLASH_FILE,

    /**
     * Indicates the Swiffy service currently does not support converting this
     * flash asset.
     */
    UNSUPPORTED_FLASH,

    /**
     * The value returned if the actual value is not exposed by the requested API version.
     */
    UNKNOWN;

    /** Returns the XML value for this constant (identical to its name). */
    public String value() {
        return name();
    }

    /** Parses the XML value back into the corresponding constant. */
    public static SwiffyConversionErrorReason fromValue(String v) {
        return valueOf(v);
    }
}
| gawkermedia/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201511/SwiffyConversionErrorReason.java | Java | apache-2.0 | 1,709 |
"""REST API for DP.LA Service Hub BIBCAT Aggregator Feed"""
__author__ = "Jeremy Nelson, Mike Stabile"
import click
import datetime
import json
import math
import os
import pkg_resources
import xml.etree.ElementTree as etree
import requests
import rdflib
import urllib.parse
import reports
import bibcat.rml.processor as processor
from zipfile import ZipFile, ZIP_DEFLATED
from elasticsearch_dsl import Search, Q
from flask import abort, Flask, jsonify, request, render_template, Response
from flask import flash, url_for
from flask import flash
#from flask_cache import Cache
from resync import CapabilityList, ResourceDump, ResourceDumpManifest
from resync import ResourceList
from resync.resource import Resource
from resync.resource_list import ResourceListDupeError
from resync.dump import Dump
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile('config.py')
from rdfframework.rml import RmlManager
from rdfframework.configuration import RdfConfigManager
from rdfframework.datamanager import DefinitionManager
from rdfframework.datatypes import RdfNsManager
RmlManager().register_defs([('package_all', 'bibcat.maps')])
# Define vocabulary and definition file locations
DefinitionManager().add_file_locations([('vocabularies', ['rdf',
'rdfs',
'owl',
'schema',
'bf',
'skos',
'dcterm']),
('package_all',
'bibcat.rdfw-definitions')])
# Register RDF namespaces to use
RdfNsManager({'acl': '<http://www.w3.org/ns/auth/acl#>',
'bd': '<http://www.bigdata.com/rdf#>',
'bf': 'http://id.loc.gov/ontologies/bibframe/',
'dbo': 'http://dbpedia.org/ontology/',
'dbp': 'http://dbpedia.org/property/',
'dbr': 'http://dbpedia.org/resource/',
'dc': 'http://purl.org/dc/elements/1.1/',
'dcterm': 'http://purl.org/dc/terms/',
'dpla': 'http://dp.la/about/map/',
'edm': 'http://www.europeana.eu/schemas/edm/',
'es': 'http://knowledgelinks.io/ns/elasticsearch/',
'foaf': 'http://xmlns.com/foaf/0.1/',
'loc': 'http://id.loc.gov/authorities/',
'm21': '<http://knowledgelinks.io/ns/marc21/>',
'mads': '<http://www.loc.gov/mads/rdf/v1#>',
'mods': 'http://www.loc.gov/mods/v3#',
'ore': 'http://www.openarchives.org/ore/terms/',
'owl': 'http://www.w3.org/2002/07/owl#',
'relators': 'http://id.loc.gov/vocabulary/relators/',
'schema': 'http://schema.org/',
'skos': 'http://www.w3.org/2004/02/skos/core#',
'xsd': 'http://www.w3.org/2001/XMLSchema#'})
# Load application configuration into the RDF framework and grab the shared
# connection pool (triplestore, search index, etc.) used throughout this module.
CONFIG_MANAGER = RdfConfigManager(app.config, verify=False)
CONNECTIONS = CONFIG_MANAGER.conns
# BIBFRAME vocabulary namespace used when building RDF graphs.
BF = rdflib.Namespace("http://id.loc.gov/ontologies/bibframe/")
# W3C datetime format used for ResourceSync last-modified values.
W3C_DATE = "%Y-%m-%dT%H:%M:%SZ"
__version__ = "1.0.0"
#cache = Cache(app, config={"CACHE_TYPE": "filesystem",
#                           "CACHE_DIR": os.path.join(PROJECT_BASE, "cache")})
def __run_query__(query):
    """POSTs a SPARQL query to the configured triplestore.

    Returns the result bindings list on success, or None when the
    triplestore responds with an HTTP error (status >= 400).
    """
    payload = {"query": query,
               "format": "json"}
    response = requests.post(app.config.get("TRIPLESTORE_URL"),
                             data=payload)
    if response.status_code >= 400:
        return None
    return response.json().get('results').get('bindings')
def __get_instances__(offset=0):
    """Helper function used by siteindex and resourcedump views.

    Pages through bf:Instance resources in the triplestore 50,000 at a
    time, ordered by IRI, optionally including each instance's
    generation date.

    Args:
        offset(int): page (shard) number; records offset*50000 onward
            are returned

    Returns:
        query result bindings with ?instance and (optional) ?date
    """
    offset = int(offset)*50000
    sparql = """
    SELECT DISTINCT ?instance ?date
    WHERE {{
        ?instance rdf:type bf:Instance .
        OPTIONAL {{
            ?instance bf:generationProcess ?process .
            ?process bf:generationDate ?date .
        }}
    }} ORDER BY ?instance
    LIMIT 50000
    OFFSET {0}""".format(offset)
    instances = CONNECTIONS.datastore.query(sparql)
    return instances
def __get_mod_date__(entity_iri=None):
    """Returns the configured MOD_DATE if present, else the current UTC
    time formatted per W3C_DATE.

    Note: entity_iri is currently unused; it is kept for interface
    compatibility with callers.
    """
    try:
        return app.config["MOD_DATE"]
    except KeyError:
        return datetime.datetime.utcnow().strftime(W3C_DATE)
def __generate_profile__(instance_uri):
    """Retrieves the stored DPLA MAPv4 JSON-LD profile for an instance.

    Searches Elasticsearch for the document whose uri term equals
    "<instance_uri>#Work" and returns its nested
    bf_hasInstance[0].bf_hasItem[0].rml_map.map4_json_ld string.
    Returns None when there is no hit or the hit has an empty _source.

    Args:
        instance_uri(str): IRI of the bf:Instance (without #Work fragment)
    """
    search = Search(using=CONNECTIONS.search.es).query(
        Q("term", uri="{}#Work".format(instance_uri))).source(
        ["bf_hasInstance.bf_hasItem.rml_map.map4_json_ld"])
    result = search.execute()
    if len(result.hits.hits) < 1:
        #abort(404)
        #click.echo("{}#Work not found".format(instance_uri))
        return
    if len(result.hits.hits[0]["_source"]) < 1:
        #abort(404)
        #click.echo("{}#Work missing _source".format(instance_uri))
        return
    # NOTE(review): assumes the nested arrays are non-empty whenever _source
    # is non-empty -- a KeyError/IndexError here would propagate to callers.
    raw_map4 = result.hits.hits[0]["_source"]["bf_hasInstance"][0]\
        ["bf_hasItem"][0]["rml_map"]["map4_json_ld"]
    return raw_map4
def __generate_resource_dump__():
    """Builds a ResourceSync ResourceDump covering all instances with items.

    Counts bf:Instance resources that have at least one bf:Item, shards
    them into 50,000-record zip files via __generate_zip_file__, and adds
    one dump Resource per shard with its size and last-modified date.

    Returns:
        resync.ResourceDump
    """
    r_dump = ResourceDump()
    r_dump.ln.append({"rel": "resourcesync",
                      "href": url_for('capability_list')})
    bindings = CONNECTIONS.datastore.query("""
    SELECT (count(?s) as ?count) WHERE {
        ?s rdf:type bf:Instance .
        ?item bf:itemOf ?s .
    }""")
    count = int(bindings[0].get('count').get('value'))
    shards = math.ceil(count/50000)
    for i in range(0, shards):
        zip_info = __generate_zip_file__(i)
        # The cached-zip path returns a numeric mtime while a fresh build
        # returns an ISO timestamp string; handle both date shapes.
        try:
            zip_modified = datetime.datetime.fromtimestamp(zip_info.get('date'))
            last_mod = zip_modified.strftime("%Y-%m-%d")
        except TypeError:
            last_mod = zip_info.get('date')[0:10]
        # NOTE(review): zip_info may lack an 'errors' key on the cached path,
        # making len(None) raise TypeError here -- confirm against
        # __generate_zip_file__'s return contract.
        click.echo("Total errors {:,}".format(len(zip_info.get('errors'))))
        r_dump.add(
            Resource(url_for('resource_zip',
                             count=i*50000),
                     lastmod=last_mod,
                     mime_type="application/zip",
                     length=zip_info.get("size")
                     )
            )
    return r_dump
def __generate_zip_file__(offset=0):
    """Builds (or reuses) a zip dump of up to 50,000 instance JSON profiles.

    Each profile is written to resources/<key>.json inside the zip along
    with a ResourceSync manifest.xml. When the target zip already exists on
    disk, its metadata is returned without rebuilding.

    Args:
        offset(int): shard number passed to __get_instances__ (covers
            records offset*50000 .. offset*50000+49999)

    Returns:
        dict with keys:
            date: ISO timestamp (fresh build) or file mtime float (cached)
            size: zip size in bytes
            errors: list of instance IRIs whose profile could not be built
    """
    start = datetime.datetime.utcnow()
    click.echo("Started at {}".format(start.ctime()))
    manifest = ResourceDumpManifest()
    manifest.modified = datetime.datetime.utcnow().isoformat()
    manifest.ln.append({"rel": "resourcesync",
                        "href": url_for('capability_list')})
    file_name = "{}-{:03}.zip".format(
        datetime.datetime.utcnow().toordinal(),
        offset)
    tmp_location = os.path.join(app.config.get("DIRECTORIES")[0].get("path"),
                                "dump/{}".format(file_name))
    if os.path.exists(tmp_location) is True:
        # Bug fix: include an (empty) errors list so callers doing
        # len(result['errors']) also work on the cached path.
        return {"date": os.path.getmtime(tmp_location),
                "size": os.path.getsize(tmp_location),
                "errors": []}
    dump_zip = ZipFile(tmp_location,
                       mode="w",
                       compression=ZIP_DEFLATED,
                       allowZip64=True)
    instances = __get_instances__(offset)
    errors = []
    click.echo("Iterating through {:,} instances".format(len(instances)))
    for i, row in enumerate(instances):
        instance_iri = row.get("instance").get('value')
        key = instance_iri.split("/")[-1]
        if not "date" in row:
            last_mod = __get_mod_date__()
        else:
            last_mod = "{}".format(row.get("date").get("value")[0:10])
        path = "resources/{}.json".format(key)
        # Lightweight progress display: a dot every 25 rows, a count every 100.
        if not i % 25 and i > 0:
            click.echo(".", nl=False)
        if not i % 100:
            click.echo("{:,}".format(i), nl=False)
        raw_json = __generate_profile__(instance_iri)
        if raw_json is None:
            # No stored profile for this instance; record and continue.
            errors.append(instance_iri)
            continue
        elif len(raw_json) < 1:
            # Empty profile: abort this shard early (manifest stays partial).
            click.echo(instance_iri, nl=False)
            break
        dump_zip.writestr(path,
                          raw_json)
        manifest.add(
            Resource(instance_iri,
                     lastmod=last_mod,
                     length="{}".format(len(raw_json)),
                     path=path))
    dump_zip.writestr("manifest.xml", manifest.as_xml())
    dump_zip.close()
    end = datetime.datetime.utcnow()
    zip_size = os.stat(tmp_location).st_size
    # Bug fix: report the zip size, not the loop counter, in the summary line
    # (the counter was also undefined when the shard had zero instances).
    click.echo("Finished at {}, total time {} min, size={}".format(
        end.ctime(),
        (end-start).seconds / 60.0,
        zip_size))
    return {"date": datetime.datetime.utcnow().isoformat(),
            "size": zip_size,
            "errors": errors}
@app.template_filter("pretty_num")
def nice_number(raw_number):
    """Jinja filter: format a number with thousands separators.

    Returns the empty string for None; otherwise coerces to int and
    comma-groups the digits.
    """
    return '' if raw_number is None else "{:,}".format(int(raw_number))
@app.errorhandler(404)
def page_not_found(e):
    # Renders the custom 404 template for any unmatched route.
    return render_template("404.html", error=e), 404
@app.route("/")
def home():
    """Default page"""
    # Total triple count doubles as a loaded-data sanity check and a display value.
    result = CONNECTIONS.datastore.query(
        "SELECT (COUNT(*) as ?count) WHERE {?s ?p ?o }")
    count = result[0].get("count").get("value")
    if int(count) < 1:
        # An empty triplestore means the hub's RDF data has not been loaded yet.
        flash("Triplestore is empty, please load service hub RDF data")
    return render_template("index.html",
                           version=__version__,
                           count="{:,}".format(int(count)))
@app.route("/reports/")
@app.route("/reports/<path:name>")
def reporting(name=None):
    """Report index (no name given) or a single named report page."""
    if name is None:
        return render_template("reports/index.html")
    data = reports.report_router(name)
    if data is None:
        abort(404)
    template = "reports/{0}.html".format(name)
    return render_template(template, data=data)
@app.route("/<path:type_of>/<path:name>")
def authority_view(type_of, name=None):
    """Generates a RDF:Description view for Service Hub name,
    topic, agent, and other types of BIBFRAME entities

    Args:
        type_of(str): Type of entity
        name(str): slug of name, title, or other textual identifier

    Returns:
        JSON-LD serialization of the entity's label/value triples,
        or a 404 if the entity is not in the triplestore.
    """
    if name is None:
        # Display browse view of authorities
        return "Browse display for {}".format(type_of)
    uri = "{0}{1}/{2}".format(app.config.get("BASE_URL"),
                              type_of,
                              name)
    # Doubled braces ({{ }}) are literal braces in the SPARQL after
    # str.format; {entity}/{type_of} are substituted below.
    entity_sparql = PREFIX + """
SELECT DISTINCT ?label ?value
WHERE {{
    <{entity}> rdf:type {type_of} .
    OPTIONAL {{
        <{entity}> rdfs:label ?label
    }}
    OPTIONAL {{
        <{entity}> rdf:value ?value
    }}
}}""".format(entity=uri,
             type_of="bf:{}".format(type_of.title()))
    entity_results = __run_query__(entity_sparql)
    if len(entity_results) < 1:
        abort(404)
    entity_graph = rdflib.Graph()
    iri = rdflib.URIRef(uri)
    entity_graph.add((iri, rdflib.RDF.type, getattr(BF, type_of.title())))
    for row in entity_results:
        if 'label' in row:
            literal = rdflib.Literal(row.get('label').get('value'),
                                     datatype=row.get('label').get('datatype'))
            entity_graph.add((iri, rdflib.RDFS.label, literal))
        if 'value' in row:
            literal = rdflib.Literal(row.get('value').get('value'),
                                     datatype=row.get('value').get('datatype'))
            entity_graph.add((iri, rdflib.RDF.value, literal))
    # NOTE(review): mutates the module-level MAPv4_context on every
    # request -- not thread-safe if Flask runs with threads; confirm.
    MAPv4_context["bf"] = str(BF)
    raw_entity = entity_graph.serialize(format='json-ld',
                                        context=MAPv4_context)
    return Response(raw_entity, mimetype="application/json")
@app.route("/capabilitylist.xml")
def capability_list():
    """ResourceSync capability list describing the site's sync endpoints."""
    capabilities = CapabilityList()
    capabilities.modified = __get_mod_date__()
    self_link = {"href": url_for('capability_list'),
                 "rel": "describedby",
                 "type": "application/xml"}
    capabilities.ln.append(self_link)
    # Advertise the resource list and the resource dump, in that order.
    for endpoint, capability in (('site_index', "resourcelist"),
                                 ('resource_dump', "resourcedump")):
        capabilities.add(Resource(url_for(endpoint), capability=capability))
    return Response(capabilities.as_xml(), mimetype="text/xml")
@app.route("/resourcedump.xml")
def resource_dump():
    """Serve the ResourceSync resource-dump manifest as XML.

    Returns:
        flask.Response with the dump manifest and a text/xml MIME type.
    """
    xml = __generate_resource_dump__()
    # Bug fix: the second positional argument of flask.Response is
    # `status`, so Response(..., "text/xml") previously set the HTTP
    # status line to "text/xml" instead of setting the content type.
    return Response(xml.as_xml(), mimetype="text/xml")
@app.route("/resourcedump-<int:count>.zip")
def resource_zip(count):
    """Send the pre-generated resource-dump zip shard ``count``.

    Args:
        count: shard number taken from the URL.

    Returns:
        The zip file, or a 404 if that shard has not been generated.
    """
    # Bug fix: a dead first assignment referenced an undefined
    # `file_name` variable, raising NameError on every request before
    # the real path below was ever computed.
    zip_location = os.path.join(PROJECT_BASE,
                                "dump/{}.zip".format(count))
    if not os.path.exists(zip_location):
        abort(404)
    return send_file(zip_location)
@app.route("/siteindex.xml")
#@cache.cached(timeout=86400) # Cached for 1 day
def site_index():
    """Generates siteindex XML, each sitemap has a maximum of 50k links
    dynamically generates the necessary number of sitemaps in the
    template"""
    result = CONNECTIONS.datastore.query("""SELECT (count(?work) as ?count)
WHERE {
    ?work rdf:type bf:Work .
    ?instance bf:instanceOf ?work .
    ?item bf:itemOf ?instance . }""")
    count = int(result[0].get('count').get('value'))
    # 50,000 URLs per sitemap is the sitemap-protocol limit; `shards` is
    # how many sitemap files the index advertises.
    shards = math.ceil(count/50000)
    mod_date = app.config.get('MOD_DATE')
    if mod_date is None:
        mod_date=datetime.datetime.utcnow().strftime("%Y-%m-%d")
    # `count` here is a range of shard numbers the template iterates over,
    # not the triple count above.
    xml = render_template("siteindex.xml",
                          count=range(1, shards+1),
                          last_modified=mod_date)
    return Response(xml, mimetype="text/xml")
@app.route("/sitemap<int:offset>.xml", methods=["GET"])
#@cache.cached(timeout=86400)
def sitemap(offset=0):
    """Render one sitemap shard as a ResourceSync resource list.

    URLs are 1-based in the route but __get_instances__ expects a
    0-based shard, hence the decrement below.
    """
    if offset > 0:
        offset = offset - 1
    instances = __get_instances__(offset)
    resource_list = ResourceList()
    # Counts duplicate resources rejected by the resource list; currently
    # informational only -- it is never logged or returned.
    dedups = 0
    for i,row in enumerate(instances):
        instance = row.get('instance')
        if "date" in row:
            # SPARQL binding dates are truncated to YYYY-MM-DD.
            last_mod = row.get("date").get("value")[0:10]
        else:
            last_mod = datetime.datetime.utcnow().strftime(
                W3C_DATE)
        try:
            resource_list.add(
                Resource("{}.json".format(instance.get("value")),
                    lastmod=last_mod)
            )
        except ResourceListDupeError:
            dedups += 1
            continue
    xml = resource_list.as_xml()
    return Response(xml, mimetype="text/xml")
@app.route("/<path:uid>.json")
def detail(uid=None):
    """Generates DPLA Map V4 JSON-LD for the entity at BASE_URL + uid.

    Returns:
        application/json response with the generated profile, '' for
        favicon requests, or 404 when no uid was supplied.
    """
    # Guard-order fix: check for a missing uid before calling
    # uid.startswith, which would raise AttributeError on None.
    if uid is None:
        abort(404)
    if uid.startswith('favicon'):
        return ''
    click.echo("UID is {}".format(uid))
    uri = app.config.get("BASE_URL") + uid
    raw_map_4 = __generate_profile__(uri)
    return Response(raw_map_4, mimetype="application/json")
if __name__ == '__main__':
    # Development entry point only: debug=True enables the Werkzeug
    # debugger (arbitrary code execution) and must never be combined
    # with host='0.0.0.0' on a production deployment.
    app.run(debug=True, host='0.0.0.0')
| KnowledgeLinks/dpla-service-hub | api.py | Python | apache-2.0 | 14,727 |
package ru.job4j.convertation;
import java.util.ArrayList;
import java.util.List;
/**
 * Conversions between boxed integer lists and two-dimensional int arrays.
 */
public class ConvertList {
    /**
     * Flattens a two-dimensional array into a list, row by row.
     *
     * <p>Bug fix: the previous inner loop iterated {@code array.length}
     * columns, which only worked for square arrays and silently read the
     * wrong cells (or overflowed) for rectangular/jagged input.
     *
     * @param array source array; rows may have different lengths
     * @return boxed elements in row-major order
     */
    public ArrayList<Integer> toList(int[][] array) {
        ArrayList<Integer> list = new ArrayList<>();
        for (int[] row : array) {
            for (int value : row) {
                list.add(value);
            }
        }
        return list;
    }
    /**
     * Packs a list into a 2D array with {@code rows} rows, padding the
     * tail with zeros.
     *
     * <p>Bug fix: the previous version always allocated
     * {@code rows x rows} cells, padded with the wrong number of zeros
     * (size % rows instead of the remainder to a full grid) and threw
     * {@code ArrayIndexOutOfBoundsException} for most non-square sizes.
     * The column count is now the smallest value that fits every element.
     *
     * @param list source values; zero-padded (mutated) to fill the grid,
     *             matching the original method's side effect
     * @param rows number of rows in the result, must be positive
     * @return a rows x ceil(size/rows) array filled in row-major order
     */
    public int[][] toArray(ArrayList<Integer> list, int rows) {
        // Ceiling division; at least one column so an empty list still
        // yields a well-formed array.
        int cols = Math.max(1, (list.size() + rows - 1) / rows);
        while (list.size() < rows * cols) {
            list.add(0);
        }
        int[][] array = new int[rows][cols];
        int index = 0;
        for (int r = 0; r < rows; r++) {
            for (int c = 0; c < cols; c++) {
                array[r][c] = list.get(index++);
            }
        }
        return array;
    }
    /**
     * Concatenates every array in the list into one flat list,
     * preserving order.
     *
     * @param list list of int arrays
     * @return flattened list of boxed values
     */
    public List<Integer> convert(List<int[]> list) {
        List<Integer> result = new ArrayList<>();
        for (int[] chunk : list) {
            for (int value : chunk) {
                result.add(value);
            }
        }
        return result;
    }
} // | Clydeside/ALipatov | chapter_003/src/main/java/ru/job4j/convertation/ConvertList.java | Java | apache-2.0 | 1,197
| Clydeside/ALipatov | chapter_003/src/main/java/ru/job4j/convertation/ConvertList.java | Java | apache-2.0 | 1,197 |
/**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.cn.jee.modules.qrtz.web;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import com.cn.jee.common.config.Global;
import com.cn.jee.common.persistence.Page;
import com.cn.jee.common.utils.StringUtils;
import com.cn.jee.common.web.BaseController;
import com.cn.jee.modules.qrtz.entity.QrtzJobDetails;
import com.cn.jee.modules.qrtz.service.QrtzJobDetailsService;
/**
 * Controller for Quartz trigger job details (QRTZ_JOB_DETAILS records).
 *
 * @author 1002360
 * @version 2017-03-18
 */
@Controller
@RequestMapping(value = "${adminPath}/qrtz/qrtzJobDetails")
public class QrtzJobDetailsController extends BaseController {
	@Autowired
	private QrtzJobDetailsService qrtzJobDetailsService;
	/**
	 * Resolves the request's entity from its composite key
	 * (schedName, jobName, jobGroup); returns a fresh instance when the
	 * key is incomplete or no record matches.
	 */
	@ModelAttribute
	public QrtzJobDetails get(@RequestParam(value = "schedName", required = false) String schedName, @RequestParam(value = "jobName", required = false) String jobName, @RequestParam(value = "jobGroup", required = false) String jobGroup) {
		QrtzJobDetails entity = null;
		if (StringUtils.isNotBlank(schedName) && StringUtils.isNotBlank(jobName) && StringUtils.isNotBlank(jobGroup)) {
			entity = qrtzJobDetailsService.get(schedName, jobName, jobGroup);
		}
		if (entity == null) {
			entity = new QrtzJobDetails();
		}
		return entity;
	}
	/** Paged list view of job details. */
	@RequiresPermissions("qrtz:qrtzJobDetails:view")
	@RequestMapping(value = { "list", "" })
	public String list(QrtzJobDetails qrtzJobDetails, HttpServletRequest request, HttpServletResponse response, Model model) {
		Page<QrtzJobDetails> page = qrtzJobDetailsService.findPage(new Page<QrtzJobDetails>(request, response), qrtzJobDetails);
		model.addAttribute("page", page);
		return "modules/qrtz/qrtzJobDetailsList";
	}
	/** Edit/detail form view. */
	@RequiresPermissions("qrtz:qrtzJobDetails:view")
	@RequestMapping(value = "form")
	public String form(QrtzJobDetails qrtzJobDetails, Model model) {
		model.addAttribute("qrtzJobDetails", qrtzJobDetails);
		return "modules/qrtz/qrtzJobDetailsForm";
	}
	/**
	 * Validates and saves the entity, then redirects to the list view.
	 * The flash message is user-facing text and intentionally unchanged.
	 */
	@RequiresPermissions("qrtz:qrtzJobDetails:edit")
	@RequestMapping(value = "save")
	public String save(QrtzJobDetails qrtzJobDetails, Model model, RedirectAttributes redirectAttributes) {
		if (!beanValidator(model, qrtzJobDetails)) {
			return form(qrtzJobDetails, model);
		}
		qrtzJobDetailsService.save(qrtzJobDetails);
		addMessage(redirectAttributes, "保存任务明细成功");
		return "redirect:" + Global.getAdminPath() + "/qrtz/qrtzJobDetails/?repage";
	}
	/** Deletes the entity, then redirects to the list view. */
	@RequiresPermissions("qrtz:qrtzJobDetails:edit")
	@RequestMapping(value = "delete")
	public String delete(QrtzJobDetails qrtzJobDetails, RedirectAttributes redirectAttributes) {
		qrtzJobDetailsService.delete(qrtzJobDetails);
		addMessage(redirectAttributes, "删除触发器Job明细成功");
		return "redirect:" + Global.getAdminPath() + "/qrtz/qrtzJobDetails/?repage";
	}
} | copy4dev/jee-base | src/main/java/com/cn/jee/modules/qrtz/web/QrtzJobDetailsController.java | Java | apache-2.0 | 3,376 |
package gaia3d.domain;
/**
 * Yes/No flag as an enum. Presumably persisted as a single 'Y'/'N'
 * character column -- confirm against the mapping layer.
 */
public enum YOrN {
	Y,
	N;
} | Gaia3D/mago3d | mago3d-user/src/main/java/gaia3d/domain/YOrN.java | Java | apache-2.0 | 52 |
module ZAWS
  class External
    class AWSCLI
      class Commands
        class ELB
          # Fluent builder for the awscli
          # "elb register-instances-with-load-balancer" subcommand.
          class RegisterInstancesWithLoadBalancer
            def initialize(shellout=nil, awscli=nil)
              @shellout = shellout
              @awscli = awscli
              clear_settings
              self
            end

            # Root AWS command object, created lazily on first use.
            def aws
              @aws ||= ZAWS::External::AWSCLI::Commands::AWS.new(self)
              @aws
            end

            # Resets all accumulated options; returns self for chaining.
            def clear_settings
              @load_balancer = nil
              @instance_ids = nil
              self
            end

            # Sets the --instances argument; returns self for chaining.
            def instances(id)
              @instance_ids = id
              self
            end

            # Sets the --load-balancer-name argument; returns self.
            def load_balancer_name(name)
              @load_balancer = name
              self
            end

            # Assembles the subcommand string from whichever options
            # have been supplied so far.
            def get_command
              parts = ["elb register-instances-with-load-balancer"]
              parts.push("--load-balancer-name #{@load_balancer}") if @load_balancer
              parts.push("--instances #{@instance_ids}") if @instance_ids
              parts.join(" ")
            end
          end
        end
      end
    end
  end
end
| zynxhealth/zaws | lib/zaws/external/awscli/commands/elb/register_instances_with_load_balancer.rb | Ruby | apache-2.0 | 1,154 |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
* @param {string} component - Component name
* @param {object[]} collection - Array of objects describing available content
* @return {object|false} Either a found item, or false.
*/
function componentHasDocs(component, collection) {
const item = collection.filter((c) => `${c.fileSlug}.njk` === component);
return item.length > 0 ? item[0] : false;
}
/**
 * Eleventy config hook: registers `componentHasDocs` as a template filter.
 *
 * @param {object} eleventy - Eleventy configuration object.
 */
module.exports = (eleventy) => {
  eleventy.addFilter('componentHasDocs', componentHasDocs);
};
| chromeos/chromeos.dev | lib/filters/component-has-docs.js | JavaScript | apache-2.0 | 1,060 |
package com.github.obourgain.elasticsearch.http.handler.document.termvectors;
import static com.github.obourgain.elasticsearch.http.TestFilesUtils.readFromClasspath;
import static org.assertj.core.api.Assertions.assertThat;
import org.elasticsearch.common.bytes.BytesArray;
import org.junit.Test;
import com.github.obourgain.elasticsearch.http.response.entity.TermVector;
import com.github.obourgain.elasticsearch.http.response.entity.TermVectorTest;
public class TermVectorResponseTest {
    /** Parses a canned term-vector JSON fixture and checks all mapped fields. */
    @Test
    public void should_parse_response() throws Exception {
        String json = readFromClasspath("json/termvector/termvector_response.json");
        TermVectorResponse response = new TermVectorResponse().parse(new BytesArray(json));
        assertTermVectorResponse(response);
    }
    /** Shared assertions for a parsed term-vector response; public static so other tests can reuse it. */
    public static void assertTermVectorResponse(TermVectorResponse response) {
        assertThat(response.getId()).isEqualTo("1");
        assertThat(response.getIndex()).isEqualTo("twitter");
        assertThat(response.getType()).isEqualTo("tweet");
        assertThat(response.getVersion()).isEqualTo(1);
        TermVector termVector = response.getTermVector();
        TermVectorTest.assertTermVector(termVector);
        TermVectorTest.assertFieldStatistics(termVector);
    }
} | obourgain/elasticsearch-http | src/test/java/com/github/obourgain/elasticsearch/http/handler/document/termvectors/TermVectorResponseTest.java | Java | apache-2.0 | 1,278 |
/*
* Copyright (C) 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.googlecode.eyesfree.brailleback;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.PowerManager;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v4.view.accessibility.AccessibilityNodeInfoCompat;
import android.text.Spanned;
import android.util.Log;
import com.googlecode.eyesfree.braille.display.BrailleInputEvent;
import com.googlecode.eyesfree.braille.display.Display;
import com.googlecode.eyesfree.braille.display.DisplayClient;
import com.googlecode.eyesfree.braille.translate.BrailleTranslator;
import com.googlecode.eyesfree.braille.translate.TranslationResult;
import com.googlecode.eyesfree.brailleback.wrapping.SimpleWrapStrategy;
import com.googlecode.eyesfree.brailleback.wrapping.WordWrapStrategy;
import com.googlecode.eyesfree.brailleback.wrapping.WrapStrategy;
import com.googlecode.eyesfree.utils.AccessibilityNodeInfoUtils;
import com.googlecode.eyesfree.utils.LogUtils;
import com.googlecode.eyesfree.utils.SharedPreferencesUtils;
import java.util.Arrays;
import java.util.Comparator;
/**
* Keeps track of the current display content and handles panning.
*/
public class DisplayManager
implements Display.OnConnectionStateChangeListener,
Display.OnInputEventListener,
TranslatorManager.OnTablesChangedListener,
SharedPreferences.OnSharedPreferenceChangeListener {
  /** Dot pattern used to overlay characters under a selection. */
  // TODO: Make customizable.
  private static final int SELECTION_DOTS = 0xC0;
  /** Dot pattern used to overlay characters in a focused element. */
  // TODO: Make customizable.
  private static final int FOCUS_DOTS = 0xC0;
  // Blink cycle timing for the cursor/focus overlay, in milliseconds.
  // NOTE(review): the consumer of these constants is outside this chunk --
  // presumably the code that toggles overlaysOn/blinkNeeded; confirm.
  private static final long BLINK_OFF_MILLIS = 800;
  private static final long BLINK_ON_MILLIS = 600;
  /**
   * Callback interface for notifying interested callers when the display is
   * panned out of the available content. A typical reaction to such an
   * event would be to move focus to a different area of the screen and
   * display it.
   */
  public interface OnPanOverflowListener {
    /** Called when the user pans left past the start of {@code content}. */
    void onPanLeftOverflow(Content content);
    /** Called when the user pans right past the end of {@code content}. */
    void onPanRightOverflow(Content content);
  }
  /**
   * Listener for input events that also get information about the current
   * display content and position mapping for commands with a positional
   * argument.
   */
  public interface OnMappedInputEventListener {
    /**
     * Handles an input {@code event} that was received when
     * {@code content} was present on the display.
     *
     * If the input event has a positional argument, it is mapped
     * according to the display pan position in the content so that
     * it corresponds to the character that the user touched.
     *
     * {@code event} and {@code content} are owned by the caller and may
     * not be referenced after this method returns.
     *
     * NOTE: Since the display is updated asynchronously, there is a chance
     * that the actual content on the display when the user invoked
     * the command is different from {@code content}.
     */
    void onMappedInputEvent(BrailleInputEvent event, Content content);
  }
  /**
   * Builder-like class used to construct the content to put on the display.
   *
   * This object contains a {@link CharSequence} that represents what
   * characters to put on the display. This sequence can be a
   * {@link Spannable} so that the characters can be annotated with
   * information about cursors and focus which will affect how the content
   * is presented on the display. Arbitrary java objects may also be
   * included in the {@link Spannable} which can be used to determine what
   * action to take when the user invokes key commands related to a
   * particular position on the display (i.e. involving a cursor routing
   * key). In particular, {@link AccessibilityNodeInfoCompat}s may be
   * included, in which case they will be recycled by the
   * {@link Content#recycle} method. To facilitate movement outside the
   * bounds of the current {@link Content},
   * {@link AccessibilityNodeInfoCompat}s that represent the extent of the
   * content can also be added, but in that case, they are not included in
   * the {@link Spannable}.
   */
  public static class Content {
    /**
     * Pan strategy that moves the display to the leftmost position.
     * This is the default panning strategy.
     */
    public static final int PAN_RESET = 0;
    /**
     * Pan strategy that positions the display so that it overlaps the
     * start of a selection or focus mark. Falls back on {@code PAN_RESET}
     * if there is no selection or focus.
     */
    public static final int PAN_CURSOR = 1;
    /**
     * Pan strategy that tries to position the display close to the
     * position that corresponds to the panning position in the previously
     * displayed content. Spans of type
     * {@link AccessibilityNodeInfoCompat} are used to identify the
     * corresponding content in the old and new display content.
     * Falls back on {@code SPAN_CURSOR} if a corresponding position can't
     * be found.
     */
    public static final int PAN_KEEP = 2;
    /**
     * Default contraction behaviour, allow contractions unless there is a
     * selection span in the content.
     */
    public static final int CONTRACT_DEFAULT = 0;
    /**
     * Allow contraction, regardless of the presence of a selection
     * span.
     */
    public static final int CONTRACT_ALWAYS_ALLOW = 1;
    private CharSequence text;
    private AccessibilityNodeInfoCompat firstNode;
    private AccessibilityNodeInfoCompat lastNode;
    private int panStrategy;
    private int contractionMode;
    private boolean splitParagraphs;
    private boolean editable = false;
    public Content() {
    }
    /** Shortcut to just set text for a one-off use. */
    public Content(CharSequence textArg) {
      text = textArg;
    }
    public Content setText(CharSequence textArg) {
      text = textArg;
      return this;
    }
    public CharSequence getText() {
      return text;
    }
    /** Returns the text as {@link Spanned}, or null if it is not spanned. */
    public Spanned getSpanned() {
      if (text instanceof Spanned) {
        return (Spanned) text;
      }
      return null;
    }
    /** Stores a copy of {@code node}; recycles any previously set first node. */
    public Content setFirstNode(AccessibilityNodeInfoCompat node) {
      AccessibilityNodeInfoUtils.recycleNodes(firstNode);
      firstNode = AccessibilityNodeInfoCompat.obtain(node);
      return this;
    }
    public AccessibilityNodeInfoCompat getFirstNode() {
      return firstNode;
    }
    /** Stores a copy of {@code node}; recycles any previously set last node. */
    public Content setLastNode(AccessibilityNodeInfoCompat node) {
      AccessibilityNodeInfoUtils.recycleNodes(lastNode);
      lastNode = AccessibilityNodeInfoCompat.obtain(node);
      return this;
    }
    public AccessibilityNodeInfoCompat getLastNode() {
      return lastNode;
    }
    /** Sets one of the PAN_* strategies above. */
    public Content setPanStrategy(int strategy) {
      panStrategy = strategy;
      return this;
    }
    public int getPanStrategy() {
      return panStrategy;
    }
    /** Sets one of the CONTRACT_* modes above. */
    public Content setContractionMode(int mode) {
      contractionMode = mode;
      return this;
    }
    public int getContractionMode() {
      return contractionMode;
    }
    public Content setSplitParagraphs(boolean value) {
      splitParagraphs = value;
      return this;
    }
    public boolean isSplitParagraphs() {
      return splitParagraphs;
    }
    public Content setEditable(boolean value) {
      editable = value;
      return this;
    }
    public boolean isEditable() {
      return editable;
    }
    /**
     * Translates the text content, preserving any verbatim braille that is embedded in a
     * BrailleSpan. The current implementation of this method only handles the first BrailleSpan;
     * all subsequent BrailleSpans are ignored.
     *
     * @param translator The translator used for translating the subparts of the text without
     *     embedded BrailleSpans.
     * @param cursorPosition The position of the cursor; if it occurs in a section of the text
     *     without BrailleSpans, then the final cursor position in the output braille by the
     *     translator. Otherwise, if the cursor occurs within a BrailleSpan section, the final
     *     cursor position in the output braille is set to the first braille cell of the
     *     BrailleSpan.
     * @param computerBrailleAtCursor This parameter is passed through to the translator; if
     *     true,then contracted translators are instructed to translate the word under the cursor
     *     using computer braille (instead of contracted braille) to make editing easier.
     * @return The result of translation, possibly empty, not null.
     */
    public TranslationResult translateWithVerbatimBraille(
        BrailleTranslator translator, int cursorPosition, boolean computerBrailleAtCursor) {
      if (translator == null) {
        return createEmptyTranslation(text);
      }
      // Assume that we have at most one BrailleSpan since we currently
      // never add more than one BrailleSpan.
      // Also ignore BrailleSpans with zero-length span or no braille for
      // now because we don't currently add such BrailleSpans.
      DisplaySpans.BrailleSpan brailleSpan = null;
      int start = -1;
      int end = -1;
      if (text instanceof Spanned) {
        Spanned spanned = (Spanned) text;
        DisplaySpans.BrailleSpan[] spans = spanned.getSpans(
            0, spanned.length(), DisplaySpans.BrailleSpan.class);
        if (spans.length > 1) {
          LogUtils.log(this, Log.WARN,
              "More than one BrailleSpan, handling first only");
        }
        if (spans.length != 0) {
          int spanStart = spanned.getSpanStart(spans[0]);
          int spanEnd = spanned.getSpanEnd(spans[0]);
          if (spans[0].braille != null && spans[0].braille.length != 0
              && spanStart < spanEnd) {
            brailleSpan = spans[0];
            start = spanStart;
            end = spanEnd;
          }
        }
      }
      if (brailleSpan != null) {
        // Chunk the text into three sections:
        // left: [0, start) - needs translation
        // mid: [start, end) - use the literal braille provided
        // right: [end, length) - needs translation
        CharSequence left = text.subSequence(0, start);
        TranslationResult leftTrans = translator.translate(
            left.toString(),
            cursorPosition < start ? cursorPosition : -1,
            cursorPosition < start && computerBrailleAtCursor);
        CharSequence right = text.subSequence(end, text.length());
        TranslationResult rightTrans = translator.translate(
            right.toString(),
            cursorPosition >= end ? cursorPosition - end : -1,
            cursorPosition >= end && computerBrailleAtCursor);
        // If one of the left or right translations is not valid, then
        // we will fall back by ignoring the BrailleSpan and
        // translating everything normally. (Chances are that
        // translating the whole text will fail also, but it wouldn't
        // hurt to try.)
        if (leftTrans == null || rightTrans == null) {
          LogUtils.log(this, Log.ERROR,
              "Could not translate left or right subtranslation, "
              + "falling back on default translation");
          return translateOrDefault(translator, cursorPosition,
              computerBrailleAtCursor);
        }
        // Braille cell offsets of the verbatim (BrailleSpan) section
        // within the stitched-together output.
        int startBraille = leftTrans.getCells().length;
        int endBraille = startBraille + brailleSpan.braille.length;
        int totalBraille = endBraille + rightTrans.getCells().length;
        // Copy braille cells.
        byte[] cells = new byte[totalBraille];
        System.arraycopy(leftTrans.getCells(), 0,
            cells, 0, leftTrans.getCells().length);
        System.arraycopy(brailleSpan.braille, 0,
            cells, startBraille, brailleSpan.braille.length);
        System.arraycopy(rightTrans.getCells(), 0,
            cells, endBraille, rightTrans.getCells().length);
        // Copy text-to-braille indices.
        // Every text position inside the span maps to the span's first cell.
        int[] leftTtb = leftTrans.getTextToBraillePositions();
        int[] rightTtb = rightTrans.getTextToBraillePositions();
        int[] textToBraille = new int[text.length()];
        System.arraycopy(leftTtb, 0, textToBraille, 0, start);
        for (int i = start; i < end; ++i) {
          textToBraille[i] = startBraille;
        }
        for (int i = end; i < textToBraille.length; ++i) {
          textToBraille[i] = endBraille + rightTtb[i - end];
        }
        // Copy braille-to-text indices.
        // Every cell inside the span maps back to the span's first character.
        int[] leftBtt = leftTrans.getBrailleToTextPositions();
        int[] rightBtt = rightTrans.getBrailleToTextPositions();
        int[] brailleToText = new int[cells.length];
        System.arraycopy(leftBtt, 0, brailleToText, 0, startBraille);
        for (int i = startBraille; i < endBraille; ++i) {
          brailleToText[i] = start;
        }
        for (int i = endBraille; i < totalBraille; ++i) {
          brailleToText[i] = end + rightBtt[i - endBraille];
        }
        // Get cursor.
        int cursor;
        if (cursorPosition < 0) {
          cursor = -1;
        } else if (cursorPosition < start) {
          cursor = leftTrans.getCursorPosition();
        } else if (cursorPosition < end) {
          cursor = startBraille;
        } else {
          cursor = endBraille + rightTrans.getCursorPosition();
        }
        return new TranslationResult(cells, textToBraille,
            brailleToText, cursor);
      }
      return translateOrDefault(translator, cursorPosition,
          computerBrailleAtCursor);
    }
    /** Plain translation of the whole text; empty result if it fails. */
    private TranslationResult translateOrDefault(
        @NonNull BrailleTranslator translator,
        int cursorPosition,
        boolean computerBrailleAtCursor) {
      TranslationResult translation =
          translator.translate(text.toString(), cursorPosition, computerBrailleAtCursor);
      if (translation != null) {
        return translation;
      }
      return createEmptyTranslation(text);
    }
    /** Releases node copies and span resources; the content is unusable afterwards. */
    public void recycle() {
      AccessibilityNodeInfoUtils.recycleNodes(firstNode, lastNode);
      firstNode = lastNode = null;
      DisplaySpans.recycleSpans(text);
      text = null;
    }
    @Override
    public String toString() {
      return String.format("DisplayManager.Content {text=%s}", getText());
    }
  }
  private final TranslatorManager translatorManager;
  private final BrailleBackService context;
  // Not final, because it is initialized in the handler thread.
  private Display display;
  private final OnPanOverflowListener panOverflowListener;
  private final Display.OnConnectionStateChangeListener connectionStateChangeListener;
  private final OnMappedInputEventListener mappedInputEventListener;
  private final DisplayHandler displayHandler;
  private final CallbackHandler callbackHandler;
  private final HandlerThread handlerThread;
  private final PowerManager.WakeLock wakeLock;
  private final SharedPreferences sharedPreferences;
  // Read and written in display handler thread only.
  private boolean connected = false;
  private volatile boolean isSimulatedDisplay = false;
  /**
   * Cursor position last passed to the translate method of the translator. We use this because it
   * is more reliable than the position maps inside contracted words. In the common case where there
   * is just one selection/focus on the display at the same time, this gives better results.
   * Otherwise, we fall back on the position map, which is also used for keeping the pan position.
   */
  private int cursorPositionToTranslate = 0;
  private TranslationResult currentTranslationResult = createEmptyTranslation(null);
  /** Display content without overlays for cursors, focus etc. */
  private byte[] brailleContent = new byte[0];
  /** Braille content, potentially with dots overlaid for cursors and focus. */
  private byte[] overlaidBrailleContent = brailleContent;
  // Whether overlay dots are currently shown; presumably toggled by the
  // blink logic (not in this chunk) -- confirm.
  private boolean overlaysOn;
  // Active line-wrapping strategy; switched between the preferred and the
  // editing strategy depending on the content (selection logic not visible here).
  private WrapStrategy wrapStrategy;
  private final WrapStrategy editingWrapStrategy = new SimpleWrapStrategy();
  private WrapStrategy preferredWrapStrategy = new SimpleWrapStrategy();
  private Content currentContent = new Content("");
  // Displayed content, already trimmed based on the display position.
  // Updated in updateDisplayedContent() and used in refresh().
  private byte[] displayedBraille = new byte[0];
  private byte[] displayedOverlaidBraille = new byte[0];
  private CharSequence displayedText = "";
  private int[] displayedBrailleToTextPositions = new int[0];
  private boolean blinkNeeded = false;
  /**
   * Creates an instance of this class and starts the internal thread to connect to the braille
   * display service. {@code contextArg} is used to connect to the display service. {@code
   * translator} is used for braille translation. The various listeners will be called as
   * appropriate and on the same thread that was used to create this object. The current thread must
   * have a prepared looper.
   */
  @SuppressLint("InvalidWakeLockTag")
  public DisplayManager(
      TranslatorManager translatorManagerArg,
      BrailleBackService contextArg,
      OnPanOverflowListener panOverflowListenerArg,
      Display.OnConnectionStateChangeListener connectionStateChangeListenerArg,
      OnMappedInputEventListener mappedInputEventListenerArg) {
    translatorManager = translatorManagerArg;
    translatorManager.addOnTablesChangedListener(this);
    context = contextArg;
    panOverflowListener = panOverflowListenerArg;
    connectionStateChangeListener = connectionStateChangeListenerArg;
    mappedInputEventListener = mappedInputEventListenerArg;
    PowerManager pm = (PowerManager) contextArg.getSystemService(Context.POWER_SERVICE);
    // Dim-level wake lock: presumably acquired briefly on braille input
    // (see keepAwake(), not in this chunk) to keep the device responsive.
    wakeLock =
        pm.newWakeLock(
            PowerManager.SCREEN_DIM_WAKE_LOCK | PowerManager.ON_AFTER_RELEASE, "BrailleBack");
    // The display connection is created on the dedicated handler thread
    // once its looper is ready; `display` stays null until then.
    handlerThread =
        new HandlerThread("DisplayManager") {
          @Override
          public void onLooperPrepared() {
            display = new OverlayDisplay(context, new DisplayClient(context));
            display.setOnConnectionStateChangeListener(DisplayManager.this);
            display.setOnInputEventListener(DisplayManager.this);
          }
        };
    handlerThread.start();
    displayHandler = new DisplayHandler(handlerThread.getLooper());
    callbackHandler = new CallbackHandler();
    sharedPreferences = PreferenceManager.getDefaultSharedPreferences(contextArg);
    sharedPreferences.registerOnSharedPreferenceChangeListener(this);
    updateWrapStrategyFromPreferences();
  }
  /**
   * Detaches preference and table listeners and stops the display handler
   * thread, blocking up to one second for it to finish.
   */
  public void shutdown() {
    sharedPreferences.unregisterOnSharedPreferenceChangeListener(this);
    displayHandler.stop();
    // Block on display shutdown. We need to make sure this finishes before
    // we can consider DisplayManager to be shut down.
    try {
      handlerThread.join(1000 /*milis*/);
    } catch (InterruptedException e) {
      LogUtils.log(this, Log.WARN,
          "Display handler shutdown interrupted");
    }
    translatorManager.removeOnTablesChangedListener(this);
  }
  /**
   * Asynchronously updates the display to reflect {@code content}.
   * {@code content} must not be modified after this function is called, and
   * will eventually be recycled by the display manager.
   */
  public void setContent(Content content) {
    if (content == null) {
      throw new NullPointerException("content can't be null");
    }
    if (content.text == null) {
      throw new NullPointerException("content text is null");
    }
    // Actual rendering happens on the display handler thread.
    displayHandler.setContent(content);
  }
  /** Returns true if the current display is simulated. */
  public boolean isSimulatedDisplay() {
    return isSimulatedDisplay;
  }
  /**
   * Marks selection spans in the overlaid braille, and returns the position
   * in braille where the first selection begins. If there are no selection
   * spans, returns -1.
   */
  private int markSelection(Spanned spanned) {
    DisplaySpans.SelectionSpan[] spans =
        spanned.getSpans(0, spanned.length(),
            DisplaySpans.SelectionSpan.class);
    int selectionStart = -1;
    for (DisplaySpans.SelectionSpan span : spans) {
      int start =
          textToDisplayPosition(
              currentTranslationResult, cursorPositionToTranslate, spanned.getSpanStart(span));
      int end =
          textToDisplayPosition(
              currentTranslationResult, cursorPositionToTranslate, spanned.getSpanEnd(span));
      // A span that can't be mapped into braille aborts the whole marking.
      if (start == -1 || end == -1) {
        return -1;
      }
      // A collapsed selection (caret) is shown as a one-cell mark.
      if (start == end) {
        end = start + 1;
      }
      // A caret just past the end of the content needs an extra cell.
      if (end > brailleContent.length) {
        extendContentForCursor();
      }
      copyOverlaidContent();
      for (int i = start; i < end && i < overlaidBrailleContent.length; ++i) {
        overlaidBrailleContent[i] |= (byte) SELECTION_DOTS;
      }
      if (selectionStart == -1) {
        selectionStart = start;
      }
    }
    return selectionStart;
  }
/**
* Makes sure that the overlaid content has its own copy. Call before
* adding overlay dots.
*/
private void copyOverlaidContent() {
if (overlaidBrailleContent == brailleContent) {
overlaidBrailleContent = brailleContent.clone();
}
}
/**
 * Grows both braille arrays by one cell so that a cursor can be rendered
 * just past the end of the translated content.
 */
private void extendContentForCursor() {
    brailleContent = Arrays.copyOf(brailleContent, brailleContent.length + 1);
    // Always create a new copy of the overlaid content because there will
    // be a cursor, so we will need a copy anyway.
    overlaidBrailleContent =
            Arrays.copyOf(overlaidBrailleContent, overlaidBrailleContent.length + 1);
}
/**
 * Marks focus spans in the overlaid braille, and returns the position in
 * braille where the first focus begins. If there are no focus spans,
 * returns -1.
 */
private int markFocus(Spanned spanned) {
    int firstFocus = -1;
    DisplaySpans.FocusSpan[] focusSpans =
            spanned.getSpans(0, spanned.length(), DisplaySpans.FocusSpan.class);
    for (DisplaySpans.FocusSpan span : focusSpans) {
        int braillePos =
                textToDisplayPosition(
                        currentTranslationResult,
                        cursorPositionToTranslate,
                        spanned.getSpanStart(span));
        // Skip spans that map outside the braille content.
        if (braillePos < 0 || braillePos >= overlaidBrailleContent.length) {
            continue;
        }
        // Mutate a private copy, then add the focus indicator dots.
        copyOverlaidContent();
        overlaidBrailleContent[braillePos] |= (byte) FOCUS_DOTS;
        if (firstFocus == -1) {
            firstFocus = braillePos;
        }
    }
    return firstFocus;
}
@Override
public void onConnectionStateChanged(int state) {
    // Track connection state; a fresh connection needs the current content
    // retranslated and pushed to the display.
    connected = (state == Display.STATE_CONNECTED);
    if (connected) {
        displayHandler.retranslate();
    }
    isSimulatedDisplay = display.isSimulated();
    callbackHandler.onConnectionStateChanged(state);
}
@Override
public void onInputEvent(BrailleInputEvent event) {
    keepAwake();
    LogUtils.log(this, Log.VERBOSE, "InputEvent: %s", event);
    // We're called from within the handler thread. Panning is handled
    // locally; every other command is forwarded (mapped) to the user's
    // callback.
    int command = event.getCommand();
    if (command == BrailleInputEvent.CMD_NAV_PAN_LEFT) {
        panLeft();
    } else if (command == BrailleInputEvent.CMD_NAV_PAN_RIGHT) {
        panRight();
    } else {
        sendMappedEvent(event);
    }
}
@Override
public void onTablesChanged() {
    // Translation tables changed; retranslate the current content with the
    // new tables.
    displayHandler.retranslate();
}
/**
 * Forwards {@code event} to the callback handler, first mapping any
 * position argument from display coordinates back to text coordinates.
 */
private void sendMappedEvent(BrailleInputEvent event) {
    BrailleInputEvent toSend = event;
    int command = event.getCommand();
    if (BrailleInputEvent.argumentType(command) == BrailleInputEvent.ARGUMENT_POSITION) {
        int rawArgument = event.getArgument();
        // Offset the argument by the pan position and make sure it is less
        // than the next split position.
        int displayArgument = rawArgument + wrapStrategy.getDisplayStart();
        if (displayArgument >= wrapStrategy.getDisplayEnd()) {
            // The event is outside the currently displayed content; drop it.
            return;
        }
        // The mapped event argument is the translated offset argument.
        int textArgument =
                displayToTextPosition(
                        currentTranslationResult, cursorPositionToTranslate, displayArgument);
        // Only allocate a new event if the argument actually differs.
        if (textArgument != rawArgument) {
            toSend = new BrailleInputEvent(command, textArgument, event.getEventTime());
        }
    }
    callbackHandler.onMappedInputEvent(toSend);
}
/** Pans one display width to the left, or reports overflow at the start. */
private void panLeft() {
    if (!wrapStrategy.panLeft()) {
        // Already at the leftmost position; let the client decide what to do.
        callbackHandler.onPanLeftOverflow();
        return;
    }
    updateDisplayedContent();
}
/** Pans one display width to the right, or reports overflow at the end. */
private void panRight() {
    if (!wrapStrategy.panRight()) {
        // Already at the rightmost position; let the client decide what to do.
        callbackHandler.onPanRightOverflow();
        return;
    }
    updateDisplayedContent();
}
/**
 * Handler bound to the dedicated display thread. All mutation of the
 * translation and panning state happens here, serialized by the message
 * queue: content updates, retranslation, blink pulses and shutdown.
 */
private class DisplayHandler extends Handler {
    private static final int MSG_SET_CONTENT = 1;
    private static final int MSG_RETRANSLATE = 2;
    private static final int MSG_PULSE = 3;
    private static final int MSG_STOP = 4;

    public DisplayHandler(Looper looper) {
        super(looper);
    }

    /** Posts {@code content} to be translated and displayed asynchronously. */
    public void setContent(Content content) {
        obtainMessage(MSG_SET_CONTENT, content).sendToTarget();
    }

    /** Requests retranslation of the current content (e.g. on table change). */
    public void retranslate() {
        sendEmptyMessage(MSG_RETRANSLATE);
    }

    /** Schedules the next blink transition unless one is already pending. */
    public void schedulePulse() {
        if (hasMessages(MSG_PULSE)) {
            return;
        }
        sendEmptyMessageDelayed(MSG_PULSE, overlaysOn ? BLINK_ON_MILLIS : BLINK_OFF_MILLIS);
    }

    /** Stops blinking and leaves the overlay dots visible. */
    public void cancelPulse() {
        removeMessages(MSG_PULSE);
        overlaysOn = true;
    }

    /** Asynchronously shuts down the display and quits the handler thread. */
    public void stop() {
        sendEmptyMessage(MSG_STOP);
    }

    @Override
    public void handleMessage(Message msg) {
        switch (msg.what) {
            case MSG_SET_CONTENT:
                handleSetContent((Content) msg.obj);
                break;
            case MSG_RETRANSLATE:
                handleRetranslate();
                break;
            case MSG_PULSE:
                handlePulse();
                break;
            case MSG_STOP:
                handleStop();
                break;
            default:
                // Fall out.
        }
    }

    /**
     * Installs {@code content} as the current content: retranslates,
     * repositions the display window per the content's pan strategy, and
     * hands the old content to the callback thread for recycling.
     */
    private void handleSetContent(Content content) {
        Content oldContent = currentContent;
        currentContent = content;
        updateWrapStrategy();
        cursorPositionToTranslate = findCursorPosition(content);
        // Snapshot the old translation state before retranslating so PAN_KEEP
        // can map the old window position into the new content.
        TranslationResult oldTranslationResult = currentTranslationResult;
        int oldDisplayStart = wrapStrategy.getDisplayStart();
        translateCurrentContent();
        cancelPulse();
        // Adjust the pan position according to the panning strategy.
        // Setting the position to -1 below means that the cursor position
        // returned by markCursor() will be used instead; if the pan
        // position is >= 0, then the cursor position will be ignored.
        // If the pan position is -1 and the cursor position is also -1
        // (no cursor), then the wrap strategy will reset the display to the
        // beginning of the line.
        int panPosition = -1;
        switch (content.panStrategy) {
            case Content.PAN_RESET:
                panPosition = 0;
                break;
            case Content.PAN_KEEP:
                if (oldContent != null) {
                    // We don't align the display position to the size of
                    // the display in this case so that content doesn't
                    // jump around on the display if content before the
                    // current display position changes size.
                    panPosition =
                            findMatchingPanPosition(
                                    oldContent,
                                    content,
                                    oldTranslationResult,
                                    currentTranslationResult,
                                    oldDisplayStart);
                }
                break;
            case Content.PAN_CURSOR:
                // Leave panPosition at -1 so the cursor position is used.
                break;
            default:
                LogUtils.log(this, Log.ERROR, "Unknown pan strategy: %d", content.panStrategy);
        }
        int cursorPosition = markCursor();
        if (panPosition >= 0) {
            wrapStrategy.panTo(panPosition, false);
        } else {
            wrapStrategy.panTo(cursorPosition, true);
        }
        updateDisplayedContent();
        if (oldContent != null) {
            // Have the callback handler recycle the old content so that
            // the thread in which the callback handler is running is the
            // only thread modifying it. It is safe for the callback
            // thread to recycle the event when it receives this message
            // because the display handler thread will not send any more
            // input event containing this content and the events that
            // have already been sent will be processed by the callback
            // thread before the recycle message arrives because of the
            // guaranteed ordering of message handling.
            callbackHandler.recycleContent(oldContent);
        }
    }

    /**
     * Retranslates the current content in place, preserving the display
     * window by mapping its start through the old and new translations.
     */
    private void handleRetranslate() {
        if (currentContent == null) {
            return;
        }
        int oldTextPosition =
                displayToTextPosition(
                        currentTranslationResult, cursorPositionToTranslate, wrapStrategy.getDisplayStart());
        translateCurrentContent();
        int panPosition =
                textToDisplayPosition(
                        currentTranslationResult, cursorPositionToTranslate, oldTextPosition);
        int cursorPosition = markCursor();
        if (panPosition >= 0) {
            wrapStrategy.panTo(panPosition, false);
        } else {
            wrapStrategy.panTo(cursorPosition, true);
        }
        cancelPulse();
        updateDisplayedContent();
    }

    /** Toggles the blink phase and pushes the matching frame to the display. */
    private void handlePulse() {
        overlaysOn = !overlaysOn;
        refresh();
    }

    /** Shuts the display down and terminates the handler thread's looper. */
    private void handleStop() {
        display.shutdown();
        handlerThread.quit();
    }
}
/**
 * Plain holder pairing a mapped input event with the content that was
 * displayed when the event arrived.
 */
private static class OnMappedInputEventArgs {
    // The mapped input event to deliver.
    public BrailleInputEvent event;
    // The content current at the time the event was mapped.
    public Content content;

    public OnMappedInputEventArgs(BrailleInputEvent eventArg, Content contentArg) {
        event = eventArg;
        content = contentArg;
    }
}
/**
 * Handler that delivers all client-facing callbacks (connection state,
 * mapped input events, pan overflow, content recycling) on the thread this
 * handler is bound to, keeping client code off the display thread.
 */
private class CallbackHandler extends Handler {
    private static final int MSG_ON_CONNECTION_STATE_CHANGED = 1;
    private static final int MSG_ON_MAPPED_INPUT_EVENT = 2;
    private static final int MSG_ON_PAN_LEFT_OVERFLOW = 3;
    private static final int MSG_ON_PAN_RIGHT_OVERFLOW = 4;
    private static final int MSG_RECYCLE_CONTENT = 5;

    /** Posts a connection state change to the client thread. */
    public void onConnectionStateChanged(int state) {
        obtainMessage(MSG_ON_CONNECTION_STATE_CHANGED, state, 0)
                .sendToTarget();
    }

    /** Posts a mapped input event, captured together with the current content. */
    public void onMappedInputEvent(BrailleInputEvent event) {
        OnMappedInputEventArgs args = new OnMappedInputEventArgs(event, currentContent);
        obtainMessage(MSG_ON_MAPPED_INPUT_EVENT, args).sendToTarget();
    }

    /** Posts a pan-left overflow notification with the current content. */
    public void onPanLeftOverflow() {
        obtainMessage(MSG_ON_PAN_LEFT_OVERFLOW, currentContent).sendToTarget();
    }

    /** Posts a pan-right overflow notification with the current content. */
    public void onPanRightOverflow() {
        obtainMessage(MSG_ON_PAN_RIGHT_OVERFLOW, currentContent).sendToTarget();
    }

    /** Posts {@code content} to be recycled on the client thread. */
    public void recycleContent(Content content) {
        obtainMessage(MSG_RECYCLE_CONTENT, content).sendToTarget();
    }

    @Override
    public void handleMessage(Message msg) {
        switch (msg.what) {
            case MSG_ON_CONNECTION_STATE_CHANGED:
                handleOnConnectionStateChanged(msg.arg1);
                break;
            case MSG_ON_MAPPED_INPUT_EVENT:
                OnMappedInputEventArgs args = (OnMappedInputEventArgs) msg.obj;
                handleOnMappedInputEvent(args.event, args.content);
                break;
            case MSG_ON_PAN_LEFT_OVERFLOW:
                handleOnPanLeftOverflow((Content) msg.obj);
                break;
            case MSG_ON_PAN_RIGHT_OVERFLOW:
                handleOnPanRightOverflow((Content) msg.obj);
                break;
            case MSG_RECYCLE_CONTENT:
                handleRecycleContent((Content) msg.obj);
                break;
            default:
                // Fall out.
        }
    }

    private void handleOnConnectionStateChanged(int state) {
        connectionStateChangeListener.onConnectionStateChanged(state);
    }

    private void handleOnMappedInputEvent(BrailleInputEvent event,
            Content content) {
        mappedInputEventListener.onMappedInputEvent(event, content);
    }

    private void handleOnPanLeftOverflow(Content content) {
        panOverflowListener.onPanLeftOverflow(content);
    }

    private void handleOnPanRightOverflow(Content content) {
        panOverflowListener.onPanRightOverflow(content);
    }

    private void handleRecycleContent(Content content) {
        content.recycle();
    }
}
/**
 * Translates the current content into braille, updating
 * {@code currentTranslationResult}, the wrap strategy, and the braille
 * content arrays. Falls back to an empty translation if the translator
 * unexpectedly returns null.
 */
private void translateCurrentContent() {
    // Use the current translator, whether contracted or uncontracted, for
    // editing text, but instruct contracted translators to uncontract
    // the braille for the word under the cursor.
    BrailleTranslator translator = translatorManager.getTranslator();
    currentTranslationResult =
            currentContent.translateWithVerbatimBraille(
                    translator, cursorPositionToTranslate, uncontractBrailleAtCursor(currentContent));
    // Make very sure we do not call getCells() on a null translation.
    // translateWithVerbatimBraille() currently should never return null.
    if (currentTranslationResult == null) {
        LogUtils.log(this, Log.ERROR, "currentTranslationResult is null");
        currentTranslationResult = createEmptyTranslation(currentContent.getText());
    }
    wrapStrategy.setContent(currentContent, currentTranslationResult, getNumTextCells());
    brailleContent = currentTranslationResult.getCells();
    // The overlay starts out aliased to the base content; it is cloned
    // lazily by copyOverlaidContent() when overlay dots are added.
    overlaidBrailleContent = brailleContent;
}
/**
 * Builds a translation with zero braille cells for {@code text}, used as a
 * safe fallback when translation fails. The text-to-braille map still has
 * one entry per character so position mapping does not go out of bounds.
 */
private static TranslationResult createEmptyTranslation(CharSequence text) {
    int textLength = (text == null) ? 0 : text.length();
    return new TranslationResult(new byte[0], new int[textLength], new int[0], 0);
}
/**
 * Marks the selection or focus cursor (in that priority), and returns the
 * position in braille of the selection or focus cursor if one exists. If no
 * selection or focus cursor exists, then returns -1.
 */
private int markCursor() {
    Spanned spanned = currentContent.getSpanned();
    if (spanned == null) {
        return -1;
    }
    // Selection takes priority; fall back to focus only when there is none.
    int position = markSelection(spanned);
    if (position == -1) {
        position = markFocus(spanned);
    }
    return position;
}
/**
 * Recomputes the slice of braille, text and position map that corresponds
 * to the current display window and pushes it to the display via
 * {@link #refresh}.
 */
private void updateDisplayedContent() {
    if (!connected || currentContent == null) {
        return;
    }
    int displayStart = wrapStrategy.getDisplayStart();
    int displayEnd = wrapStrategy.getDisplayEnd();
    if (displayEnd < displayStart) {
        return;
    }
    // Compute equivalent text and mapping.
    int[] brailleToTextPositions = currentTranslationResult.getBrailleToTextPositions();
    // Positions past the end of the map correspond to padding cells (e.g. a
    // cursor cell appended past the content) and get clamped.
    int textLeft = displayStart >= brailleToTextPositions.length
            ? 0
            : brailleToTextPositions[displayStart];
    int textRight =
            displayEnd >= brailleToTextPositions.length
                    ? currentContent.text.length()
                    : brailleToTextPositions[displayEnd];
    // TODO: Prevent out of order brailleToTextPositions.
    if (textRight < textLeft) {
        textRight = textLeft;
    }
    StringBuilder newText = new StringBuilder(currentContent.text.subSequence(textLeft, textRight));
    // Re-base the braille-to-text map onto the window, padding with spaces
    // for braille cells that have no backing text.
    int[] trimmedBrailleToTextPositions =
            new int[displayEnd - displayStart];
    for (int i = 0; i < trimmedBrailleToTextPositions.length; i++) {
        if (displayStart + i < brailleToTextPositions.length) {
            trimmedBrailleToTextPositions[i] =
                    brailleToTextPositions[displayStart + i] - textLeft;
        } else {
            trimmedBrailleToTextPositions[i] = newText.length();
            newText.append(' ');
        }
    }
    // Store all data needed by refresh().
    displayedBraille = Arrays.copyOfRange(brailleContent, displayStart, displayEnd);
    if (brailleContent != overlaidBrailleContent) {
        displayedOverlaidBraille =
                Arrays.copyOfRange(overlaidBrailleContent, displayStart, displayEnd);
    } else {
        // No overlays: alias the plain braille to avoid a second copy.
        displayedOverlaidBraille = displayedBraille;
    }
    displayedText = newText.toString();
    displayedBrailleToTextPositions = trimmedBrailleToTextPositions;
    blinkNeeded = blinkNeeded();
    refresh();
}
/**
 * Pushes the currently cached window (with or without overlay dots,
 * depending on the blink phase) to the display, and keeps the blink pulse
 * scheduled only while it is needed.
 */
private void refresh() {
    if (!connected) {
        return;
    }
    byte[] dots = overlaysOn ? displayedOverlaidBraille : displayedBraille;
    display.displayDots(dots, displayedText, displayedBrailleToTextPositions);
    if (blinkNeeded) {
        displayHandler.schedulePulse();
    } else {
        displayHandler.cancelPulse();
    }
}
/**
 * Returns {@code true} if the current display content is such that it
 * requires blinking, i.e. some cell in the visible window differs between
 * the plain and the overlaid braille.
 */
private boolean blinkNeeded() {
    if (brailleContent == overlaidBrailleContent) {
        // Overlay still aliases the base content: no overlay dots anywhere.
        return false;
    }
    int end = wrapStrategy.getDisplayEnd();
    for (int i = wrapStrategy.getDisplayStart(); i < end; ++i) {
        if (overlaidBrailleContent[i] != brailleContent[i]) {
            return true;
        }
    }
    return false;
}
/**
 * Keeps the phone awake as if there was a 'user activity' registered
 * by the system.
 */
private void keepAwake() {
    // Acquiring the lock and immediately releasing it keeps the phone
    // awake. We don't use acquire() with a timeout because it just
    // adds an unnecessary context switch.
    wakeLock.acquire();
    wakeLock.release();
}
/**
 * Returns the size of the connected display, or {@code 1} if
 * no display is connected.
 */
private int getNumTextCells() {
    // The fallback of 1 keeps wrap-strategy arithmetic well-defined while
    // disconnected.
    return connected ? display.getDisplayProperties().getNumTextCells() : 1;
}
/**
 * Tries to map the display position in {@code oldContent} to the
 * corresponding position in {@code newContent} by matching the
 * accessibility node nearest to the old window start. Returns -1 when no
 * match can be made (e.g. either content has no span information).
 *
 * @param oldDisplayPosition braille position of the old display window start
 * @return the equivalent braille position in the new content, or -1
 */
private int findMatchingPanPosition(
        Content oldContent, Content newContent,
        TranslationResult oldTranslationResult,
        TranslationResult newTranslationResult,
        int oldDisplayPosition) {
    Spanned oldSpanned = oldContent.getSpanned();
    Spanned newSpanned = newContent.getSpanned();
    if (oldSpanned == null || newSpanned == null) {
        return -1;
    }
    // Map the current display start and past-the-end positions
    // to the corresponding input positions.
    int oldTextStart = displayToTextPosition(oldTranslationResult,
            -1 /*cursorPosition*/, oldDisplayPosition);
    int oldTextEnd = displayToTextPosition(oldTranslationResult,
            -1 /*cursorPosition*/, oldDisplayPosition + getNumTextCells());
    // Find the nodes that overlap with the display.
    AccessibilityNodeInfoCompat[] displayedNodes =
            oldSpanned.getSpans(oldTextStart, oldTextEnd,
                    AccessibilityNodeInfoCompat.class);
    // Closest node to the old window start first.
    Arrays.sort(displayedNodes,
            new ByDistanceComparator(oldSpanned, oldTextStart));
    // Find corresponding node in new content; the first (closest) node that
    // also exists in the new content wins.
    for (AccessibilityNodeInfoCompat oldNode : displayedNodes) {
        AccessibilityNodeInfoCompat newNode = (AccessibilityNodeInfoCompat)
                DisplaySpans.getEqualSpan(newSpanned, oldNode);
        if (newNode == null) {
            continue;
        }
        int oldDisplayStart = textToDisplayPosition(oldTranslationResult,
                -1 /*cursorPosition*/, oldSpanned.getSpanStart(oldNode));
        int newDisplayStart = textToDisplayPosition(newTranslationResult,
                -1 /*cursorPosition*/, newSpanned.getSpanStart(newNode));
        // TODO: If crashes happen here, return -1 when *DisplayStart == -1.
        // Offset position according to diff in node position.
        int newDisplayPosition = oldDisplayPosition + (newDisplayStart - oldDisplayStart);
        return newDisplayPosition;
    }
    return -1;
}
/**
 * Orders accessibility nodes by the distance of their span start from a
 * reference position; ties are broken by span length (shorter first).
 */
private static class ByDistanceComparator
        implements Comparator<AccessibilityNodeInfoCompat> {
    private final Spanned spanned;
    private final int start;

    public ByDistanceComparator(Spanned spannedArg, int startArg) {
        spanned = spannedArg;
        start = startArg;
    }

    @Override
    public int compare(
            AccessibilityNodeInfoCompat a,
            AccessibilityNodeInfoCompat b) {
        int aStart = spanned.getSpanStart(a);
        int bStart = spanned.getSpanStart(b);
        int aDist = Math.abs(start - aStart);
        int bDist = Math.abs(start - bStart);
        if (aDist != bDist) {
            // Integer.compare avoids the overflow risk of returning a
            // subtraction from a comparator.
            return Integer.compare(aDist, bDist);
        }
        // They are at the same distance, compare by length.
        // NOTE(review): the previous code computed aStart + getSpanEnd(a),
        // which is not a length; end - start matches the stated intent.
        int aLength = spanned.getSpanEnd(a) - aStart;
        int bLength = spanned.getSpanEnd(b) - bStart;
        return Integer.compare(aLength, bLength);
    }
}
/**
 * Returns braille character index of a text character index. May return -1.
 *
 * @param cursorPosition text position of the cursor; a {@code textPosition}
 *     equal to it maps to the translation's own cursor cell
 * @param textPosition text character index to map
 */
private static int textToDisplayPosition(
        TranslationResult translationResult, int cursorPosition, int textPosition) {
    if (textPosition == cursorPosition) {
        return translationResult.getCursorPosition(); // May return -1?
    }
    int[] posMap = translationResult.getTextToBraillePositions(); // May include -1?
    // Any position past-the-end of the position map maps to the
    // corresponding past-the-end position in the braille.
    if (textPosition >= posMap.length) {
        return translationResult.getBrailleToTextPositions().length;
    }
    return posMap[textPosition];
}
/**
 * Returns the text character index for a braille cell index; the inverse
 * of {@code textToDisplayPosition}. A braille position equal to the
 * translation's cursor cell maps back to {@code cursorPosition}.
 */
private static int displayToTextPosition(
        TranslationResult translationResult,
        int cursorPosition,
        int displayPosition) {
    if (displayPosition == translationResult.getCursorPosition()) {
        return cursorPosition;
    }
    int[] brailleToText = translationResult.getBrailleToTextPositions();
    // Any position past-the-end of the position map maps to the
    // corresponding past-the-end position in the text.
    return displayPosition >= brailleToText.length
            ? translationResult.getTextToBraillePositions().length
            : brailleToText[displayPosition];
}
/**
 * Returns the text position of the first selection span, else the first
 * focus span, else -1 when {@code content} has no span information.
 */
private static int findCursorPosition(Content content) {
    Spanned spanned = content.getSpanned();
    if (spanned == null) {
        return -1;
    }
    int length = spanned.length();
    // Selection spans take priority over focus spans.
    DisplaySpans.SelectionSpan[] selections =
            spanned.getSpans(0, length, DisplaySpans.SelectionSpan.class);
    if (selections.length != 0) {
        return spanned.getSpanStart(selections[0]);
    }
    DisplaySpans.FocusSpan[] focuses =
            spanned.getSpans(0, length, DisplaySpans.FocusSpan.class);
    return focuses.length != 0 ? spanned.getSpanStart(focuses[0]) : -1;
}
/**
 * Returns whether the word under the cursor should be uncontracted:
 * contraction is not always allowed and the content carries at least one
 * selection span.
 */
private boolean uncontractBrailleAtCursor(Content content) {
    if (content.getContractionMode() == Content.CONTRACT_ALWAYS_ALLOW) {
        return false;
    }
    Spanned spanned = content.getSpanned();
    return spanned != null
            && spanned.getSpans(0, spanned.length(), DisplaySpans.SelectionSpan.class).length > 0;
}
@Override
public void onSharedPreferenceChanged(SharedPreferences sharedPreferencesArg, String s) {
    // Only the word-wrap preference affects this class.
    String wordWrapPrefKey = context.getString(R.string.pref_braille_word_wrap_key);
    if (wordWrapPrefKey.equals(s)) {
        updateWrapStrategyFromPreferences();
    }
}
/**
 * Re-reads the word-wrap preference, installs the matching wrap strategy
 * and retranslates so the display reflects it immediately.
 */
private void updateWrapStrategyFromPreferences() {
    boolean wordWrap =
            SharedPreferencesUtils.getBooleanPref(
                    sharedPreferences,
                    context.getResources(),
                    R.string.pref_braille_word_wrap_key,
                    R.bool.pref_braille_word_wrap_default);
    if (wordWrap) {
        preferredWrapStrategy = new WordWrapStrategy();
    } else {
        preferredWrapStrategy = new SimpleWrapStrategy();
    }
    updateWrapStrategy();
    displayHandler.retranslate();
}
/**
 * Selects the editing wrap strategy while editable content is shown with
 * the IME open; otherwise the user's preferred strategy.
 */
private void updateWrapStrategy() {
    boolean editingWithImeOpen =
            currentContent != null
                    && currentContent.isEditable()
                    && context != null
                    && context.imeNavigationMode != null
                    && context.imeNavigationMode.isImeOpen();
    wrapStrategy = editingWithImeOpen ? editingWrapStrategy : preferredWrapStrategy;
}
}
| google/brailleback | braille/brailleback/src/com/googlecode/eyesfree/brailleback/DisplayManager.java | Java | apache-2.0 | 48,343 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.storage.am.lsm.invertedindex.util;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.ErrorCode;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.primitive.IntegerPointable;
import org.apache.hyracks.data.std.util.GrowableArray;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
import org.apache.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import org.apache.hyracks.storage.am.btree.OrderedIndexTestUtils;
import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
import org.apache.hyracks.storage.am.common.CheckTuple;
import org.apache.hyracks.storage.am.common.datagen.DocumentStringFieldValueGenerator;
import org.apache.hyracks.storage.am.common.datagen.IFieldValueGenerator;
import org.apache.hyracks.storage.am.common.datagen.PersonNameFieldValueGenerator;
import org.apache.hyracks.storage.am.common.datagen.SortedIntegerFieldValueGenerator;
import org.apache.hyracks.storage.am.common.datagen.TupleGenerator;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndex;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifier;
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedListCursor;
import org.apache.hyracks.storage.am.lsm.invertedindex.common.LSMInvertedIndexTestHarness;
import org.apache.hyracks.storage.am.lsm.invertedindex.search.InvertedIndexSearchPredicate;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.HashedUTF8NGramTokenFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.HashedUTF8WordTokenFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizer;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IToken;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.ITokenFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.NGramUTF8StringBinaryTokenizerFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8NGramTokenFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8WordTokenFactory;
import org.apache.hyracks.storage.am.lsm.invertedindex.util.LSMInvertedIndexTestContext.InvertedIndexType;
import org.apache.hyracks.storage.common.IIndexBulkLoader;
import org.apache.hyracks.storage.common.IIndexCursor;
import org.apache.hyracks.storage.common.MultiComparator;
@SuppressWarnings("rawtypes")
public class LSMInvertedIndexTestUtils {
/** N-gram length used by the n-gram tokenizer test contexts. */
public static final int TEST_GRAM_LENGTH = 3;
/**
 * Builds a generator producing (document string, sorted integer id) tuples
 * with zero key fields.
 */
public static TupleGenerator createStringDocumentTupleGen(Random rnd) throws IOException {
    ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
            new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
    IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[] {
            // Documents of 2-10 words drawn from a 10000-word space.
            new DocumentStringFieldValueGenerator(2, 10, 10000, rnd),
            new SortedIntegerFieldValueGenerator(0) };
    return new TupleGenerator(fieldGens, fieldSerdes, 0);
}
/**
 * Builds a generator producing (person name, sorted integer id) tuples
 * with zero key fields.
 */
public static TupleGenerator createPersonNamesTupleGen(Random rnd) throws IOException {
    ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
            new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
    IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[] {
            new PersonNameFieldValueGenerator(rnd, 0.5f),
            new SortedIntegerFieldValueGenerator(0) };
    return new TupleGenerator(fieldGens, fieldSerdes, 0);
}
/**
 * Returns the field serdes for a non-hashed (string-token) index of the
 * given type; partitioned variants carry an extra short for the set size.
 */
private static ISerializerDeserializer[] getNonHashedIndexFieldSerdes(InvertedIndexType invIndexType)
        throws HyracksDataException {
    switch (invIndexType) {
        case INMEMORY:
        case ONDISK:
        case LSM:
            return new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
                    IntegerSerializerDeserializer.INSTANCE };
        case PARTITIONED_INMEMORY:
        case PARTITIONED_ONDISK:
        case PARTITIONED_LSM:
            // Such indexes also include the set-size for partitioning.
            return new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
                    ShortSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
        default:
            throw new HyracksDataException("Unhandled inverted index type '" + invIndexType + "'.");
    }
}
/**
 * Returns the field serdes for a hashed (integer-token) index of the given
 * type; partitioned variants carry an extra short for the set size.
 */
private static ISerializerDeserializer[] getHashedIndexFieldSerdes(InvertedIndexType invIndexType)
        throws HyracksDataException {
    switch (invIndexType) {
        case INMEMORY:
        case ONDISK:
        case LSM:
            return new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
                    IntegerSerializerDeserializer.INSTANCE };
        case PARTITIONED_INMEMORY:
        case PARTITIONED_ONDISK:
        case PARTITIONED_LSM:
            // Such indexes also include the set-size for partitioning.
            return new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
                    ShortSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
        default:
            throw new HyracksDataException("Unhandled inverted index type '" + invIndexType + "'.");
    }
}
/** Creates a test context for a word-tokenized (non-hashed) inverted index. */
public static LSMInvertedIndexTestContext createWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
        InvertedIndexType invIndexType) throws IOException, HyracksDataException {
    ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
    IBinaryTokenizerFactory tokenizerFactory =
            new DelimitedUTF8StringBinaryTokenizerFactory(true, false, new UTF8WordTokenFactory());
    // The last field is the inverted-list element; all others are tokenized.
    return LSMInvertedIndexTestContext.create(harness, fieldSerdes, fieldSerdes.length - 1,
            tokenizerFactory, invIndexType, null, null, null, null, null, null);
}
/** Creates a test context for a word-tokenized, hashed-token inverted index. */
public static LSMInvertedIndexTestContext createHashedWordInvIndexTestContext(LSMInvertedIndexTestHarness harness,
        InvertedIndexType invIndexType) throws IOException, HyracksDataException {
    ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
    IBinaryTokenizerFactory tokenizerFactory =
            new DelimitedUTF8StringBinaryTokenizerFactory(true, false, new HashedUTF8WordTokenFactory());
    // The last field is the inverted-list element; all others are tokenized.
    return LSMInvertedIndexTestContext.create(harness, fieldSerdes, fieldSerdes.length - 1,
            tokenizerFactory, invIndexType, null, null, null, null, null, null);
}
/** Creates a test context for an n-gram-tokenized (non-hashed) inverted index. */
public static LSMInvertedIndexTestContext createNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
        InvertedIndexType invIndexType) throws IOException, HyracksDataException {
    ISerializerDeserializer[] fieldSerdes = getNonHashedIndexFieldSerdes(invIndexType);
    IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(
            TEST_GRAM_LENGTH, true, true, false, new UTF8NGramTokenFactory());
    // The last field is the inverted-list element; all others are tokenized.
    return LSMInvertedIndexTestContext.create(harness, fieldSerdes, fieldSerdes.length - 1,
            tokenizerFactory, invIndexType, null, null, null, null, null, null);
}
/** Creates a test context for an n-gram-tokenized, hashed-token inverted index. */
public static LSMInvertedIndexTestContext createHashedNGramInvIndexTestContext(LSMInvertedIndexTestHarness harness,
        InvertedIndexType invIndexType) throws IOException, HyracksDataException {
    ISerializerDeserializer[] fieldSerdes = getHashedIndexFieldSerdes(invIndexType);
    IBinaryTokenizerFactory tokenizerFactory = new NGramUTF8StringBinaryTokenizerFactory(
            TEST_GRAM_LENGTH, true, true, false, new HashedUTF8NGramTokenFactory());
    // The last field is the inverted-list element; all others are tokenized.
    return LSMInvertedIndexTestContext.create(harness, fieldSerdes, fieldSerdes.length - 1,
            tokenizerFactory, invIndexType, null, null, null, null, null, null);
}
/**
 * Bulk-loads {@code numDocs} generated documents into the index under
 * test, using an in-memory sorted set of check tuples as the (sorted)
 * staging area required by the bulk loader.
 *
 * NOTE(review): the {@code appendOnly} parameter is currently unused —
 * confirm whether it should select a different bulk-load mode.
 */
public static void bulkLoadInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs,
        boolean appendOnly) throws HyracksDataException, IOException {
    SortedSet<CheckTuple> tmpMemIndex = new TreeSet<>();
    // First generate the expected index by inserting the documents one-by-one.
    for (int i = 0; i < numDocs; i++) {
        ITupleReference tuple = tupleGen.next();
        testCtx.insertCheckTuples(tuple, tmpMemIndex);
    }
    ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
    // Use the expected index to bulk-load the actual index.
    IIndexBulkLoader bulkLoader = testCtx.getIndex().createBulkLoader(1.0f, false, numDocs, true);
    ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(testCtx.getFieldSerdes().length);
    ArrayTupleReference tuple = new ArrayTupleReference();
    Iterator<CheckTuple> checkTupleIter = tmpMemIndex.iterator();
    while (checkTupleIter.hasNext()) {
        CheckTuple checkTuple = checkTupleIter.next();
        OrderedIndexTestUtils.createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, fieldSerdes);
        bulkLoader.add(tuple);
    }
    bulkLoader.end();
    // Add all check tuples from the temp index to the test context.
    testCtx.getCheckTuples().addAll(tmpMemIndex);
}
/**
 * Inserts {@code numDocs} generated documents one at a time, mirroring
 * each insertion into the context's expected check tuples.
 */
public static void insertIntoInvIndex(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, int numDocs)
        throws IOException {
    // InMemoryInvertedIndex only supports insert.
    for (int docIx = 0; docIx < numDocs; docIx++) {
        ITupleReference nextTuple = tupleGen.next();
        testCtx.getIndexAccessor().insert(nextTuple);
        testCtx.insertCheckTuples(nextTuple, testCtx.getCheckTuples());
    }
}
/**
 * Deletes up to {@code numDocsToDelete} randomly chosen documents from the
 * index under test, keeping the expected check tuples and the document
 * corpus in sync.
 */
public static void deleteFromInvIndex(LSMInvertedIndexTestContext testCtx, Random rnd, int numDocsToDelete)
        throws HyracksDataException {
    List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
    for (int i = 0; i < numDocsToDelete && !documentCorpus.isEmpty(); i++) {
        int size = documentCorpus.size();
        // Use nextInt(bound) for a uniform, always-valid index. The previous
        // Math.abs(rnd.nextInt()) % size yields a negative index when
        // nextInt() returns Integer.MIN_VALUE (Math.abs of it is negative).
        int tupleIndex = rnd.nextInt(size);
        ITupleReference deleteTuple = documentCorpus.get(tupleIndex);
        testCtx.getIndexAccessor().delete(deleteTuple);
        testCtx.deleteCheckTuples(deleteTuple, testCtx.getCheckTuples());
        // O(1) removal: swap tupleIndex with the last element, drop the tail.
        documentCorpus.set(tupleIndex, documentCorpus.get(size - 1));
        documentCorpus.remove(size - 1);
    }
}
/**
 * Compares actual and expected indexes using the rangeSearch() method of the inverted-index accessor.
 * Both the index scan and the check-tuple iterator are expected to deliver entries in the same
 * sorted order, so a pairwise comparison suffices.
 */
public static void compareActualAndExpectedIndexesRangeSearch(LSMInvertedIndexTestContext testCtx)
        throws HyracksDataException {
    IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
    int tokenFieldCount = invIndex.getTokenTypeTraits().length;
    int invListFieldCount = invIndex.getInvListTypeTraits().length;
    IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) invIndex
            .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    IIndexCursor invIndexCursor = invIndexAccessor.createRangeSearchCursor();
    MultiComparator tokenCmp = MultiComparator.create(invIndex.getTokenCmpFactories());
    // Build a comparator over the complete tuple: token fields first, then inverted-list fields.
    IBinaryComparatorFactory[] tupleCmpFactories =
            new IBinaryComparatorFactory[tokenFieldCount + invListFieldCount];
    for (int i = 0; i < tokenFieldCount; i++) {
        tupleCmpFactories[i] = invIndex.getTokenCmpFactories()[i];
    }
    for (int i = 0; i < invListFieldCount; i++) {
        tupleCmpFactories[tokenFieldCount + i] = invIndex.getInvListCmpFactories()[i];
    }
    MultiComparator tupleCmp = MultiComparator.create(tupleCmpFactories);
    // An open range predicate (null low/high keys, both inclusive) scans the whole index.
    RangePredicate nullPred = new RangePredicate(null, null, true, true, tokenCmp, tokenCmp);
    invIndexAccessor.rangeSearch(invIndexCursor, nullPred);
    // Helpers for generating a serialized inverted-list element from a CheckTuple from the expected index.
    ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
    ArrayTupleBuilder expectedBuilder = new ArrayTupleBuilder(fieldSerdes.length);
    ArrayTupleReference expectedTuple = new ArrayTupleReference();
    Iterator<CheckTuple> expectedIter = testCtx.getCheckTuples().iterator();
    // Compare index elements pairwise.
    try {
        while (invIndexCursor.hasNext() && expectedIter.hasNext()) {
            invIndexCursor.next();
            ITupleReference actualTuple = invIndexCursor.getTuple();
            CheckTuple expected = expectedIter.next();
            OrderedIndexTestUtils.createTupleFromCheckTuple(expected, expectedBuilder, expectedTuple, fieldSerdes);
            if (tupleCmp.compare(actualTuple, expectedTuple) != 0) {
                fail("Index entries differ for token '" + expected.getField(0) + "'.");
            }
        }
        // Leftover entries on either side mean the indexes have different cardinalities.
        if (expectedIter.hasNext()) {
            fail("Indexes do not match. Actual index is missing entries.");
        }
        if (invIndexCursor.hasNext()) {
            fail("Indexes do not match. Actual index contains too many entries.");
        }
    } finally {
        // Always release the cursor, even when a comparison fails.
        invIndexCursor.close();
    }
}
/**
 * Compares actual and expected indexes by comparing their inverted-lists one by one. Exercises the openInvertedListCursor() method of the inverted-index accessor.
 * For every token ever inserted, the expected inverted list is derived from the check tuples and
 * compared element-by-element against the cursor over the actual index's inverted list.
 */
@SuppressWarnings("unchecked")
public static void compareActualAndExpectedIndexes(LSMInvertedIndexTestContext testCtx)
        throws HyracksDataException {
    IInvertedIndex invIndex = (IInvertedIndex) testCtx.getIndex();
    ISerializerDeserializer[] fieldSerdes = testCtx.getFieldSerdes();
    MultiComparator invListCmp = MultiComparator.create(invIndex.getInvListCmpFactories());
    IInvertedIndexAccessor invIndexAccessor = (IInvertedIndexAccessor) testCtx.getIndexAccessor();
    int tokenFieldCount = invIndex.getTokenTypeTraits().length;
    int invListFieldCount = invIndex.getInvListTypeTraits().length;
    // All tokens that were inserted into the indexes.
    Iterator<Comparable> tokensIter = testCtx.getAllTokens().iterator();
    // Search key for finding an inverted-list in the actual index.
    ArrayTupleBuilder searchKeyBuilder = new ArrayTupleBuilder(tokenFieldCount);
    ArrayTupleReference searchKey = new ArrayTupleReference();
    // Cursor over inverted list from actual index.
    IInvertedListCursor actualInvListCursor = invIndexAccessor.createInvertedListCursor();
    // Helpers for generating a serialized inverted-list element from a CheckTuple from the expected index.
    ArrayTupleBuilder expectedBuilder = new ArrayTupleBuilder(fieldSerdes.length);
    // Includes the token fields.
    ArrayTupleReference completeExpectedTuple = new ArrayTupleReference();
    // Field permutation and permuting tuple reference to strip away token fields from completeExpectedTuple.
    int[] fieldPermutation = new int[invListFieldCount];
    for (int i = 0; i < fieldPermutation.length; i++) {
        fieldPermutation[i] = tokenFieldCount + i;
    }
    PermutingTupleReference expectedTuple = new PermutingTupleReference(fieldPermutation);
    // Iterate over all tokens. Find the inverted-lists in actual and expected indexes, then
    // compare the inverted lists element by element.
    while (tokensIter.hasNext()) {
        Comparable token = tokensIter.next();
        // Position inverted-list iterator on expected index: low and high key both carry the
        // same token, selecting exactly that token's list.
        CheckTuple checkLowKey = new CheckTuple(tokenFieldCount, tokenFieldCount);
        checkLowKey.appendField(token);
        CheckTuple checkHighKey = new CheckTuple(tokenFieldCount, tokenFieldCount);
        checkHighKey.appendField(token);
        SortedSet<CheckTuple> expectedInvList =
                OrderedIndexTestUtils.getPrefixExpectedSubset(testCtx.getCheckTuples(), checkLowKey, checkHighKey);
        Iterator<CheckTuple> expectedInvListIter = expectedInvList.iterator();
        // Position inverted-list cursor in actual index.
        OrderedIndexTestUtils.createTupleFromCheckTuple(checkLowKey, searchKeyBuilder, searchKey, fieldSerdes);
        invIndexAccessor.openInvertedListCursor(actualInvListCursor, searchKey);
        // A size mismatch already proves divergence without walking the lists.
        if (actualInvListCursor.size() != expectedInvList.size()) {
            fail("Actual and expected inverted lists for token '" + token.toString()
                    + "' have different sizes. Actual size: " + actualInvListCursor.size() + ". Expected size: "
                    + expectedInvList.size() + ".");
        }
        // Compare inverted-list elements.
        int count = 0;
        actualInvListCursor.pinPages();
        try {
            while (actualInvListCursor.hasNext() && expectedInvListIter.hasNext()) {
                actualInvListCursor.next();
                ITupleReference actual = actualInvListCursor.getTuple();
                CheckTuple expected = expectedInvListIter.next();
                OrderedIndexTestUtils.createTupleFromCheckTuple(expected, expectedBuilder, completeExpectedTuple,
                        fieldSerdes);
                // Strip the token fields so only inverted-list fields are compared.
                expectedTuple.reset(completeExpectedTuple);
                if (invListCmp.compare(actual, expectedTuple) != 0) {
                    fail("Inverted lists of token '" + token + "' differ at position " + count + ".");
                }
                count++;
            }
        } finally {
            // Pages must be unpinned even when a comparison fails.
            actualInvListCursor.unpinPages();
        }
    }
}
/**
 * Determine the expected results with the simple ScanCount algorithm.
 * Translates the concrete index type into the partitioned/non-partitioned distinction and
 * delegates to the boolean overload.
 */
public static void getExpectedResults(int[] scanCountArray, TreeSet<CheckTuple> checkTuples,
        ITupleReference searchDocument, IBinaryTokenizer tokenizer, ISerializerDeserializer tokenSerde,
        IInvertedIndexSearchModifier searchModifier, List<Integer> expectedResults, InvertedIndexType invIndexType)
        throws IOException {
    final boolean isPartitioned;
    switch (invIndexType) {
        case PARTITIONED_INMEMORY:
        case PARTITIONED_ONDISK:
        case PARTITIONED_LSM:
            isPartitioned = true;
            break;
        default:
            // INMEMORY, ONDISK and LSM are all non-partitioned.
            isPartitioned = false;
            break;
    }
    getExpectedResults(scanCountArray, checkTuples, searchDocument, tokenizer, tokenSerde, searchModifier,
            expectedResults, isPartitioned);
}
/**
 * Determines the expected query results with the simple ScanCount algorithm: each query token
 * increments a per-element occurrence counter, and every element whose counter reaches the
 * search modifier's occurrence threshold is an expected result.
 *
 * @param scanCountArray scratch array of per-element occurrence counts; reset on entry
 * @param checkTuples contents of the expected index
 * @param searchDocument query document; field 0 is tokenized
 * @param tokenizer tokenizer used to split the query document
 * @param tokenSerde serde used to deserialize tokens for key comparisons
 * @param searchModifier supplies the occurrence threshold and optional token-count bounds
 * @param expectedResults output list of expected element ids; cleared on entry
 * @param isPartitioned whether the index is length-partitioned
 */
@SuppressWarnings("unchecked")
public static void getExpectedResults(int[] scanCountArray, TreeSet<CheckTuple> checkTuples,
        ITupleReference searchDocument, IBinaryTokenizer tokenizer, ISerializerDeserializer tokenSerde,
        IInvertedIndexSearchModifier searchModifier, List<Integer> expectedResults, boolean isPartitioned)
        throws IOException {
    // Reset scan count array and the output list.
    Arrays.fill(scanCountArray, 0);
    expectedResults.clear();
    GrowableArray tokenData = new GrowableArray();
    tokenizer.reset(searchDocument.getFieldData(0), searchDocument.getFieldStart(0),
            searchDocument.getFieldLength(0));
    // Run through tokenizer once just to get the number of query tokens.
    int numQueryTokens = 0;
    while (tokenizer.hasNext()) {
        tokenizer.next();
        numQueryTokens++;
    }
    short numTokensLowerBound = -1;
    short numTokensUpperBound = -1;
    int invListElementField = 1;
    if (isPartitioned) {
        // Length-partitioned indexes may allow filtering out partitions by token count;
        // a negative bound means the modifier does not support length filtering.
        numTokensLowerBound = searchModifier.getNumTokensLowerBound((short) numQueryTokens);
        numTokensUpperBound = searchModifier.getNumTokensUpperBound((short) numQueryTokens);
        invListElementField = 2;
    }
    int occurrenceThreshold = searchModifier.getOccurrenceThreshold(numQueryTokens);
    // Second pass: update the scan counts for every token's inverted list.
    tokenizer.reset(searchDocument.getFieldData(0), searchDocument.getFieldStart(0),
            searchDocument.getFieldLength(0));
    while (tokenizer.hasNext()) {
        tokenizer.next();
        IToken token = tokenizer.getToken();
        tokenData.reset();
        token.serializeToken(tokenData);
        ByteArrayInputStream inStream =
                new ByteArrayInputStream(tokenData.getByteArray(), 0, tokenData.getLength());
        DataInput dataIn = new DataInputStream(inStream);
        Comparable tokenObj = (Comparable) tokenSerde.deserialize(dataIn);
        CheckTuple lowKey;
        if (numTokensLowerBound < 0) {
            // Index is not partitioned, or no length filtering is possible for this search modifier.
            lowKey = new CheckTuple(1, 1);
            lowKey.appendField(tokenObj);
        } else {
            // Index is length partitioned, and search modifier supports length filtering.
            lowKey = new CheckTuple(2, 2);
            lowKey.appendField(tokenObj);
            lowKey.appendField(Short.valueOf(numTokensLowerBound));
        }
        CheckTuple highKey;
        if (numTokensUpperBound < 0) {
            // Index is not partitioned, or no length filtering is possible for this search modifier.
            highKey = new CheckTuple(1, 1);
            highKey.appendField(tokenObj);
        } else {
            // Index is length partitioned, and search modifier supports length filtering.
            highKey = new CheckTuple(2, 2);
            highKey.appendField(tokenObj);
            highKey.appendField(Short.valueOf(numTokensUpperBound));
        }
        // Get view over check tuples containing inverted-list corresponding to token.
        SortedSet<CheckTuple> invList = OrderedIndexTestUtils.getPrefixExpectedSubset(checkTuples, lowKey, highKey);
        Iterator<CheckTuple> invListIter = invList.iterator();
        // Iterate over inverted list and update scan count array.
        while (invListIter.hasNext()) {
            CheckTuple checkTuple = invListIter.next();
            Integer element = (Integer) checkTuple.getField(invListElementField);
            scanCountArray[element]++;
        }
    }
    // Run through scan count array, and see whether elements satisfy the given occurrence threshold.
    // (expectedResults was already cleared above; a second redundant clear() was removed.)
    for (int i = 0; i < scanCountArray.length; i++) {
        if (scanCountArray[i] >= occurrenceThreshold) {
            expectedResults.add(i);
        }
    }
}
/**
 * Runs {@code numDocQueries + numRandomQueries} searches against the index under test and
 * verifies each result set against the expected results computed by the ScanCount algorithm.
 * Queries that trigger an occurrence-threshold panic are skipped rather than failed.
 */
public static void testIndexSearch(LSMInvertedIndexTestContext testCtx, TupleGenerator tupleGen, Random rnd,
        int numDocQueries, int numRandomQueries, IInvertedIndexSearchModifier searchModifier, int[] scanCountArray)
        throws IOException, HyracksDataException {
    IInvertedIndex invIndex = testCtx.invIndex;
    IInvertedIndexAccessor accessor = (IInvertedIndexAccessor) invIndex
            .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
    IBinaryTokenizer tokenizer = testCtx.getTokenizerFactory().createTokenizer();
    InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);
    List<ITupleReference> documentCorpus = testCtx.getDocumentCorpus();
    // Project away the primary-key field.
    int[] fieldPermutation = new int[] { 0 };
    PermutingTupleReference searchDocument = new PermutingTupleReference(fieldPermutation);
    int numQueries = numDocQueries + numRandomQueries;
    for (int i = 0; i < numQueries; i++) {
        // If number of documents in the corpus is less than numDocQueries, then replace the remaining ones with random queries.
        if (i >= numDocQueries || i >= documentCorpus.size()) {
            // Generate a random query.
            ITupleReference randomQuery = tupleGen.next();
            searchDocument.reset(randomQuery);
        } else {
            // Pick a random document from the corpus to use as the search query.
            int queryIndex = Math.abs(rnd.nextInt() % documentCorpus.size());
            searchDocument.reset(documentCorpus.get(queryIndex));
        }
        // Set query tuple in search predicate.
        searchPred.setQueryTuple(searchDocument);
        searchPred.setQueryFieldIndex(0);
        IIndexCursor resultCursor = accessor.createSearchCursor(false);
        boolean panic = false;
        try {
            accessor.search(resultCursor, searchPred);
        } catch (HyracksDataException e) {
            // Ignore panic queries (the modifier deemed the query too expensive); rethrow anything else.
            if (e.getErrorCode() == ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
                panic = true;
            } else {
                throw e;
            }
        }
        try {
            if (!panic) {
                // Consume cursor and deserialize results so we can sort them. Some search cursors may not deliver the result sorted (e.g., LSM search cursor).
                ArrayList<Integer> actualResults = new ArrayList<>();
                try {
                    while (resultCursor.hasNext()) {
                        resultCursor.next();
                        ITupleReference resultTuple = resultCursor.getTuple();
                        int actual = IntegerPointable.getInteger(resultTuple.getFieldData(0),
                                resultTuple.getFieldStart(0));
                        actualResults.add(Integer.valueOf(actual));
                    }
                } catch (HyracksDataException e) {
                    if (e.getErrorCode() == ErrorCode.OCCURRENCE_THRESHOLD_PANIC_EXCEPTION) {
                        // Ignore panic queries. The continue still runs the finally below,
                        // so the cursor is closed before moving to the next query.
                        continue;
                    } else {
                        throw e;
                    }
                }
                Collections.sort(actualResults);
                // Get expected results.
                List<Integer> expectedResults = new ArrayList<>();
                LSMInvertedIndexTestUtils.getExpectedResults(scanCountArray, testCtx.getCheckTuples(),
                        searchDocument, tokenizer, testCtx.getFieldSerdes()[0], searchModifier, expectedResults,
                        testCtx.getInvertedIndexType());
                // Both lists are sorted; compare them pairwise, then check for leftovers.
                Iterator<Integer> expectedIter = expectedResults.iterator();
                Iterator<Integer> actualIter = actualResults.iterator();
                while (expectedIter.hasNext() && actualIter.hasNext()) {
                    int expected = expectedIter.next();
                    int actual = actualIter.next();
                    if (actual != expected) {
                        fail("Query results do not match. Encountered: " + actual + ". Expected: " + expected + "");
                    }
                }
                if (expectedIter.hasNext()) {
                    fail("Query results do not match. Actual results missing.");
                }
                if (actualIter.hasNext()) {
                    fail("Query results do not match. Actual contains too many results.");
                }
            }
        } finally {
            resultCursor.close();
        }
    }
}
}
| heriram/incubator-asterixdb | hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestUtils.java | Java | apache-2.0 | 31,192 |
/**
* Copyright Acropolis Software SPRL (https://www.acrosoft.be)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package be.acrosoft.gaia.shared.dispatch;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * This annotation is used on {@link Listener} implementations to indicate that instances of these
 * listeners should be stored as {@link java.lang.ref.WeakReference} as much as possible, allowing for
 * their collection even though they are registered as listeners in listener groups.
 * @see Listener
 * @see java.lang.ref.WeakReference
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface WeakListener
{
}
| acrosoft-be/shared | Dispatch/src/main/java/be/acrosoft/gaia/shared/dispatch/WeakListener.java | Java | apache-2.0 | 1,283 |
/*******************************************************************************
* Copyright 2013-2014 Gengyu (Univer) Shi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.shigengyu.hyperion.core;
/**
 * The common interface for all compensators. Compensators need to be designed as stateless.
 *
 * @author Gengyu (Univer) Shi
 *
 */
public interface TransitionCompensator {

    /**
     * Indicates whether this compensator is able to handle the given exception.
     *
     * @param exception the exception raised during a workflow transition
     * @return {@code true} if this compensator can compensate for the exception
     */
    boolean canHandle(Exception exception);

    /**
     * Performs compensation on the given workflow instance.
     *
     * @param workflowInstance the workflow instance to compensate
     * @return the result of the compensation attempt
     */
    TransitionCompensationResult compensate(WorkflowInstance workflowInstance);
}
| shigengyu/Hyperion | src/main/java/com/shigengyu/hyperion/core/TransitionCompensator.java | Java | apache-2.0 | 1,103 |
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Defines interface for DB access.
Functions in this module are imported into the nova.db namespace. Call these
functions from nova.db namespace, not the nova.db.api namespace.
All functions in this module return objects that implement a dictionary-like
interface. Currently, many of these objects are sqlalchemy objects that
implement a dictionary interface. However, a future goal is to have all of
these objects be simple dictionaries.
"""
from eventlet import tpool
from oslo.config import cfg
from nova.cells import rpcapi as cells_rpcapi
from nova.openstack.common.db import api as db_api
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
# Options controlling service-pool behavior and the templates used to
# generate instance/snapshot names.
db_opts = [
    cfg.BoolOpt('enable_new_services',
                default=True,
                help='Services to be added to the available pool on create'),
    cfg.StrOpt('instance_name_template',
               default='instance-%08x',
               help='Template string to be used to generate instance names'),
    cfg.StrOpt('snapshot_name_template',
               default='snapshot-%s',
               help='Template string to be used to generate snapshot names'),
]
# Experimental option to run all DB API calls through eventlet's thread pool.
tpool_opts = [
    cfg.BoolOpt('use_tpool',
                default=False,
                deprecated_name='dbapi_use_tpool',
                deprecated_group='DEFAULT',
                help='Enable the experimental use of thread pooling for '
                     'all DB API calls'),
]
CONF = cfg.CONF
CONF.register_opts(db_opts)
CONF.register_opts(tpool_opts, 'database')
CONF.import_opt('backend', 'nova.openstack.common.db.options',
                group='database')
# Maps the configured backend name to its implementing module.
_BACKEND_MAPPING = {'sqlalchemy': 'nova.db.sqlalchemy.api'}
class NovaDBAPI(object):
    """Nova's DB API wrapper class.

    This wraps the oslo DB API with an option to be able to use eventlet's
    thread pooling. Since the CONF variable may not be loaded at the time
    this class is instantiated, we must look at it on the first DB API call.
    """

    def __init__(self):
        self.__db_api = None

    @property
    def _db_api(self):
        # Build the backend lazily so CONF is fully loaded by the time it
        # is first consulted.
        if not self.__db_api:
            backend = db_api.DBAPI(CONF.database.backend,
                                   backend_mapping=_BACKEND_MAPPING)
            if CONF.database.use_tpool:
                backend = tpool.Proxy(backend)
            self.__db_api = backend
        return self.__db_api

    def __getattr__(self, key):
        # Delegate every unknown attribute lookup to the lazily-created backend.
        return getattr(self._db_api, key)
# Singleton entry point used by all the delegation wrappers below.
IMPL = NovaDBAPI()
LOG = logging.getLogger(__name__)
# The maximum value a signed INT type may have
MAX_INT = 0x7FFFFFFF
###################
# Helpers for building update constraints; all work is delegated to the
# configured backend (IMPL).
def constraint(**conditions):
    """Return a constraint object suitable for use with some updates."""
    return IMPL.constraint(**conditions)


def equal_any(*values):
    """Return an equality condition object suitable for use in a constraint.

    Equal_any conditions require that a model object's attribute equal any
    one of the given values.
    """
    return IMPL.equal_any(*values)


def not_equal(*values):
    """Return an inequality condition object suitable for use in a constraint.

    Not_equal conditions require that a model object's attribute differs from
    all of the given values.
    """
    return IMPL.not_equal(*values)


###################
# Service API: thin wrappers that delegate to the configured backend (IMPL).
def service_destroy(context, service_id):
    """Destroy the service or raise if it does not exist."""
    return IMPL.service_destroy(context, service_id)


def service_get(context, service_id, with_compute_node=False):
    """Get a service or raise if it does not exist."""
    return IMPL.service_get(context, service_id,
                            with_compute_node=with_compute_node)


def service_get_by_host_and_topic(context, host, topic):
    """Get a service by host it's on and topic it listens to."""
    return IMPL.service_get_by_host_and_topic(context, host, topic)


def service_get_all(context, disabled=None):
    """Get all services."""
    return IMPL.service_get_all(context, disabled)


def service_get_all_by_topic(context, topic):
    """Get all services for a given topic."""
    return IMPL.service_get_all_by_topic(context, topic)


def service_get_all_by_host(context, host):
    """Get all services for a given host."""
    return IMPL.service_get_all_by_host(context, host)


def service_get_by_compute_host(context, host):
    """Get the service entry for a given compute host.

    Returns the service entry joined with the compute_node entry.
    """
    return IMPL.service_get_by_compute_host(context, host)


def service_get_by_args(context, host, binary):
    """Get the state of a service by node name and binary."""
    return IMPL.service_get_by_args(context, host, binary)


def service_create(context, values):
    """Create a service from the values dictionary."""
    return IMPL.service_create(context, values)


def service_update(context, service_id, values):
    """Set the given properties on a service and update it.

    Raises NotFound if service does not exist.
    """
    return IMPL.service_update(context, service_id, values)


###################
# Compute-node API: thin wrappers that delegate to the configured backend (IMPL).
def compute_node_get(context, compute_id):
    """Get a compute node by its id.

    :param context: The security context
    :param compute_id: ID of the compute node

    :returns: Dictionary-like object containing properties of the compute node,
              including its corresponding service

    Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
    """
    return IMPL.compute_node_get(context, compute_id)


def compute_node_get_by_service_id(context, service_id):
    """Get a compute node by its associated service id.

    :param context: The security context
    :param service_id: ID of the associated service

    :returns: Dictionary-like object containing properties of the compute node,
              including its corresponding service and statistics

    Raises ServiceNotFound if service with the given ID doesn't exist.
    """
    return IMPL.compute_node_get_by_service_id(context, service_id)


def compute_node_get_all(context, no_date_fields=False):
    """Get all computeNodes.

    :param context: The security context
    :param no_date_fields: If set to True, excludes 'created_at', 'updated_at',
                           'deleted_at' and 'deleted' fields from the output,
                           thus significantly reducing its size.
                           Set to False by default

    :returns: List of dictionaries each containing compute node properties,
              including corresponding service
    """
    return IMPL.compute_node_get_all(context, no_date_fields)


def compute_node_search_by_hypervisor(context, hypervisor_match):
    """Get compute nodes by hypervisor hostname.

    :param context: The security context
    :param hypervisor_match: The hypervisor hostname

    :returns: List of dictionary-like objects each containing compute node
              properties, including corresponding service
    """
    return IMPL.compute_node_search_by_hypervisor(context, hypervisor_match)


def compute_node_create(context, values):
    """Create a compute node from the values dictionary.

    :param context: The security context
    :param values: Dictionary containing compute node properties

    :returns: Dictionary-like object containing the properties of the created
              node, including its corresponding service and statistics
    """
    return IMPL.compute_node_create(context, values)


def compute_node_update(context, compute_id, values):
    """Set the given properties on a compute node and update it.

    :param context: The security context
    :param compute_id: ID of the compute node
    :param values: Dictionary containing compute node properties to be updated

    :returns: Dictionary-like object containing the properties of the updated
              compute node, including its corresponding service and statistics

    Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
    """
    return IMPL.compute_node_update(context, compute_id, values)


def compute_node_delete(context, compute_id):
    """Delete a compute node from the database.

    :param context: The security context
    :param compute_id: ID of the compute node

    Raises ComputeHostNotFound if compute node with the given ID doesn't exist.
    """
    return IMPL.compute_node_delete(context, compute_id)


def compute_node_statistics(context):
    """Get aggregate statistics over all compute nodes.

    :param context: The security context

    :returns: Dictionary containing compute node characteristics summed up
              over all the compute nodes, e.g. 'vcpus', 'free_ram_mb' etc.
    """
    return IMPL.compute_node_statistics(context)


###################
# Certificate API: thin wrappers that delegate to the configured backend (IMPL).
def certificate_create(context, values):
    """Create a certificate from the values dictionary."""
    return IMPL.certificate_create(context, values)


def certificate_get_all_by_project(context, project_id):
    """Get all certificates for a project."""
    return IMPL.certificate_get_all_by_project(context, project_id)


def certificate_get_all_by_user(context, user_id):
    """Get all certificates for a user."""
    return IMPL.certificate_get_all_by_user(context, user_id)


def certificate_get_all_by_user_and_project(context, user_id, project_id):
    """Get all certificates for a user and project."""
    return IMPL.certificate_get_all_by_user_and_project(context,
                                                        user_id,
                                                        project_id)


###################
# Floating-IP and DNS-domain API: thin wrappers that delegate to the
# configured backend (IMPL).
def floating_ip_get(context, id):
    """Get a floating ip by its id."""
    return IMPL.floating_ip_get(context, id)


def floating_ip_get_pools(context):
    """Returns a list of floating ip pools."""
    return IMPL.floating_ip_get_pools(context)


def floating_ip_allocate_address(context, project_id, pool,
                                 auto_assigned=False):
    """Allocate free floating ip from specified pool and return the address.

    Raises if one is not available.
    """
    return IMPL.floating_ip_allocate_address(context, project_id, pool,
                                             auto_assigned)


def floating_ip_bulk_create(context, ips):
    """Create a lot of floating ips from the values dictionary."""
    return IMPL.floating_ip_bulk_create(context, ips)


def floating_ip_bulk_destroy(context, ips):
    """Destroy a lot of floating ips from the values dictionary."""
    return IMPL.floating_ip_bulk_destroy(context, ips)


def floating_ip_create(context, values):
    """Create a floating ip from the values dictionary."""
    return IMPL.floating_ip_create(context, values)


def floating_ip_deallocate(context, address):
    """Deallocate a floating ip by address."""
    return IMPL.floating_ip_deallocate(context, address)


def floating_ip_destroy(context, address):
    """Destroy the floating_ip or raise if it does not exist."""
    return IMPL.floating_ip_destroy(context, address)


def floating_ip_disassociate(context, address):
    """Disassociate a floating ip from a fixed ip by address.

    :returns: the fixed ip record joined to network record or None
              if the ip was not associated to an ip.
    """
    return IMPL.floating_ip_disassociate(context, address)


def floating_ip_fixed_ip_associate(context, floating_address,
                                   fixed_address, host):
    """Associate a floating ip to a fixed_ip by address.

    :returns: the fixed ip record joined to network record or None
              if the ip was already associated to the fixed ip.
    """
    return IMPL.floating_ip_fixed_ip_associate(context,
                                               floating_address,
                                               fixed_address,
                                               host)


def floating_ip_get_all(context):
    """Get all floating ips."""
    return IMPL.floating_ip_get_all(context)


def floating_ip_get_all_by_host(context, host):
    """Get all floating ips by host."""
    return IMPL.floating_ip_get_all_by_host(context, host)


def floating_ip_get_all_by_project(context, project_id):
    """Get all floating ips by project."""
    return IMPL.floating_ip_get_all_by_project(context, project_id)


def floating_ip_get_by_address(context, address):
    """Get a floating ip by address or raise if it doesn't exist."""
    return IMPL.floating_ip_get_by_address(context, address)


def floating_ip_get_by_fixed_address(context, fixed_address):
    """Get a floating ips by fixed address."""
    return IMPL.floating_ip_get_by_fixed_address(context, fixed_address)


def floating_ip_get_by_fixed_ip_id(context, fixed_ip_id):
    """Get a floating ips by fixed address."""
    return IMPL.floating_ip_get_by_fixed_ip_id(context, fixed_ip_id)


def floating_ip_update(context, address, values):
    """Update a floating ip by address or raise if it doesn't exist."""
    return IMPL.floating_ip_update(context, address, values)


def floating_ip_set_auto_assigned(context, address):
    """Set auto_assigned flag to floating ip."""
    return IMPL.floating_ip_set_auto_assigned(context, address)


def dnsdomain_list(context):
    """Get a list of all zones in our database, public and private."""
    return IMPL.dnsdomain_list(context)


def dnsdomain_get_all(context):
    """Get a list of all dnsdomains in our database."""
    return IMPL.dnsdomain_get_all(context)


def dnsdomain_register_for_zone(context, fqdomain, zone):
    """Associated a DNS domain with an availability zone."""
    return IMPL.dnsdomain_register_for_zone(context, fqdomain, zone)


def dnsdomain_register_for_project(context, fqdomain, project):
    """Associated a DNS domain with a project id."""
    return IMPL.dnsdomain_register_for_project(context, fqdomain, project)


def dnsdomain_unregister(context, fqdomain):
    """Purge associations for the specified DNS zone."""
    return IMPL.dnsdomain_unregister(context, fqdomain)


def dnsdomain_get(context, fqdomain):
    """Get the db record for the specified domain."""
    return IMPL.dnsdomain_get(context, fqdomain)


####################
# Migration API: thin wrappers that delegate to the configured backend (IMPL).
def migration_update(context, id, values):
    """Update a migration instance."""
    return IMPL.migration_update(context, id, values)


def migration_create(context, values):
    """Create a migration record."""
    return IMPL.migration_create(context, values)


def migration_get(context, migration_id):
    """Finds a migration by the id."""
    return IMPL.migration_get(context, migration_id)


def migration_get_by_instance_and_status(context, instance_uuid, status):
    """Finds a migration by the instance uuid its migrating."""
    return IMPL.migration_get_by_instance_and_status(context, instance_uuid,
                                                     status)


def migration_get_unconfirmed_by_dest_compute(context, confirm_window,
                                              dest_compute, use_slave=False):
    """Finds all unconfirmed migrations within the confirmation window for
    a specific destination compute host.
    """
    return IMPL.migration_get_unconfirmed_by_dest_compute(context,
            confirm_window, dest_compute, use_slave=use_slave)


def migration_get_in_progress_by_host_and_node(context, host, node):
    """Finds all migrations for the given host + node that are not yet
    confirmed or reverted.
    """
    return IMPL.migration_get_in_progress_by_host_and_node(context, host, node)


def migration_get_all_by_filters(context, filters):
    """Finds all migrations in progress."""
    return IMPL.migration_get_all_by_filters(context, filters)


####################
# Fixed-IP API: thin wrappers that delegate to the configured backend (IMPL).
def fixed_ip_associate(context, address, instance_uuid, network_id=None,
                       reserved=False):
    """Associate fixed ip to instance.

    Raises if fixed ip is not available.
    """
    return IMPL.fixed_ip_associate(context, address, instance_uuid, network_id,
                                   reserved)


def fixed_ip_associate_pool(context, network_id, instance_uuid=None,
                            host=None):
    """Find free ip in network and associate it to instance or host.

    Raises if one is not available.
    """
    return IMPL.fixed_ip_associate_pool(context, network_id,
                                        instance_uuid, host)


def fixed_ip_create(context, values):
    """Create a fixed ip from the values dictionary."""
    return IMPL.fixed_ip_create(context, values)


def fixed_ip_bulk_create(context, ips):
    """Create a lot of fixed ips from the values dictionary."""
    return IMPL.fixed_ip_bulk_create(context, ips)


def fixed_ip_disassociate(context, address):
    """Disassociate a fixed ip from an instance by address."""
    return IMPL.fixed_ip_disassociate(context, address)


def fixed_ip_disassociate_all_by_timeout(context, host, time):
    """Disassociate old fixed ips from host."""
    return IMPL.fixed_ip_disassociate_all_by_timeout(context, host, time)


def fixed_ip_get(context, id, get_network=False):
    """Get fixed ip by id or raise if it does not exist.

    If get_network is true, also return the associated network.
    """
    return IMPL.fixed_ip_get(context, id, get_network)


def fixed_ip_get_all(context):
    """Get all defined fixed ips."""
    return IMPL.fixed_ip_get_all(context)


def fixed_ip_get_by_address(context, address, columns_to_join=None):
    """Get a fixed ip by address or raise if it does not exist."""
    return IMPL.fixed_ip_get_by_address(context, address,
                                        columns_to_join=columns_to_join)


def fixed_ip_get_by_address_detailed(context, address):
    """Get detailed fixed ip info by address or raise if it does not exist."""
    return IMPL.fixed_ip_get_by_address_detailed(context, address)


def fixed_ip_get_by_floating_address(context, floating_address):
    """Get a fixed ip by a floating address."""
    return IMPL.fixed_ip_get_by_floating_address(context, floating_address)


def fixed_ip_get_by_instance(context, instance_uuid):
    """Get fixed ips by instance or raise if none exist."""
    return IMPL.fixed_ip_get_by_instance(context, instance_uuid)


def fixed_ip_get_by_host(context, host):
    """Get fixed ips by compute host."""
    return IMPL.fixed_ip_get_by_host(context, host)


def fixed_ip_get_by_network_host(context, network_uuid, host):
    """Get fixed ip for a host in a network."""
    return IMPL.fixed_ip_get_by_network_host(context, network_uuid, host)


def fixed_ips_by_virtual_interface(context, vif_id):
    """Get fixed ips by virtual interface or raise if none exist."""
    return IMPL.fixed_ips_by_virtual_interface(context, vif_id)


def fixed_ip_update(context, address, values):
    """Create a fixed ip from the values dictionary."""
    return IMPL.fixed_ip_update(context, address, values)


####################
def virtual_interface_create(context, values):
    """Create a virtual interface record in the database."""
    return IMPL.virtual_interface_create(context, values)


def virtual_interface_get(context, vif_id):
    """Gets a virtual interface from the table."""
    return IMPL.virtual_interface_get(context, vif_id)


def virtual_interface_get_by_address(context, address):
    """Gets a virtual interface from the table filtering on address."""
    return IMPL.virtual_interface_get_by_address(context, address)


def virtual_interface_get_by_uuid(context, vif_uuid):
    """Gets a virtual interface from the table filtering on vif uuid."""
    return IMPL.virtual_interface_get_by_uuid(context, vif_uuid)


def virtual_interface_get_by_instance(context, instance_id, use_slave=False):
    """Gets all virtual_interfaces for instance.

    :param use_slave: if True, allow the backend to read from a slave
        database connection
    """
    return IMPL.virtual_interface_get_by_instance(context, instance_id,
                                                  use_slave=use_slave)


def virtual_interface_get_by_instance_and_network(context, instance_id,
                                                  network_id):
    """Gets the virtual interface the instance has on the given network."""
    return IMPL.virtual_interface_get_by_instance_and_network(context,
                                                              instance_id,
                                                              network_id)


def virtual_interface_delete_by_instance(context, instance_id):
    """Delete virtual interface records associated with instance."""
    return IMPL.virtual_interface_delete_by_instance(context, instance_id)


def virtual_interface_get_all(context):
    """Gets all virtual interfaces from the table."""
    return IMPL.virtual_interface_get_all(context)
####################
def instance_create(context, values):
    """Create an instance from the values dictionary.

    :param values: dict of column values for the new instance row
    """
    return IMPL.instance_create(context, values)
def instance_destroy(context, instance_uuid, constraint=None,
                     update_cells=True):
    """Destroy the instance or raise if it does not exist.

    When update_cells is True, nova-cells is notified on a best-effort
    basis: any failure is logged and otherwise ignored.
    """
    destroyed = IMPL.instance_destroy(context, instance_uuid, constraint)
    if not update_cells:
        return destroyed
    try:
        cells_rpcapi.CellsAPI().instance_destroy_at_top(context, destroyed)
    except Exception:
        LOG.exception(_("Failed to notify cells of instance destroy"))
    return destroyed
def instance_get_by_uuid(context, uuid, columns_to_join=None, use_slave=False):
    """Get an instance or raise if it does not exist.

    :param columns_to_join: optional list of related columns to join
    :param use_slave: if True, allow the backend to read from a slave
        database connection
    """
    return IMPL.instance_get_by_uuid(context, uuid,
                                     columns_to_join, use_slave=use_slave)


def instance_get(context, instance_id, columns_to_join=None):
    """Get an instance or raise if it does not exist."""
    return IMPL.instance_get(context, instance_id,
                             columns_to_join=columns_to_join)


def instance_get_all(context, columns_to_join=None):
    """Get all instances."""
    return IMPL.instance_get_all(context, columns_to_join=columns_to_join)


def instance_get_all_by_filters(context, filters, sort_key='created_at',
                                sort_dir='desc', limit=None, marker=None,
                                columns_to_join=None, use_slave=False):
    """Get all instances that match all filters.

    Supports sorting, pagination (limit/marker) and optional joins; all
    semantics are delegated to the backend implementation.
    """
    return IMPL.instance_get_all_by_filters(context, filters, sort_key,
                                            sort_dir, limit=limit,
                                            marker=marker,
                                            columns_to_join=columns_to_join,
                                            use_slave=use_slave)


def instance_get_active_by_window_joined(context, begin, end=None,
                                         project_id=None, host=None):
    """Get instances and joins active during a certain time window.

    Specifying a project_id will filter for a certain project.
    Specifying a host will filter for instances on a given compute host.
    """
    return IMPL.instance_get_active_by_window_joined(context, begin, end,
                                                     project_id, host)
def instance_get_all_by_host(context, host,
                             columns_to_join=None, use_slave=False):
    """Get all instances belonging to a host."""
    return IMPL.instance_get_all_by_host(context, host,
                                         columns_to_join,
                                         use_slave=use_slave)


def instance_get_all_by_host_and_node(context, host, node):
    """Get all instances belonging to a node."""
    return IMPL.instance_get_all_by_host_and_node(context, host, node)


def instance_get_all_by_host_and_not_type(context, host, type_id=None):
    """Get all instances belonging to a host with a different type_id."""
    return IMPL.instance_get_all_by_host_and_not_type(context, host, type_id)


def instance_get_floating_address(context, instance_id):
    """Get the first floating ip address of an instance."""
    return IMPL.instance_get_floating_address(context, instance_id)


def instance_floating_address_get_all(context, instance_uuid):
    """Get all floating ip addresses of an instance."""
    return IMPL.instance_floating_address_get_all(context, instance_uuid)


# NOTE(hanlind): This method can be removed as conductor RPC API moves to v2.0.
def instance_get_all_hung_in_rebooting(context, reboot_window):
    """Get all instances stuck in a rebooting state.

    :param reboot_window: cutoff interpreted by the backend; instances
        rebooting longer than this are considered hung
    """
    return IMPL.instance_get_all_hung_in_rebooting(context, reboot_window)
def instance_update(context, instance_uuid, values, update_cells=True):
    """Set the given properties on an instance and update it.

    Raises NotFound if instance does not exist.

    When update_cells is True, nova-cells is notified on a best-effort
    basis: any failure is logged and otherwise ignored.
    """
    updated = IMPL.instance_update(context, instance_uuid, values)
    if not update_cells:
        return updated
    try:
        cells_rpcapi.CellsAPI().instance_update_at_top(context, updated)
    except Exception:
        LOG.exception(_("Failed to notify cells of instance update"))
    return updated
# FIXME(comstud): 'update_cells' is temporary as we transition to using
# objects. When everything is using Instance.save(), we can remove the
# argument and the RPC to nova-cells.
def instance_update_and_get_original(context, instance_uuid, values,
                                     update_cells=True,
                                     columns_to_join=None):
    """Set the given properties on an instance and update it. Return
    a shallow copy of the original instance reference, as well as the
    updated one.

    :param context: = request context object
    :param instance_uuid: = instance id or uuid
    :param values: = dict containing column values
    :returns: a tuple of the form (old_instance_ref, new_instance_ref)

    Raises NotFound if instance does not exist.
    """
    result = IMPL.instance_update_and_get_original(
        context, instance_uuid, values, columns_to_join=columns_to_join)
    if not update_cells:
        return result
    try:
        # result[1] is the updated (new) instance reference.
        cells_rpcapi.CellsAPI().instance_update_at_top(context, result[1])
    except Exception:
        LOG.exception(_("Failed to notify cells of instance update"))
    return result
def instance_add_security_group(context, instance_id, security_group_id):
    """Associate the given security group with the given instance."""
    return IMPL.instance_add_security_group(context, instance_id,
                                            security_group_id)


def instance_remove_security_group(context, instance_id, security_group_id):
    """Disassociate the given security group from the given instance."""
    return IMPL.instance_remove_security_group(context, instance_id,
                                               security_group_id)
####################
def instance_group_create(context, values, policies=None, metadata=None,
                          members=None):
    """Create a new group with metadata.

    Each group will receive a unique uuid. This will be used for access to the
    group.
    """
    return IMPL.instance_group_create(context, values, policies, metadata,
                                      members)


def instance_group_get(context, group_uuid):
    """Get a specific group by id."""
    return IMPL.instance_group_get(context, group_uuid)


def instance_group_update(context, group_uuid, values):
    """Update the attributes of a group."""
    return IMPL.instance_group_update(context, group_uuid, values)


def instance_group_delete(context, group_uuid):
    """Delete a group."""
    return IMPL.instance_group_delete(context, group_uuid)


def instance_group_get_all(context):
    """Get all groups."""
    return IMPL.instance_group_get_all(context)


def instance_group_get_all_by_project_id(context, project_id):
    """Get all groups for a specific project_id."""
    return IMPL.instance_group_get_all_by_project_id(context, project_id)
def instance_group_metadata_add(context, group_uuid, metadata,
                                set_delete=False):
    """Add metadata to the group.

    :param set_delete: if True, the backend replaces existing metadata
        rather than merging — TODO confirm against backend semantics
    """
    return IMPL.instance_group_metadata_add(context, group_uuid, metadata,
                                            set_delete)


def instance_group_metadata_delete(context, group_uuid, key):
    """Delete metadata from the group."""
    return IMPL.instance_group_metadata_delete(context, group_uuid, key)


def instance_group_metadata_get(context, group_uuid):
    """Get the metadata from the group."""
    return IMPL.instance_group_metadata_get(context, group_uuid)


def instance_group_members_add(context, group_uuid, members,
                               set_delete=False):
    """Add members to the group."""
    return IMPL.instance_group_members_add(context, group_uuid, members,
                                           set_delete=set_delete)


def instance_group_member_delete(context, group_uuid, instance_id):
    """Delete a specific member from the group."""
    return IMPL.instance_group_member_delete(context, group_uuid, instance_id)


def instance_group_members_get(context, group_uuid):
    """Get the members from the group."""
    return IMPL.instance_group_members_get(context, group_uuid)


def instance_group_policies_add(context, group_uuid, policies,
                                set_delete=False):
    """Add policies to the group."""
    return IMPL.instance_group_policies_add(context, group_uuid, policies,
                                            set_delete=set_delete)


def instance_group_policy_delete(context, group_uuid, policy):
    """Delete a specific policy from the group."""
    return IMPL.instance_group_policy_delete(context, group_uuid, policy)


def instance_group_policies_get(context, group_uuid):
    """Get the policies from the group."""
    return IMPL.instance_group_policies_get(context, group_uuid)
###################
def instance_info_cache_get(context, instance_uuid):
    """Gets an instance info cache from the table.

    :param instance_uuid: = uuid of the info cache's instance
    """
    return IMPL.instance_info_cache_get(context, instance_uuid)


def instance_info_cache_update(context, instance_uuid, values):
    """Update an instance info cache record in the table.

    :param instance_uuid: = uuid of info cache's instance
    :param values: = dict containing column values to update
    """
    return IMPL.instance_info_cache_update(context, instance_uuid, values)


def instance_info_cache_delete(context, instance_uuid):
    """Deletes an existing instance_info_cache record

    :param instance_uuid: = uuid of the instance tied to the cache record
    """
    return IMPL.instance_info_cache_delete(context, instance_uuid)
###################
def key_pair_create(context, values):
    """Create a key_pair from the values dictionary."""
    return IMPL.key_pair_create(context, values)


def key_pair_destroy(context, user_id, name):
    """Destroy the key_pair or raise if it does not exist."""
    return IMPL.key_pair_destroy(context, user_id, name)


def key_pair_get(context, user_id, name):
    """Get a key_pair or raise if it does not exist."""
    return IMPL.key_pair_get(context, user_id, name)


def key_pair_get_all_by_user(context, user_id):
    """Get all key_pairs by user."""
    return IMPL.key_pair_get_all_by_user(context, user_id)


def key_pair_count_by_user(context, user_id):
    """Count number of key pairs for the given user ID."""
    return IMPL.key_pair_count_by_user(context, user_id)
####################
def network_associate(context, project_id, network_id=None, force=False):
    """Associate a free network to a project."""
    return IMPL.network_associate(context, project_id, network_id, force)


def network_count_reserved_ips(context, network_id):
    """Return the number of reserved ips in the network."""
    return IMPL.network_count_reserved_ips(context, network_id)


def network_create_safe(context, values):
    """Create a network from the values dict.

    The network is only returned if the create succeeds. If the create violates
    constraints because the network already exists, no exception is raised.
    """
    return IMPL.network_create_safe(context, values)


def network_delete_safe(context, network_id):
    """Delete network with key network_id.

    This method assumes that the network is not associated with any project
    """
    return IMPL.network_delete_safe(context, network_id)


def network_disassociate(context, network_id, disassociate_host=True,
                         disassociate_project=True):
    """Disassociate the network from project or host

    Raises if it does not exist.
    """
    return IMPL.network_disassociate(context, network_id, disassociate_host,
                                     disassociate_project)
def network_get(context, network_id, project_only="allow_none"):
    """Get a network or raise if it does not exist.

    :param project_only: project scoping mode passed through to the
        backend (default "allow_none")
    """
    return IMPL.network_get(context, network_id, project_only=project_only)


def network_get_all(context, project_only="allow_none"):
    """Return all defined networks."""
    return IMPL.network_get_all(context, project_only)


def network_get_all_by_uuids(context, network_uuids,
                             project_only="allow_none"):
    """Return networks by ids."""
    return IMPL.network_get_all_by_uuids(context, network_uuids,
                                         project_only=project_only)


# pylint: disable=C0103
def network_in_use_on_host(context, network_id, host=None):
    """Indicates if a network is currently in use on host."""
    return IMPL.network_in_use_on_host(context, network_id, host)


def network_get_associated_fixed_ips(context, network_id, host=None):
    """Get all network's ips that have been associated."""
    return IMPL.network_get_associated_fixed_ips(context, network_id, host)


def network_get_by_uuid(context, uuid):
    """Get a network by uuid or raise if it does not exist."""
    return IMPL.network_get_by_uuid(context, uuid)


def network_get_by_cidr(context, cidr):
    """Get a network by cidr or raise if it does not exist."""
    return IMPL.network_get_by_cidr(context, cidr)


def network_get_all_by_host(context, host):
    """All networks for which the given host is the network host."""
    return IMPL.network_get_all_by_host(context, host)


def network_set_host(context, network_id, host_id):
    """Safely set the host for network."""
    return IMPL.network_set_host(context, network_id, host_id)


def network_update(context, network_id, values):
    """Set the given properties on a network and update it.

    Raises NotFound if network does not exist.
    """
    return IMPL.network_update(context, network_id, values)
###############
def quota_create(context, project_id, resource, limit, user_id=None):
    """Create a quota for the given project and resource.

    :param user_id: if given, the quota is scoped to this user within
        the project
    """
    return IMPL.quota_create(context, project_id, resource, limit,
                             user_id=user_id)


def quota_get(context, project_id, resource, user_id=None):
    """Retrieve a quota or raise if it does not exist."""
    return IMPL.quota_get(context, project_id, resource, user_id=user_id)


def quota_get_all_by_project_and_user(context, project_id, user_id):
    """Retrieve all quotas associated with a given project and user."""
    return IMPL.quota_get_all_by_project_and_user(context, project_id, user_id)


def quota_get_all_by_project(context, project_id):
    """Retrieve all quotas associated with a given project."""
    return IMPL.quota_get_all_by_project(context, project_id)


def quota_get_all(context, project_id):
    """Retrieve all user quotas associated with a given project."""
    return IMPL.quota_get_all(context, project_id)


def quota_update(context, project_id, resource, limit, user_id=None):
    """Update a quota or raise if it does not exist."""
    return IMPL.quota_update(context, project_id, resource, limit,
                             user_id=user_id)
###################
def quota_usage_get(context, project_id, resource, user_id=None):
    """Retrieve a quota usage or raise if it does not exist."""
    return IMPL.quota_usage_get(context, project_id, resource, user_id=user_id)


def quota_usage_get_all_by_project_and_user(context, project_id, user_id):
    """Retrieve all usage associated with a given project and user."""
    return IMPL.quota_usage_get_all_by_project_and_user(context,
                                                        project_id, user_id)


def quota_usage_get_all_by_project(context, project_id):
    """Retrieve all usage associated with a given project."""
    return IMPL.quota_usage_get_all_by_project(context, project_id)


def quota_usage_update(context, project_id, user_id, resource, **kwargs):
    """Update a quota usage or raise if it does not exist.

    :param kwargs: column updates forwarded verbatim to the backend
    """
    return IMPL.quota_usage_update(context, project_id, user_id, resource,
                                   **kwargs)
###################
def quota_reserve(context, resources, quotas, user_quotas, deltas, expire,
                  until_refresh, max_age, project_id=None, user_id=None):
    """Check quotas and create appropriate reservations.

    All quota-checking semantics (expiry, refresh, age limits) are
    delegated to the backend implementation.
    """
    return IMPL.quota_reserve(context, resources, quotas, user_quotas, deltas,
                              expire, until_refresh, max_age,
                              project_id=project_id, user_id=user_id)


def reservation_commit(context, reservations, project_id=None, user_id=None):
    """Commit quota reservations."""
    return IMPL.reservation_commit(context, reservations,
                                   project_id=project_id,
                                   user_id=user_id)


def reservation_rollback(context, reservations, project_id=None, user_id=None):
    """Roll back quota reservations."""
    return IMPL.reservation_rollback(context, reservations,
                                     project_id=project_id,
                                     user_id=user_id)


def quota_destroy_all_by_project_and_user(context, project_id, user_id):
    """Destroy all quotas associated with a given project and user."""
    return IMPL.quota_destroy_all_by_project_and_user(context,
                                                      project_id, user_id)


def quota_destroy_all_by_project(context, project_id):
    """Destroy all quotas associated with a given project."""
    return IMPL.quota_destroy_all_by_project(context, project_id)


def reservation_expire(context):
    """Roll back any expired reservations."""
    return IMPL.reservation_expire(context)
###################
def ec2_volume_create(context, volume_id, forced_id=None):
    """Create an ec2 id mapping for the given volume."""
    return IMPL.ec2_volume_create(context, volume_id, forced_id)


def ec2_volume_get_by_id(context, volume_id):
    """Get the ec2 id mapping for a volume by its ec2 id."""
    return IMPL.ec2_volume_get_by_id(context, volume_id)


def ec2_volume_get_by_uuid(context, volume_uuid):
    """Get the ec2 id mapping for a volume by its uuid."""
    return IMPL.ec2_volume_get_by_uuid(context, volume_uuid)


def get_snapshot_uuid_by_ec2_id(context, ec2_id):
    """Get the snapshot uuid that maps to the given ec2 id."""
    return IMPL.get_snapshot_uuid_by_ec2_id(context, ec2_id)


def get_ec2_snapshot_id_by_uuid(context, snapshot_id):
    """Get the ec2 id that maps to the given snapshot uuid."""
    return IMPL.get_ec2_snapshot_id_by_uuid(context, snapshot_id)


def ec2_snapshot_create(context, snapshot_id, forced_id=None):
    """Create an ec2 id mapping for the given snapshot."""
    return IMPL.ec2_snapshot_create(context, snapshot_id, forced_id)
####################
def block_device_mapping_create(context, values, legacy=True):
    """Create an entry of block device mapping.

    :param legacy: if True, values are treated as legacy-format block
        device mapping by the backend
    """
    return IMPL.block_device_mapping_create(context, values, legacy)


def block_device_mapping_update(context, bdm_id, values, legacy=True):
    """Update an entry of block device mapping."""
    return IMPL.block_device_mapping_update(context, bdm_id, values, legacy)


def block_device_mapping_update_or_create(context, values, legacy=True):
    """Update an entry of block device mapping.

    If not existed, create a new entry
    """
    return IMPL.block_device_mapping_update_or_create(context, values, legacy)


def block_device_mapping_get_all_by_instance(context, instance_uuid,
                                             use_slave=False):
    """Get all block device mapping belonging to an instance."""
    return IMPL.block_device_mapping_get_all_by_instance(context,
                                                         instance_uuid,
                                                         use_slave)


def block_device_mapping_get_by_volume_id(context, volume_id,
                                          columns_to_join=None):
    """Get block device mapping for a given volume."""
    return IMPL.block_device_mapping_get_by_volume_id(context, volume_id,
                                                      columns_to_join)


def block_device_mapping_destroy(context, bdm_id):
    """Destroy the block device mapping."""
    return IMPL.block_device_mapping_destroy(context, bdm_id)


def block_device_mapping_destroy_by_instance_and_device(context, instance_uuid,
                                                        device_name):
    """Destroy the block device mapping matching instance and device name."""
    return IMPL.block_device_mapping_destroy_by_instance_and_device(
        context, instance_uuid, device_name)


def block_device_mapping_destroy_by_instance_and_volume(context, instance_uuid,
                                                        volume_id):
    """Destroy the block device mapping matching instance and volume."""
    return IMPL.block_device_mapping_destroy_by_instance_and_volume(
        context, instance_uuid, volume_id)
####################
def security_group_get_all(context):
    """Get all security groups."""
    return IMPL.security_group_get_all(context)


def security_group_get(context, security_group_id, columns_to_join=None):
    """Get security group by its id.

    :param columns_to_join: optional list of related columns to join
    """
    return IMPL.security_group_get(context, security_group_id,
                                   columns_to_join)
def security_group_get_by_name(context, project_id, group_name,
                               columns_to_join=None):
    """Returns a security group with the specified name from a project.

    :param project_id: project that owns the security group
    :param group_name: name of the security group to look up
    :param columns_to_join: optional list of related columns to join,
        forwarded to the backend implementation
    """
    # Bug fix: the call previously hard-coded columns_to_join=None,
    # silently discarding the caller-supplied value; forward it instead.
    return IMPL.security_group_get_by_name(context, project_id, group_name,
                                           columns_to_join=columns_to_join)
def security_group_get_by_project(context, project_id):
    """Get all security groups belonging to a project."""
    return IMPL.security_group_get_by_project(context, project_id)


def security_group_get_by_instance(context, instance_uuid):
    """Get security groups to which the instance is assigned."""
    return IMPL.security_group_get_by_instance(context, instance_uuid)


def security_group_in_use(context, group_id):
    """Indicates if a security group is currently in use."""
    return IMPL.security_group_in_use(context, group_id)


def security_group_create(context, values):
    """Create a new security group."""
    return IMPL.security_group_create(context, values)


def security_group_update(context, security_group_id, values,
                          columns_to_join=None):
    """Update a security group."""
    return IMPL.security_group_update(context, security_group_id, values,
                                      columns_to_join=columns_to_join)


def security_group_ensure_default(context):
    """Ensure default security group exists for a project_id.

    Returns a tuple with the first element being a bool indicating
    if the default security group previously existed. Second
    element is the dict used to create the default security group.
    """
    return IMPL.security_group_ensure_default(context)


def security_group_destroy(context, security_group_id):
    """Deletes a security group."""
    return IMPL.security_group_destroy(context, security_group_id)
####################
def security_group_rule_create(context, values):
    """Create a new security group rule."""
    return IMPL.security_group_rule_create(context, values)


def security_group_rule_get_by_security_group(context, security_group_id,
                                              columns_to_join=None):
    """Get all rules for a given security group."""
    return IMPL.security_group_rule_get_by_security_group(
        context, security_group_id, columns_to_join=columns_to_join)


def security_group_rule_get_by_security_group_grantee(context,
                                                      security_group_id):
    """Get all rules that grant access to the given security group."""
    return IMPL.security_group_rule_get_by_security_group_grantee(
        context,
        security_group_id)


def security_group_rule_destroy(context, security_group_rule_id):
    """Deletes a security group rule."""
    return IMPL.security_group_rule_destroy(context, security_group_rule_id)


def security_group_rule_get(context, security_group_rule_id):
    """Gets a security group rule."""
    return IMPL.security_group_rule_get(context, security_group_rule_id)


def security_group_rule_count_by_group(context, security_group_id):
    """Count rules in a given security group."""
    return IMPL.security_group_rule_count_by_group(context, security_group_id)
###################
def security_group_default_rule_get(context, security_group_rule_default_id):
    """Get a default security group rule by its id."""
    return IMPL.security_group_default_rule_get(
        context,
        security_group_rule_default_id)


def security_group_default_rule_destroy(context,
                                        security_group_rule_default_id):
    """Delete a default security group rule by its id."""
    return IMPL.security_group_default_rule_destroy(
        context, security_group_rule_default_id)


def security_group_default_rule_create(context, values):
    """Create a default security group rule from the values dictionary."""
    return IMPL.security_group_default_rule_create(context, values)


def security_group_default_rule_list(context):
    """List all default security group rules."""
    return IMPL.security_group_default_rule_list(context)
###################
def provider_fw_rule_create(context, rule):
    """Add a firewall rule at the provider level (all hosts & instances)."""
    return IMPL.provider_fw_rule_create(context, rule)


def provider_fw_rule_get_all(context):
    """Get all provider-level firewall rules."""
    return IMPL.provider_fw_rule_get_all(context)


def provider_fw_rule_destroy(context, rule_id):
    """Delete a provider firewall rule from the database."""
    return IMPL.provider_fw_rule_destroy(context, rule_id)


###################


def project_get_networks(context, project_id, associate=True):
    """Return the network associated with the project.

    If associate is true, it will attempt to associate a new
    network if one is not found, otherwise it returns None.
    """
    return IMPL.project_get_networks(context, project_id, associate)
###################
def console_pool_create(context, values):
    """Create console pool."""
    return IMPL.console_pool_create(context, values)


def console_pool_get_by_host_type(context, compute_host, proxy_host,
                                  console_type):
    """Fetch a console pool for a given proxy host, compute host, and type."""
    return IMPL.console_pool_get_by_host_type(context,
                                              compute_host,
                                              proxy_host,
                                              console_type)


def console_pool_get_all_by_host_type(context, host, console_type):
    """Fetch all pools for given proxy host and type."""
    return IMPL.console_pool_get_all_by_host_type(context,
                                                  host,
                                                  console_type)


def console_create(context, values):
    """Create a console."""
    return IMPL.console_create(context, values)


def console_delete(context, console_id):
    """Delete a console."""
    return IMPL.console_delete(context, console_id)


def console_get_by_pool_instance(context, pool_id, instance_uuid):
    """Get console entry for a given instance and pool."""
    return IMPL.console_get_by_pool_instance(context, pool_id, instance_uuid)


def console_get_all_by_instance(context, instance_uuid, columns_to_join=None):
    """Get consoles for a given instance."""
    return IMPL.console_get_all_by_instance(context, instance_uuid,
                                            columns_to_join)


def console_get(context, console_id, instance_uuid=None):
    """Get a specific console (possibly on a given instance)."""
    return IMPL.console_get(context, console_id, instance_uuid)
##################
def flavor_create(context, values, projects=None):
    """Create a new instance type.

    :param projects: optional list of projects to grant access to the
        new flavor
    """
    return IMPL.flavor_create(context, values, projects=projects)


def flavor_get_all(context, inactive=False, filters=None, sort_key='flavorid',
                   sort_dir='asc', limit=None, marker=None):
    """Get all instance flavors."""
    return IMPL.flavor_get_all(
        context, inactive=inactive, filters=filters, sort_key=sort_key,
        sort_dir=sort_dir, limit=limit, marker=marker)


def flavor_get(context, id):
    """Get instance type by id."""
    return IMPL.flavor_get(context, id)


def flavor_get_by_name(context, name):
    """Get instance type by name."""
    return IMPL.flavor_get_by_name(context, name)


def flavor_get_by_flavor_id(context, id, read_deleted=None):
    """Get instance type by flavor id."""
    return IMPL.flavor_get_by_flavor_id(context, id, read_deleted)


def flavor_destroy(context, name):
    """Delete an instance type."""
    return IMPL.flavor_destroy(context, name)


def flavor_access_get_by_flavor_id(context, flavor_id):
    """Get flavor access by flavor id."""
    return IMPL.flavor_access_get_by_flavor_id(context, flavor_id)


def flavor_access_add(context, flavor_id, project_id):
    """Add flavor access for project."""
    return IMPL.flavor_access_add(context, flavor_id, project_id)


def flavor_access_remove(context, flavor_id, project_id):
    """Remove flavor access for project."""
    return IMPL.flavor_access_remove(context, flavor_id, project_id)
def flavor_extra_specs_get(context, flavor_id):
    """Get all extra specs for an instance type."""
    return IMPL.flavor_extra_specs_get(context, flavor_id)


def flavor_extra_specs_get_item(context, flavor_id, key):
    """Get extra specs by key and flavor_id."""
    return IMPL.flavor_extra_specs_get_item(context, flavor_id, key)


def flavor_extra_specs_delete(context, flavor_id, key):
    """Delete the given extra specs item."""
    # NOTE: intentionally returns None, unlike most wrappers here.
    IMPL.flavor_extra_specs_delete(context, flavor_id, key)


def flavor_extra_specs_update_or_create(context, flavor_id,
                                        extra_specs):
    """Create or update instance type extra specs.

    This adds or modifies the key/value pairs specified in the
    extra specs dict argument
    """
    IMPL.flavor_extra_specs_update_or_create(context, flavor_id,
                                             extra_specs)
####################
def pci_device_get_by_addr(context, node_id, dev_addr):
    """Get PCI device by address."""
    return IMPL.pci_device_get_by_addr(context, node_id, dev_addr)


def pci_device_get_by_id(context, id):
    """Get PCI device by id."""
    return IMPL.pci_device_get_by_id(context, id)


def pci_device_get_all_by_node(context, node_id):
    """Get all PCI devices for one host."""
    return IMPL.pci_device_get_all_by_node(context, node_id)


def pci_device_get_all_by_instance_uuid(context, instance_uuid):
    """Get PCI devices allocated to instance."""
    return IMPL.pci_device_get_all_by_instance_uuid(context, instance_uuid)


def pci_device_destroy(context, node_id, address):
    """Delete a PCI device record."""
    return IMPL.pci_device_destroy(context, node_id, address)


def pci_device_update(context, node_id, address, value):
    """Update a pci device."""
    return IMPL.pci_device_update(context, node_id, address, value)
###################
def cell_create(context, values):
    """Create a new child Cell entry."""
    return IMPL.cell_create(context, values)


def cell_update(context, cell_name, values):
    """Update a child Cell entry."""
    return IMPL.cell_update(context, cell_name, values)


def cell_delete(context, cell_name):
    """Delete a child Cell."""
    return IMPL.cell_delete(context, cell_name)


def cell_get(context, cell_name):
    """Get a specific child Cell."""
    return IMPL.cell_get(context, cell_name)


def cell_get_all(context):
    """Get all child Cells."""
    return IMPL.cell_get_all(context)
####################
def instance_metadata_get(context, instance_uuid):
    """Get all metadata for an instance."""
    return IMPL.instance_metadata_get(context, instance_uuid)


def instance_metadata_delete(context, instance_uuid, key):
    """Delete the given metadata item."""
    # NOTE: intentionally returns None, unlike most wrappers here.
    IMPL.instance_metadata_delete(context, instance_uuid, key)


def instance_metadata_update(context, instance_uuid, metadata, delete):
    """Update metadata if it exists, otherwise create it.

    :param delete: if True, metadata keys not present in the new dict
        are removed by the backend
    """
    return IMPL.instance_metadata_update(context, instance_uuid,
                                         metadata, delete)


####################


def instance_system_metadata_get(context, instance_uuid):
    """Get all system metadata for an instance."""
    return IMPL.instance_system_metadata_get(context, instance_uuid)


def instance_system_metadata_update(context, instance_uuid, metadata, delete):
    """Update metadata if it exists, otherwise create it."""
    IMPL.instance_system_metadata_update(
        context, instance_uuid, metadata, delete)
####################
def agent_build_create(context, values):
    """Create a new agent build entry."""
    return IMPL.agent_build_create(context, values)


def agent_build_get_by_triple(context, hypervisor, os, architecture):
    """Get agent build by hypervisor/OS/architecture triple."""
    return IMPL.agent_build_get_by_triple(context, hypervisor, os,
                                          architecture)


def agent_build_get_all(context, hypervisor=None):
    """Get all agent builds."""
    return IMPL.agent_build_get_all(context, hypervisor)


def agent_build_destroy(context, agent_update_id):
    """Destroy agent build entry."""
    # NOTE: intentionally returns None, unlike most wrappers here.
    IMPL.agent_build_destroy(context, agent_update_id)


def agent_build_update(context, agent_build_id, values):
    """Update agent build entry."""
    IMPL.agent_build_update(context, agent_build_id, values)
####################
def bw_usage_get(context, uuid, start_period, mac, use_slave=False):
    """Return bw usage for instance and mac in a given audit period.

    :param use_slave: if True, allow the backend to read from a slave
        database connection
    """
    # Bug fix: use_slave was accepted but never forwarded to the backend,
    # so slave reads silently fell through to the master connection.
    return IMPL.bw_usage_get(context, uuid, start_period, mac,
                             use_slave=use_slave)
def bw_usage_get_by_uuids(context, uuids, start_period):
    """Return bw usages for instance(s) in a given audit period.

    :param uuids: list of instance uuids to look up
    """
    return IMPL.bw_usage_get_by_uuids(context, uuids, start_period)
def bw_usage_update(context, uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed=None,
update_cells=True):
"""Update cached bandwidth usage for an instance's network based on mac
address. Creates new record if needed.
"""
rv = IMPL.bw_usage_update(context, uuid, mac, start_period, bw_in,
bw_out, last_ctr_in, last_ctr_out, last_refreshed=last_refreshed)
if update_cells:
try:
cells_rpcapi.CellsAPI().bw_usage_update_at_top(context,
uuid, mac, start_period, bw_in, bw_out,
last_ctr_in, last_ctr_out, last_refreshed)
except Exception:
LOG.exception(_("Failed to notify cells of bw_usage update"))
return rv
###################
def vol_get_usage_by_time(context, begin):
    """Return volumes usage that have been updated after a specified time."""
    return IMPL.vol_get_usage_by_time(context, begin)


def vol_usage_update(context, id, rd_req, rd_bytes, wr_req, wr_bytes,
                     instance_id, project_id, user_id, availability_zone,
                     update_totals=False):
    """Update cached volume usage for a volume.

    Creates new record if needed.
    """
    return IMPL.vol_usage_update(context, id, rd_req, rd_bytes, wr_req,
                                 wr_bytes, instance_id, project_id, user_id,
                                 availability_zone,
                                 update_totals=update_totals)


###################


def s3_image_get(context, image_id):
    """Find local s3 image represented by the provided id."""
    return IMPL.s3_image_get(context, image_id)


def s3_image_get_by_uuid(context, image_uuid):
    """Find local s3 image represented by the provided uuid."""
    return IMPL.s3_image_get_by_uuid(context, image_uuid)


def s3_image_create(context, image_uuid):
    """Create local s3 image represented by provided uuid."""
    return IMPL.s3_image_create(context, image_uuid)
####################
def aggregate_create(context, values, metadata=None):
    """Create a new aggregate with metadata."""
    return IMPL.aggregate_create(context, values, metadata)


def aggregate_get(context, aggregate_id):
    """Get a specific aggregate by id."""
    return IMPL.aggregate_get(context, aggregate_id)


def aggregate_get_by_host(context, host, key=None):
    """Get a list of aggregates that host belongs to."""
    return IMPL.aggregate_get_by_host(context, host, key)


def aggregate_metadata_get_by_host(context, host, key=None):
    """Get metadata for all aggregates that host belongs to.

    Returns a dictionary where each value is a set; this covers the case
    where two aggregates have different values for the same key.

    Optional key filter.
    """
    return IMPL.aggregate_metadata_get_by_host(context, host, key)


def aggregate_metadata_get_by_metadata_key(context, aggregate_id, key):
    """Get metadata for an aggregate by metadata key."""
    return IMPL.aggregate_metadata_get_by_metadata_key(context, aggregate_id,
                                                       key)


def aggregate_host_get_by_metadata_key(context, key):
    """Get hosts with a specific metadata key metadata for all aggregates.

    Returns a dictionary where each key is a hostname and each value is a set
    of the key values.

    return value: {machine: set( az1, az2 )}
    """
    return IMPL.aggregate_host_get_by_metadata_key(context, key)


def aggregate_update(context, aggregate_id, values):
    """Update the attributes of an aggregate.

    If values contains a metadata key, it updates the aggregate metadata too.
    """
    return IMPL.aggregate_update(context, aggregate_id, values)


def aggregate_delete(context, aggregate_id):
    """Delete an aggregate."""
    return IMPL.aggregate_delete(context, aggregate_id)


def aggregate_get_all(context):
    """Get all aggregates."""
    return IMPL.aggregate_get_all(context)


def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False):
    """Add/update metadata. If set_delete=True, it adds only."""
    IMPL.aggregate_metadata_add(context, aggregate_id, metadata, set_delete)


def aggregate_metadata_get(context, aggregate_id):
    """Get metadata for the specified aggregate."""
    return IMPL.aggregate_metadata_get(context, aggregate_id)


def aggregate_metadata_delete(context, aggregate_id, key):
    """Delete the given metadata key."""
    IMPL.aggregate_metadata_delete(context, aggregate_id, key)


def aggregate_host_add(context, aggregate_id, host):
    """Add host to the aggregate."""
    IMPL.aggregate_host_add(context, aggregate_id, host)


def aggregate_host_get_all(context, aggregate_id):
    """Get hosts for the specified aggregate."""
    return IMPL.aggregate_host_get_all(context, aggregate_id)


def aggregate_host_delete(context, aggregate_id, host):
    """Delete the given host from the aggregate."""
    IMPL.aggregate_host_delete(context, aggregate_id, host)
####################
def instance_fault_create(context, values):
    """Create a new Instance Fault."""
    return IMPL.instance_fault_create(context, values)


def instance_fault_get_by_instance_uuids(context, instance_uuids):
    """Get all instance faults for the provided instance_uuids."""
    return IMPL.instance_fault_get_by_instance_uuids(context, instance_uuids)


####################


def action_start(context, values):
    """Start an action for an instance."""
    return IMPL.action_start(context, values)


def action_finish(context, values):
    """Finish an action for an instance."""
    return IMPL.action_finish(context, values)


def actions_get(context, uuid):
    """Get all instance actions for the provided instance."""
    return IMPL.actions_get(context, uuid)


def action_get_by_request_id(context, uuid, request_id):
    """Get the action by request_id and given instance."""
    return IMPL.action_get_by_request_id(context, uuid, request_id)


def action_event_start(context, values):
    """Start an event on an instance action."""
    return IMPL.action_event_start(context, values)


def action_event_finish(context, values):
    """Finish an event on an instance action."""
    return IMPL.action_event_finish(context, values)


def action_events_get(context, action_id):
    """Get the events by action id."""
    return IMPL.action_events_get(context, action_id)


def action_event_get_by_id(context, action_id, event_id):
    """Get a single action event by action id and event id."""
    return IMPL.action_event_get_by_id(context, action_id, event_id)
####################
def get_ec2_instance_id_by_uuid(context, instance_id):
    """Get ec2 id through uuid from instance_id_mappings table."""
    return IMPL.get_ec2_instance_id_by_uuid(context, instance_id)


def get_instance_uuid_by_ec2_id(context, ec2_id):
    """Get uuid through ec2 id from instance_id_mappings table."""
    return IMPL.get_instance_uuid_by_ec2_id(context, ec2_id)


def ec2_instance_create(context, instance_uuid, id=None):
    """Create the ec2 id to instance uuid mapping on demand."""
    return IMPL.ec2_instance_create(context, instance_uuid, id)


def ec2_instance_get_by_uuid(context, instance_uuid):
    """Get the ec2 id / instance uuid mapping entry by instance uuid."""
    return IMPL.ec2_instance_get_by_uuid(context, instance_uuid)


def ec2_instance_get_by_id(context, instance_id):
    """Get the ec2 id / instance uuid mapping entry by ec2 instance id."""
    return IMPL.ec2_instance_get_by_id(context, instance_id)
####################
def task_log_end_task(context, task_name,
                      period_beginning,
                      period_ending,
                      host,
                      errors,
                      message=None):
    """Mark a task as complete for a given host/time period."""
    return IMPL.task_log_end_task(context, task_name,
                                  period_beginning,
                                  period_ending,
                                  host,
                                  errors,
                                  message)


def task_log_begin_task(context, task_name,
                        period_beginning,
                        period_ending,
                        host,
                        task_items=None,
                        message=None):
    """Mark a task as started for a given host/time period."""
    return IMPL.task_log_begin_task(context, task_name,
                                    period_beginning,
                                    period_ending,
                                    host,
                                    task_items,
                                    message)


def task_log_get_all(context, task_name, period_beginning,
                     period_ending, host=None, state=None):
    """Get all task logs for the name/period, with optional host/state
    filters.
    """
    return IMPL.task_log_get_all(context, task_name, period_beginning,
                                 period_ending, host, state)


def task_log_get(context, task_name, period_beginning,
                 period_ending, host, state=None):
    """Get a task log entry for the given name/period/host."""
    return IMPL.task_log_get(context, task_name, period_beginning,
                             period_ending, host, state)
####################
def archive_deleted_rows(context, max_rows=None):
    """Move up to max_rows rows from production tables to corresponding shadow
    tables.

    :returns: number of rows archived.
    """
    return IMPL.archive_deleted_rows(context, max_rows=max_rows)


def archive_deleted_rows_for_table(context, tablename, max_rows=None):
    """Move up to max_rows rows from tablename to corresponding shadow
    table.

    :returns: number of rows archived.
    """
    return IMPL.archive_deleted_rows_for_table(context, tablename,
                                               max_rows=max_rows)
| tanglei528/nova | nova/db/api.py | Python | apache-2.0 | 65,070 |
#!/usr/bin/python3
"""Populate the ``person`` table with each comment reviewer and their manager.

Reviewer usernames are read from the ``comments`` table; each user's
operational manager is looked up over LDAP and the (username, manager)
pair is inserted into ``person``.
"""
import os
import re

import MySQLdb

# NOTE(review): database and LDAP bind credentials are hard-coded below;
# move them to a config file or environment variables.
db = MySQLdb.connect("etos39.cn.ao.ericsson.se", "automation", "automation",
                     "gerrit_data_new")
cursor = db.cursor()

# Keep ASCII letters only.  The original pattern was ``[^a-zA-z]``: the
# ``A-z`` range also matched ``[ \\ ] ^ _`` and backtick.  Compiled once
# instead of once per row.
_NON_ALPHA = re.compile(r'[^a-zA-Z]')

cursor.execute('SELECT reviewer_username FROM comments '
               'GROUP BY reviewer_username')
for (user,) in cursor.fetchall():
    if user is None:
        continue
    # Look up the user's operational manager via LDAP.
    # NOTE(review): ``user`` is interpolated into a shell command -- shell
    # injection risk if usernames can contain metacharacters.
    out_put = os.popen(
        '/usr/bin/ldapsearch -x -LLL '
        '-D "uid=COVESEOS,ou=Users,ou=Internal,o=ericsson" -w 1qaz\@WSX '
        '-b "uid=' + user + ',ou=Users,ou=Internal,o=ericsson" '
        '-h ecd.ericsson.se -p 389'
        '|grep eriOperationalManager:|awk \'{print $2}\'', 'r')
    try:
        username = _NON_ALPHA.sub('', str(user))
        managername = _NON_ALPHA.sub('', str(out_put.read()))
        print(username)
        cursor.execute(
            """INSERT INTO person(username,manager)VALUES(%s,%s)""",
            (username, managername))
        db.commit()
    except Exception as e:
        # ``print e`` was Python 2 syntax -- a SyntaxError under the
        # python3 shebang above.
        print(e)
        db.rollback()
<?php
// Match list (default action) and match detail pages.
// NOTE(review): uses the removed mysql_* extension and, in the "detail"
// branch, interpolates $_GET['id'] directly into SQL (injection risk);
// migrate to mysqli/PDO with bound parameters.
switch (@$_GET['action']) {
default:
// Build the match list table, colour-coded by outcome.
$query = "SELECT * FROM ix_matchs ORDER BY id DESC";
$sql = mysql_query($query);
$texte='<br><table class="liste_table" cellpadding=0 cellspacing=2 align="center">
<tr>
<td class="liste_titre" width=20%>Date</td>
<td class="liste_titre" width=25%>Adversaire</td>
<td class="liste_titre" width=15%>Type</td>
<td class="liste_titre" width=20%>Score</td>
<td class="liste_titre" width=10%>Détail</td>
<td class="liste_titre" width=10%>Démos</td>
</tr>
';
while($data = mysql_fetch_array($sql)) {
// Row colour + win/loss/draw tallies; '@' suppresses the undefined-
// variable notice on the first increment of each counter.
if ($data['score1']<$data['score2']) { $color="#FFC8C8"; $colortxt="#E71B1B"; @$perdu++; }
if ($data['score1']>$data['score2']) { $color="#D0F8C8"; $colortxt="#52C174"; @$gagne++; }
if ($data['score1']==$data['score2']) { $color="#C8D8FF"; $colortxt="#3A37CE"; @$egalite++; }
// Link the opponent name to their site when one is recorded.
if (!empty($data['site_adv'])) { $adversaire="<a href=\"".$data['site_adv']."\" target=\"_blank\">".$data['adversaire']."</a>"; }
else { $adversaire=$data['adversaire']; }
// Demo column: prefer the HLTV demo, fall back to a screenshot, else "-".
if (!empty($data['hltv'])) { $demo='<a href="'.$data['hltv'].'" target="_blank"><img src="images/video.png" border=0 style="border:1px solid #FFFFFF;" OnMouseOver="this.style.border=\'1px outset #F7A118\'" OnMouseOut="this.style.border=\'1px solid #FFFFFF\'"></a>'; }
else if (!empty($data['screen'])) { $demo='<a href="'.$data['screen'].'" target="_blank"><img src="images/screen.png" border=0 style="border:1px solid #FFFFFF;" OnMouseOver="this.style.border=\'1px outset #F7A118\'" OnMouseOut="this.style.border=\'1px solid #FFFFFF\'"></a>'; }
else $demo="-";
$texte.=" <tr>
<td class='liste_txt'>".inverser_date($data['date'])."</td>
<td class='liste_txt'>$adversaire</td>
<td class='liste_txt'>".$data['type']."</td>
<td class='liste_txt' bgcolor='$color'><font color='$colortxt'><b>".$data['score1']."/".$data['score2']."</b></font></td>
<td class='liste_txt'><a href=\"?page=matchs&action=detail&id=".$data['id']."\"><img src='images/rapport.png' border=0 style=\"border:1px solid #FFFFFF;\" OnMouseOver=\"this.style.border='1px outset #108AFB'\" OnMouseOut=\"this.style.border='1px solid #FFFFFF'\"></a></td>
<td class='liste_txt'>$demo</td>
</tr>";
}
// Legend and totals.
// NOTE(review): $gagne/$perdu/$egalite are only created inside the loop,
// so with no rows of a given outcome these interpolations emit
// undefined-variable notices.
$texte.="</table><p align=\"center\"><img src=\"images/carre_vert.jpg\" alt=\"\" name=\"carre_rouge\" width=\"10\" height=\"10\" id=\"carre_rouge\" /> Gagné - <img src=\"images/carre_rouge.jpg\" alt=\"\" name=\"carre_rouge\" width=\"10\" height=\"10\" id=\"carre_rouge\" /> Perdu - <img src=\"images/carre_bleu.jpg\" alt=\"\" name=\"carre_rouge\" width=\"10\" height=\"10\" id=\"carre_rouge\" /> Egalité";
$texte.="<br><br><br><b>$gagne</b> matchs gagnés, <b>$perdu</b> matchs perdus et <b>$egalite</b> égalités.<br></p>";
$afficher->AddSession($handle, "contenu");
$afficher->setVar($handle, "contenu.module_titre", "Liste des matchs");
$afficher->setVar($handle, "contenu.module_texte", $texte );
$afficher->CloseSession($handle, "contenu");
break;
case "detail":
// Detail view for a single match.
// NOTE(review): unsanitised $_GET['id'] in SQL -- see header note.
$query = "SELECT * FROM ix_matchs WHERE id=".$_GET['id'];
$sql = mysql_query($query); $data=mysql_fetch_object($sql);
// Use the map screenshot when present, otherwise a placeholder image.
(file_exists("images/maps/" . $data->map1 . ".jpg")) ? $urlimg1="images/maps/" . $data->map1 . ".jpg" : $urlimg1="images/maps/none.jpg";
(file_exists("images/maps/" . $data->map2 . ".jpg")) ? $urlimg2="images/maps/" . $data->map2 . ".jpg" : $urlimg2="images/maps/none.jpg";
if (isset($data->map1)) { $imgmap1='<img src="'.$urlimg1.'" style="border:1px solid #000000">'; }
if (isset($data->map2)) { $imgmap2='<img src="'.$urlimg2.'" style="border:1px solid #000000">'; }
$maps='<table width="322" align="center" cellpadding="0" cellspacing="0" >
<tr bgcolor="#000000">
<td><div align="center"><font color="#FFFFFF">Map 1 : <b>'.$data->map1.'</b></font></div></td>
<td><div align="center"><font color="#FFFFFF">Map 2 : <b>'.$data->map2.'</b></font></div></td>
</tr>
<tr>
<td>'.@$imgmap1.'</td>
<td>'.@$imgmap2.'</td>
</tr>
</table>';
$texte='<p align="center"><br /><span class="txt2" style="font-size:13px; font-weight:bold"> Détail du Match contre les '.$data->adversaire.'</SPAN></p>
<p>'.$maps.'<br><br>
<span class="txt2">Date</span> : '.inverser_date($data->date).@$case.'<br />
<span class="txt2">Type</span> : '.$data->type.'<br />
<span class="txt2">Line Up</span> : '.$data->lineup.'<br />
<span class="tx2">Score</span> : <b>'.$data->score1.'</b> / '.$data->score2.'</p>
<p><span class="txt2">Rapport : </span><br />
'.$data->rapport.'<br />
<br />
<span class="txt2"> Lien Démo </span>: <a href="'.$data->hltv.'" target="_blank">'.$data->hltv.'</a><br />
<span class="txt2">Lien Screen </span>: <a href="'.$data->screen.'" target="_blank">'.$data->screen.'</a> </p>
<p align="center"><br>- Posté un commentaire - ( a venir ! )<br>
</p>';
$afficher->AddSession($handle, "contenu");
$afficher->setVar($handle, "contenu.module_titre", "Détail d'un match");
$afficher->setVar($handle, "contenu.module_texte", $texte );
$afficher->CloseSession($handle, "contenu");
}
?> | studiodev/archives | 2005 - PortiX-Team (CMS)/pages/matchs.php | PHP | apache-2.0 | 5,562 |
/*
Copyright 2014 Rustici Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Web;
using Newtonsoft.Json.Linq;
using TinCan.Documents;
using TinCan.LRSResponses;
namespace TinCan
{
/// <summary>
/// An ILRS implementation that talks to a remote Learning Record Store
/// over the xAPI (Tin Can) HTTP interface.
/// </summary>
public class RemoteLRS : ILRS
{
    // Base URL of the LRS endpoint, e.g. https://lrs.example.com/xapi/
    public Uri endpoint { get; set; }
    // xAPI version sent in the X-Experience-API-Version header.
    public TCAPIVersion version { get; set; }
    // Raw value of the Authorization header (e.g. "Basic ...").
    public String auth { get; set; }
    // NOTE(review): not referenced anywhere in this class -- confirm a
    // caller consumes it before removing.
    public Dictionary<String, String> extended { get; set; }

    /// <summary>Set HTTP Basic authentication from a username/password pair.</summary>
    public void SetAuth(String username, String password)
    {
        auth = "Basic " + Convert.ToBase64String(Encoding.UTF8.GetBytes(username + ":" + password));
    }

    public RemoteLRS() { }
    public RemoteLRS(Uri endpoint, TCAPIVersion version, String username, String password)
    {
        this.endpoint = endpoint;
        this.version = version;
        this.SetAuth(username, password);
    }
    public RemoteLRS(String endpoint, TCAPIVersion version, String username, String password) : this(new Uri(endpoint), version, username, password) { }
    public RemoteLRS(String endpoint, String username, String password) : this(endpoint, TCAPIVersion.latest(), username, password) { }

    // Internal representation of an HTTP request to the LRS.
    private class MyHTTPRequest
    {
        public String method { get; set; }
        // Either a path relative to `endpoint` or a complete URL.
        public String resource { get; set; }
        public Dictionary<String, String> queryParams { get; set; }
        public Dictionary<String, String> headers { get; set; }
        public String contentType { get; set; }
        public byte[] content { get; set; }
    }

    // Internal representation of an HTTP response from the LRS.
    private class MyHTTPResponse
    {
        public HttpStatusCode status { get; set; }
        public String contentType { get; set; }
        public byte[] content { get; set; }
        public DateTime lastModified { get; set; }
        public String etag { get; set; }
        // Populated when the request failed with a WebException.
        public Exception ex { get; set; }

        public MyHTTPResponse() { }
        public MyHTTPResponse(HttpWebResponse webResp)
        {
            status = webResp.StatusCode;
            contentType = webResp.ContentType;
            etag = webResp.Headers.Get("Etag");
            lastModified = webResp.LastModified;
            using (var stream = webResp.GetResponseStream())
            {
                content = ReadFully(stream, (int)webResp.ContentLength);
            }
        }
    }
// Builds and executes an HTTP request against the LRS synchronously.
// HTTP-level failures do not throw: the WebException is captured in the
// returned response's `ex` field (with the error body, when the server
// supplied one).
private MyHTTPResponse MakeSyncRequest(MyHTTPRequest req)
{
    String url;
    // `resource` may already be a full URL (e.g. a "more" statements
    // link) or a path relative to the configured endpoint.
    if (req.resource.StartsWith("http", StringComparison.InvariantCultureIgnoreCase))
    {
        url = req.resource;
    }
    else
    {
        url = endpoint.ToString();
        if (! url.EndsWith("/") && ! req.resource.StartsWith("/")) {
            url += "/";
        }
        url += req.resource;
    }

    // Append URL-encoded query parameters, if any.
    if (req.queryParams != null)
    {
        String qs = "";
        foreach (KeyValuePair<String, String> entry in req.queryParams)
        {
            if (qs != "")
            {
                qs += "&";
            }
            qs += HttpUtility.UrlEncode(entry.Key) + "=" + HttpUtility.UrlEncode(entry.Value);
        }
        if (qs != "")
        {
            url += "?" + qs;
        }
    }

    // TODO: handle special properties we recognize, such as content type, modified since, etc.
    var webReq = (HttpWebRequest)WebRequest.Create(url);
    webReq.Method = req.method;

    webReq.Headers.Add("X-Experience-API-Version", version.ToString());
    if (auth != null)
    {
        webReq.Headers.Add("Authorization", auth);
    }
    if (req.headers != null)
    {
        foreach (KeyValuePair<String, String> entry in req.headers)
        {
            webReq.Headers.Add(entry.Key, entry.Value);
        }
    }

    webReq.ContentType = req.contentType != null ? req.contentType : "application/octet-stream";

    if (req.content != null)
    {
        webReq.ContentLength = req.content.Length;
        using (var stream = webReq.GetRequestStream())
        {
            stream.Write(req.content, 0, req.content.Length);
        }
    }

    MyHTTPResponse resp;

    try
    {
        using (var webResp = (HttpWebResponse)webReq.GetResponse())
        {
            resp = new MyHTTPResponse(webResp);
        }
    }
    catch (WebException ex)
    {
        // Non-2xx statuses surface as WebException; keep the server's
        // error body when present so callers can extract the message.
        if (ex.Response != null)
        {
            using (var webResp = (HttpWebResponse)ex.Response)
            {
                resp = new MyHTTPResponse(webResp);
            }
        }
        else
        {
            resp = new MyHTTPResponse();
            resp.content = Encoding.UTF8.GetBytes("Web exception without '.Response'");
        }
        resp.ex = ex;
    }

    return resp;
}
/// <summary>
/// Reads data from a stream until the end is reached and returns it as a
/// byte array.  An IOException is thrown if any of the underlying IO
/// calls fail.
/// </summary>
/// <param name="stream">The stream to read data from</param>
/// <param name="initialLength">The initial buffer length</param>
private static byte[] ReadFully(Stream stream, int initialLength)
{
    // If we've been passed an unhelpful initial length, just use 32K.
    if (initialLength < 1)
    {
        initialLength = 32768;
    }

    // Let MemoryStream manage buffer growth instead of the hand-rolled
    // resize-and-copy loop; ToArray() returns an exactly-sized array.
    using (var buffer = new MemoryStream(initialLength))
    {
        stream.CopyTo(buffer);
        return buffer.ToArray();
    }
}
// Fetches a document resource (state/profile) and, on 200 OK, copies the
// payload and HTTP metadata (content type, last-modified, etag) into
// `document`.  The raw response is returned for status inspection.
private MyHTTPResponse GetDocument(String resource, Dictionary<String, String> queryParams, Document document)
{
    var req = new MyHTTPRequest();
    req.method = "GET";
    req.resource = resource;
    req.queryParams = queryParams;

    var res = MakeSyncRequest(req);
    if (res.status == HttpStatusCode.OK)
    {
        document.content = res.content;
        document.contentType = res.contentType;
        document.timestamp = res.lastModified;
        document.etag = res.etag;
    }

    return res;
}

// Fetches a JSON array of state/profile ids from `resource` and wraps it
// in a ProfileKeysLRSResponse.
private ProfileKeysLRSResponse GetProfileKeys(String resource, Dictionary<String, String> queryParams)
{
    var r = new ProfileKeysLRSResponse();

    var req = new MyHTTPRequest();
    req.method = "GET";
    req.resource = resource;
    req.queryParams = queryParams;

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.OK)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    r.success = true;

    var keys = JArray.Parse(Encoding.UTF8.GetString(res.content));
    // NOTE: `r.content` stays null when the server returns an empty array.
    if (keys.Count > 0) {
        r.content = new List<String>();
        foreach (JToken key in keys) {
            r.content.Add((String)key);
        }
    }

    return r;
}

// PUTs a document's bytes to `resource`; success is 204 No Content.
private LRSResponse SaveDocument(String resource, Dictionary<String, String> queryParams, Document document)
{
    var r = new LRSResponse();

    var req = new MyHTTPRequest();
    req.method = "PUT";
    req.resource = resource;
    req.queryParams = queryParams;
    req.contentType = document.contentType;
    req.content = document.content;

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.NoContent)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    r.success = true;

    return r;
}

// DELETEs the resource identified by `queryParams`; success is 204 No Content.
private LRSResponse DeleteDocument(String resource, Dictionary<String, String> queryParams)
{
    var r = new LRSResponse();

    var req = new MyHTTPRequest();
    req.method = "DELETE";
    req.resource = resource;
    req.queryParams = queryParams;

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.NoContent)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    r.success = true;

    return r;
}

// Fetches a single statement from the "statements" resource using the
// given query parameters (statementId or voidedStatementId).
private StatementLRSResponse GetStatement(Dictionary<String, String> queryParams)
{
    var r = new StatementLRSResponse();

    var req = new MyHTTPRequest();
    req.method = "GET";
    req.resource = "statements";
    req.queryParams = queryParams;

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.OK)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    r.success = true;
    r.content = new Statement(new Json.StringOfJSON(Encoding.UTF8.GetString(res.content)));

    return r;
}
// Queries the LRS "about" resource (supported versions/extensions).
public AboutLRSResponse About()
{
    var r = new AboutLRSResponse();

    var req = new MyHTTPRequest();
    req.method = "GET";
    req.resource = "about";

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.OK)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    r.success = true;
    r.content = new About(Encoding.UTF8.GetString(res.content));

    return r;
}

// Stores a single statement.  Without an id the statement is POSTed and
// the server-assigned id is copied back onto `statement`; with an id it
// is PUT to that statementId.
public StatementLRSResponse SaveStatement(Statement statement)
{
    var r = new StatementLRSResponse();
    var req = new MyHTTPRequest();
    req.queryParams = new Dictionary<String, String>();
    req.resource = "statements";

    if (statement.id == null)
    {
        req.method = "POST";
    }
    else
    {
        req.method = "PUT";
        req.queryParams.Add("statementId", statement.id.ToString());
    }

    req.contentType = "application/json";
    req.content = Encoding.UTF8.GetBytes(statement.ToJSON(version));

    var res = MakeSyncRequest(req);
    if (statement.id == null)
    {
        // POST success is 200 OK with a JSON array containing the new id.
        if (res.status != HttpStatusCode.OK)
        {
            r.success = false;
            r.httpException = res.ex;
            r.SetErrMsgFromBytes(res.content);
            return r;
        }

        var ids = JArray.Parse(Encoding.UTF8.GetString(res.content));
        statement.id = new Guid((String)ids[0]);
    }
    else {
        // PUT success is 204 No Content.
        if (res.status != HttpStatusCode.NoContent)
        {
            r.success = false;
            r.httpException = res.ex;
            r.SetErrMsgFromBytes(res.content);
            return r;
        }
    }

    r.success = true;
    r.content = statement;

    return r;
}

// Stores a batch of statements with a single POST; server-assigned ids
// are copied back onto the statements in order.
public StatementsResultLRSResponse SaveStatements(List<Statement> statements)
{
    var r = new StatementsResultLRSResponse();

    var req = new MyHTTPRequest();
    req.resource = "statements";
    req.method = "POST";
    req.contentType = "application/json";

    var jarray = new JArray();
    foreach (Statement st in statements)
    {
        jarray.Add(st.ToJObject(version));
    }
    req.content = Encoding.UTF8.GetBytes(jarray.ToString());

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.OK)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    var ids = JArray.Parse(Encoding.UTF8.GetString(res.content));
    for (int i = 0; i < ids.Count; i++)
    {
        statements[i].id = new Guid((String)ids[i]);
    }

    r.success = true;
    r.content = new StatementsResult(statements);

    return r;
}
// Fetches a single statement by id.
public StatementLRSResponse RetrieveStatement(Guid id)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("statementId", id.ToString());

    return GetStatement(queryParams);
}

// Fetches a voided statement by its original statement id.
public StatementLRSResponse RetrieveVoidedStatement(Guid id)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("voidedStatementId", id.ToString());

    return GetStatement(queryParams);
}

// Runs a statements query built from `query`'s parameter map.
public StatementsResultLRSResponse QueryStatements(StatementsQuery query)
{
    var r = new StatementsResultLRSResponse();

    var req = new MyHTTPRequest();
    req.method = "GET";
    req.resource = "statements";
    req.queryParams = query.ToParameterMap(version);

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.OK)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    r.success = true;
    r.content = new StatementsResult(new Json.StringOfJSON(Encoding.UTF8.GetString(res.content)));

    return r;
}

// Follows a StatementsResult's "more" link.  The link is resolved
// against the authority part of the endpoint (scheme://host[:port]).
public StatementsResultLRSResponse MoreStatements(StatementsResult result)
{
    var r = new StatementsResultLRSResponse();

    var req = new MyHTTPRequest();
    req.method = "GET";
    req.resource = endpoint.GetLeftPart(UriPartial.Authority);
    if (! req.resource.EndsWith("/")) {
        req.resource += "/";
    }
    req.resource += result.more;

    var res = MakeSyncRequest(req);
    if (res.status != HttpStatusCode.OK)
    {
        r.success = false;
        r.httpException = res.ex;
        r.SetErrMsgFromBytes(res.content);
        return r;
    }

    r.success = true;
    r.content = new StatementsResult(new Json.StringOfJSON(Encoding.UTF8.GetString(res.content)));

    return r;
}
// TODO: since param
// Lists state ids stored for the activity/agent (optional registration).
public ProfileKeysLRSResponse RetrieveStateIds(Activity activity, Agent agent, Nullable<Guid> registration = null)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("activityId", activity.id.ToString());
    queryParams.Add("agent", agent.ToJSON(version));
    if (registration != null)
    {
        queryParams.Add("registration", registration.ToString());
    }

    return GetProfileKeys("activities/state", queryParams);
}
// Fetches a single state document for the activity/agent (and optional
// registration).  404 Not Found is treated as success with an empty
// document rather than as an error.
public StateLRSResponse RetrieveState(String id, Activity activity, Agent agent, Nullable<Guid> registration = null)
{
    var r = new StateLRSResponse();
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("stateId", id);
    queryParams.Add("activityId", activity.id.ToString());
    queryParams.Add("agent", agent.ToJSON(version));

    var state = new StateDocument();
    state.id = id;
    state.activity = activity;
    state.agent = agent;

    if (registration != null)
    {
        queryParams.Add("registration", registration.ToString());
        state.registration = registration;
    }

    var resp = GetDocument("activities/state", queryParams, state);
    if (resp.status != HttpStatusCode.OK && resp.status != HttpStatusCode.NotFound)
    {
        r.success = false;
        r.httpException = resp.ex;
        r.SetErrMsgFromBytes(resp.content);
        return r;
    }
    r.success = true;
    // Fix: the document populated by GetDocument was previously discarded,
    // leaving callers a success response with no content.
    r.content = state;

    return r;
}
// PUTs a state document keyed by stateId/activityId/agent(/registration).
public LRSResponse SaveState(StateDocument state)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("stateId", state.id);
    queryParams.Add("activityId", state.activity.id.ToString());
    queryParams.Add("agent", state.agent.ToJSON(version));
    if (state.registration != null)
    {
        queryParams.Add("registration", state.registration.ToString());
    }

    return SaveDocument("activities/state", queryParams, state);
}

// Deletes a single state document.
public LRSResponse DeleteState(StateDocument state)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("stateId", state.id);
    queryParams.Add("activityId", state.activity.id.ToString());
    queryParams.Add("agent", state.agent.ToJSON(version));
    if (state.registration != null)
    {
        queryParams.Add("registration", state.registration.ToString());
    }

    return DeleteDocument("activities/state", queryParams);
}

// Deletes all state documents for the activity/agent (no stateId), with
// optional registration filter.
public LRSResponse ClearState(Activity activity, Agent agent, Nullable<Guid> registration = null)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("activityId", activity.id.ToString());
    queryParams.Add("agent", agent.ToJSON(version));
    if (registration != null)
    {
        queryParams.Add("registration", registration.ToString());
    }

    return DeleteDocument("activities/state", queryParams);
}
// TODO: since param
// Lists profile ids stored for the given activity.
public ProfileKeysLRSResponse RetrieveActivityProfileIds(Activity activity)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("activityId", activity.id.ToString());

    return GetProfileKeys("activities/profile", queryParams);
}
// Fetches a single activity profile document.  404 Not Found is treated
// as success with an empty document rather than as an error.
public ActivityProfileLRSResponse RetrieveActivityProfile(String id, Activity activity)
{
    var r = new ActivityProfileLRSResponse();
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("profileId", id);
    queryParams.Add("activityId", activity.id.ToString());

    var profile = new ActivityProfileDocument();
    profile.id = id;
    profile.activity = activity;

    var resp = GetDocument("activities/profile", queryParams, profile);
    if (resp.status != HttpStatusCode.OK && resp.status != HttpStatusCode.NotFound)
    {
        r.success = false;
        r.httpException = resp.ex;
        r.SetErrMsgFromBytes(resp.content);
        return r;
    }
    r.success = true;
    // Fix: expose the fetched document; it was previously discarded.
    r.content = profile;

    return r;
}
// PUTs an activity profile document.
public LRSResponse SaveActivityProfile(ActivityProfileDocument profile)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("profileId", profile.id);
    queryParams.Add("activityId", profile.activity.id.ToString());

    return SaveDocument("activities/profile", queryParams, profile);
}

// Deletes an activity profile document.
public LRSResponse DeleteActivityProfile(ActivityProfileDocument profile)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("profileId", profile.id);
    queryParams.Add("activityId", profile.activity.id.ToString());

    // TODO: need to pass Etag?
    return DeleteDocument("activities/profile", queryParams);
}

// TODO: since param
// Lists profile ids stored for the given agent.
public ProfileKeysLRSResponse RetrieveAgentProfileIds(Agent agent)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("agent", agent.ToJSON(version));

    return GetProfileKeys("agents/profile", queryParams);
}
// Fetches a single agent profile document.  404 Not Found is treated as
// success with an empty document rather than as an error.
public AgentProfileLRSResponse RetrieveAgentProfile(String id, Agent agent)
{
    var r = new AgentProfileLRSResponse();
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("profileId", id);
    queryParams.Add("agent", agent.ToJSON(version));

    var profile = new AgentProfileDocument();
    profile.id = id;
    profile.agent = agent;

    var resp = GetDocument("agents/profile", queryParams, profile);
    if (resp.status != HttpStatusCode.OK && resp.status != HttpStatusCode.NotFound)
    {
        r.success = false;
        r.httpException = resp.ex;
        r.SetErrMsgFromBytes(resp.content);
        return r;
    }
    r.success = true;
    // Fix: expose the fetched document; it was previously discarded.
    r.content = profile;

    return r;
}
// PUTs an agent profile document.
public LRSResponse SaveAgentProfile(AgentProfileDocument profile)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("profileId", profile.id);
    queryParams.Add("agent", profile.agent.ToJSON(version));

    return SaveDocument("agents/profile", queryParams, profile);
}

// Deletes an agent profile document.
public LRSResponse DeleteAgentProfile(AgentProfileDocument profile)
{
    var queryParams = new Dictionary<String, String>();
    queryParams.Add("profileId", profile.id);
    queryParams.Add("agent", profile.agent.ToJSON(version));

    // TODO: need to pass Etag?
    return DeleteDocument("agents/profile", queryParams);
}
}
}
| nagyistoce/TinCan.NET | TinCan/RemoteLRS.cs | C# | apache-2.0 | 24,709 |
// Game-view globals shared across this file.
// Fix: `is_dragging` was declared twice (once in the list, once with the
// initializer below); keep a single initialized declaration.
var stage, board, tiles, fleets, scale, sWid;
var lastMouse = { x: 0, y: 0 };
var is_dragging = false;
// Wire up the page once the DOM is ready: build the stage, register the
// keyboard handlers, and show the lobby.
$(document).ready(function() {
    init_stage();
    document.addEventListener('keyup', handleKeyUp, false);
    document.addEventListener('keydown', handleKeyDown, false);
    loadLobby();
});
/**
 * Called from createAll in game.js after all assets are loaded.
 * Clears old game globals, re-sets defaults.
 */
var setGlobals = function() {
    stage.removeChild(board);
    board = new createjs.Container();
    tiles = [];
    // NOTE(review): this resets `fleetshapes`, but the global declared at
    // the top of this file is `fleets` -- confirm which name the rest of
    // the code expects.
    fleetshapes = {};
    scale = 0.60;
};
// Keyboard zoom handler: "-" zooms out, "=" (plus sign key) zooms in.
// Any other key is ignored.
var handleKeyUp = function(e) {
    var zoomByKey = {
        189: -0.05, // dash
        187: 0.05   // equals (plus sign)
    };
    var delta = zoomByKey[e.keyCode];
    if (delta !== undefined) {
        zoomBoard(delta);
    }
};
// Keyboard pan handler: arrow keys move the board one step in the pressed
// direction. Any other key is ignored.
var handleKeyDown = function(e) {
    var panByKey = {
        37: [-1, 0], // left arrow
        38: [0, -1], // up arrow
        39: [1, 0],  // right arrow
        40: [0, 1]   // down arrow
    };
    var step = panByKey[e.keyCode];
    if (step) {
        moveBoard(step[0], step[1]);
    }
};
/**
 * Calls init and draw functions for each tile in the game board, then
 * centers the assembled board container in the window and fades it in.
 * No-op when the stage has not been initialized yet.
 */
var createBoard = function() {
    if (stage) {
        initSelection();
        updateCanvasSize();
        // Draw order matters: later layers render on top of earlier ones.
        drawAsteroids();
        drawTiles();
        drawNoFlyZones();
        drawBases();
        drawAgents();
        drawFleets();
        drawSprites();
        stage.addChild( board );
        // Overrides the 0.60 default set in setGlobals().
        scale = 0.75;
        // The board is a 7x7 grid of tiles of width sWid.
        var boardWidth = 7 * sWid * scale;
        var boardHeight = 7 * sWid * scale;
        board.x = (window.innerWidth - boardWidth) / 2.0;
        board.y = (window.innerHeight - boardHeight) / 2.0;
        board.scaleX = scale;
        board.scaleY = scale;
        fadeIn(board, 1000, false, false);
    }
};
// Renders every asteroid on the current game board.
var drawAsteroids = function() {
    clientGame.game.board.asteroids.forEach(function(asteroid) {
        drawAsteroid(asteroid);
    });
};
// Initializes and renders one tile per planet, by planet index.
var drawTiles = function() {
    var planetCount = clientGame.game.board.planets.length;
    for (var i = 0; i < planetCount; i++) {
        initTile(i);
        drawTile(i);
    }
};
// Sets up fleet shapes, then refreshes the fleets shown on every planet.
var drawFleets = function() {
    initFleets();
    clientGame.game.board.planets.forEach(function(planet, index) {
        updateFleets(index);
    });
};
// Initializes and renders the no-fly-zone overlays.
// (An unused local `planets` variable was removed; unlike the sibling
// draw* functions, this one operates on the whole board at once.)
var drawNoFlyZones = function() {
    initNoFlyZones();
    updateNoFlyZones();
};
// Sets up base shapes, then refreshes the bases shown on every planet.
var drawBases = function() {
    initBases();
    clientGame.game.board.planets.forEach(function(planet, index) {
        updateBases(index);
    });
};
// Sets up agent shapes, then refreshes the agents shown on every planet.
var drawAgents = function() {
    initAgents();
    clientGame.game.board.planets.forEach(function(planet, index) {
        updateAgents(index);
    });
};
/**
 * Turns mouse enablement on/off on the different createJS containers
 * based on what action the user is currently performing.
 */
var updateBoardInteractivity = function() {
    clientGame.game.board.planets.forEach(function(planet, index) {
        updateTileInteractivity(index);
    });
    updateFleetsInteractivity();
    updateBasesInteractivity();
    updateAgentsInteractivity();
};
/**
 * Calls update functions on each tile to update appearance, interactivity
 * based on current pending action or game event, then re-renders the stage.
 */
var updateBoard = function() {
    var planets = clientGame.game.board.planets;
    // This sets all bases to invisible. The updateBases call in the loop
    // below will reveal and draw any that are on planets.
    updateRemovedBases();
    for ( var p = 0; p < planets.length; p++ ) {
        updateTileInteractivity(p);
        updateTileImage(p);
        updateFleets(p);
        updateBases(p);
        updateAgents(p);
    }
    updateNoFlyZones();
    updateRemovedFleets();
    updateDeadAgents();
    // Force an immediate re-render now that all display objects changed.
    stage.update();
};
}; | Zebbeni/alien-empire | client/game_board.js | JavaScript | apache-2.0 | 3,574 |
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/ads/googleads/v1/errors/conversion_action_error.proto
package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors"
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "google.golang.org/genproto/googleapis/api/annotations"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// Enum describing possible conversion action errors.
type ConversionActionErrorEnum_ConversionActionError int32
const (
// Enum unspecified.
ConversionActionErrorEnum_UNSPECIFIED ConversionActionErrorEnum_ConversionActionError = 0
// The received error code is not known in this version.
ConversionActionErrorEnum_UNKNOWN ConversionActionErrorEnum_ConversionActionError = 1
// The specified conversion action name already exists.
ConversionActionErrorEnum_DUPLICATE_NAME ConversionActionErrorEnum_ConversionActionError = 2
// Another conversion action with the specified app id already exists.
ConversionActionErrorEnum_DUPLICATE_APP_ID ConversionActionErrorEnum_ConversionActionError = 3
// Android first open action conflicts with Google play codeless download
// action tracking the same app.
ConversionActionErrorEnum_TWO_CONVERSION_ACTIONS_BIDDING_ON_SAME_APP_DOWNLOAD ConversionActionErrorEnum_ConversionActionError = 4
// Android first open action conflicts with Google play codeless download
// action tracking the same app.
ConversionActionErrorEnum_BIDDING_ON_SAME_APP_DOWNLOAD_AS_GLOBAL_ACTION ConversionActionErrorEnum_ConversionActionError = 5
// The attribution model cannot be set to DATA_DRIVEN because a data-driven
// model has never been generated.
ConversionActionErrorEnum_DATA_DRIVEN_MODEL_WAS_NEVER_GENERATED ConversionActionErrorEnum_ConversionActionError = 6
// The attribution model cannot be set to DATA_DRIVEN because the
// data-driven model is expired.
ConversionActionErrorEnum_DATA_DRIVEN_MODEL_EXPIRED ConversionActionErrorEnum_ConversionActionError = 7
// The attribution model cannot be set to DATA_DRIVEN because the
// data-driven model is stale.
ConversionActionErrorEnum_DATA_DRIVEN_MODEL_STALE ConversionActionErrorEnum_ConversionActionError = 8
// The attribution model cannot be set to DATA_DRIVEN because the
// data-driven model is unavailable or the conversion action was newly
// added.
ConversionActionErrorEnum_DATA_DRIVEN_MODEL_UNKNOWN ConversionActionErrorEnum_ConversionActionError = 9
)
var ConversionActionErrorEnum_ConversionActionError_name = map[int32]string{
0: "UNSPECIFIED",
1: "UNKNOWN",
2: "DUPLICATE_NAME",
3: "DUPLICATE_APP_ID",
4: "TWO_CONVERSION_ACTIONS_BIDDING_ON_SAME_APP_DOWNLOAD",
5: "BIDDING_ON_SAME_APP_DOWNLOAD_AS_GLOBAL_ACTION",
6: "DATA_DRIVEN_MODEL_WAS_NEVER_GENERATED",
7: "DATA_DRIVEN_MODEL_EXPIRED",
8: "DATA_DRIVEN_MODEL_STALE",
9: "DATA_DRIVEN_MODEL_UNKNOWN",
}
var ConversionActionErrorEnum_ConversionActionError_value = map[string]int32{
"UNSPECIFIED": 0,
"UNKNOWN": 1,
"DUPLICATE_NAME": 2,
"DUPLICATE_APP_ID": 3,
"TWO_CONVERSION_ACTIONS_BIDDING_ON_SAME_APP_DOWNLOAD": 4,
"BIDDING_ON_SAME_APP_DOWNLOAD_AS_GLOBAL_ACTION": 5,
"DATA_DRIVEN_MODEL_WAS_NEVER_GENERATED": 6,
"DATA_DRIVEN_MODEL_EXPIRED": 7,
"DATA_DRIVEN_MODEL_STALE": 8,
"DATA_DRIVEN_MODEL_UNKNOWN": 9,
}
func (x ConversionActionErrorEnum_ConversionActionError) String() string {
return proto.EnumName(ConversionActionErrorEnum_ConversionActionError_name, int32(x))
}
func (ConversionActionErrorEnum_ConversionActionError) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_conversion_action_error_6bc78838b2a5587b, []int{0, 0}
}
// Container for enum describing possible conversion action errors.
type ConversionActionErrorEnum struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ConversionActionErrorEnum) Reset() { *m = ConversionActionErrorEnum{} }
func (m *ConversionActionErrorEnum) String() string { return proto.CompactTextString(m) }
func (*ConversionActionErrorEnum) ProtoMessage() {}
func (*ConversionActionErrorEnum) Descriptor() ([]byte, []int) {
return fileDescriptor_conversion_action_error_6bc78838b2a5587b, []int{0}
}
func (m *ConversionActionErrorEnum) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ConversionActionErrorEnum.Unmarshal(m, b)
}
func (m *ConversionActionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ConversionActionErrorEnum.Marshal(b, m, deterministic)
}
func (dst *ConversionActionErrorEnum) XXX_Merge(src proto.Message) {
xxx_messageInfo_ConversionActionErrorEnum.Merge(dst, src)
}
func (m *ConversionActionErrorEnum) XXX_Size() int {
return xxx_messageInfo_ConversionActionErrorEnum.Size(m)
}
func (m *ConversionActionErrorEnum) XXX_DiscardUnknown() {
xxx_messageInfo_ConversionActionErrorEnum.DiscardUnknown(m)
}
var xxx_messageInfo_ConversionActionErrorEnum proto.InternalMessageInfo
func init() {
proto.RegisterType((*ConversionActionErrorEnum)(nil), "google.ads.googleads.v1.errors.ConversionActionErrorEnum")
proto.RegisterEnum("google.ads.googleads.v1.errors.ConversionActionErrorEnum_ConversionActionError", ConversionActionErrorEnum_ConversionActionError_name, ConversionActionErrorEnum_ConversionActionError_value)
}
func init() {
proto.RegisterFile("google/ads/googleads/v1/errors/conversion_action_error.proto", fileDescriptor_conversion_action_error_6bc78838b2a5587b)
}
var fileDescriptor_conversion_action_error_6bc78838b2a5587b = []byte{
// 443 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x41, 0x8b, 0xd3, 0x40,
0x14, 0xc7, 0x6d, 0x56, 0x77, 0x75, 0x16, 0x74, 0x18, 0x14, 0xd9, 0x55, 0xf7, 0x50, 0xf0, 0xe0,
0xc1, 0x84, 0xb0, 0x07, 0x21, 0x7a, 0x79, 0xcd, 0x8c, 0x61, 0x30, 0x9d, 0x09, 0x49, 0x9a, 0x8a,
0x14, 0x86, 0xd8, 0x94, 0x50, 0xd8, 0xcd, 0x94, 0x4c, 0xed, 0x07, 0xf2, 0xe8, 0x47, 0xf1, 0xe6,
0xd7, 0xf0, 0xe2, 0xc9, 0xbb, 0x24, 0xb3, 0xad, 0x87, 0xed, 0xf6, 0x34, 0x7f, 0xde, 0xfb, 0xff,
0xfe, 0x09, 0xef, 0x3d, 0xf4, 0xa1, 0xd6, 0xba, 0xbe, 0x5a, 0x78, 0x65, 0x65, 0x3c, 0x2b, 0x3b,
0xb5, 0xf1, 0xbd, 0x45, 0xdb, 0xea, 0xd6, 0x78, 0x73, 0xdd, 0x6c, 0x16, 0xad, 0x59, 0xea, 0x46,
0x95, 0xf3, 0x75, 0xf7, 0xf4, 0x0d, 0x77, 0xd5, 0xea, 0xb5, 0x26, 0x17, 0x16, 0x71, 0xcb, 0xca,
0xb8, 0x3b, 0xda, 0xdd, 0xf8, 0xae, 0xa5, 0xcf, 0x5f, 0x6e, 0xd3, 0x57, 0x4b, 0xaf, 0x6c, 0x1a,
0xbd, 0x2e, 0xbb, 0x08, 0x63, 0xe9, 0xe1, 0x1f, 0x07, 0x9d, 0x85, 0xbb, 0x7c, 0xe8, 0xe3, 0x59,
0x07, 0xb2, 0xe6, 0xdb, 0xf5, 0xf0, 0x97, 0x83, 0x9e, 0xed, 0xed, 0x92, 0x27, 0xe8, 0x74, 0x22,
0xb2, 0x84, 0x85, 0xfc, 0x23, 0x67, 0x14, 0xdf, 0x23, 0xa7, 0xe8, 0x64, 0x22, 0x3e, 0x09, 0x39,
0x15, 0x78, 0x40, 0x08, 0x7a, 0x4c, 0x27, 0x49, 0xcc, 0x43, 0xc8, 0x99, 0x12, 0x30, 0x66, 0xd8,
0x21, 0x4f, 0x11, 0xfe, 0x5f, 0x83, 0x24, 0x51, 0x9c, 0xe2, 0x23, 0xf2, 0x0e, 0x5d, 0xe6, 0x53,
0xa9, 0x42, 0x29, 0x0a, 0x96, 0x66, 0x5c, 0x0a, 0x05, 0x61, 0xce, 0xa5, 0xc8, 0xd4, 0x88, 0x53,
0xca, 0x45, 0xa4, 0xa4, 0x50, 0x19, 0x8c, 0x2d, 0x42, 0xe5, 0x54, 0xc4, 0x12, 0x28, 0xbe, 0x4f,
0x7c, 0xf4, 0xf6, 0x90, 0x43, 0x41, 0xa6, 0xa2, 0x58, 0x8e, 0x20, 0xbe, 0x09, 0xc4, 0x0f, 0xc8,
0x1b, 0xf4, 0x9a, 0x42, 0x0e, 0x8a, 0xa6, 0xbc, 0x60, 0x42, 0x8d, 0x25, 0x65, 0xb1, 0x9a, 0x42,
0xa6, 0x04, 0x2b, 0x58, 0xaa, 0x22, 0x26, 0x58, 0x0a, 0x39, 0xa3, 0xf8, 0x98, 0xbc, 0x42, 0x67,
0xb7, 0xad, 0xec, 0x73, 0xc2, 0x53, 0x46, 0xf1, 0x09, 0x79, 0x81, 0x9e, 0xdf, 0x6e, 0x67, 0x39,
0xc4, 0x0c, 0x3f, 0xdc, 0xcf, 0x6e, 0x67, 0xf3, 0x68, 0xf4, 0x77, 0x80, 0x86, 0x73, 0x7d, 0xed,
0x1e, 0x5e, 0xdb, 0xe8, 0x7c, 0xef, 0xdc, 0x93, 0x6e, 0x69, 0xc9, 0xe0, 0x0b, 0xbd, 0xa1, 0x6b,
0x7d, 0x55, 0x36, 0xb5, 0xab, 0xdb, 0xda, 0xab, 0x17, 0x4d, 0xbf, 0xd2, 0xed, 0x09, 0xad, 0x96,
0xe6, 0xae, 0x8b, 0x7a, 0x6f, 0x9f, 0xef, 0xce, 0x51, 0x04, 0xf0, 0xc3, 0xb9, 0x88, 0x6c, 0x18,
0x54, 0xc6, 0xb5, 0xb2, 0x53, 0x85, 0xef, 0xf6, 0x9f, 0x34, 0x3f, 0xb7, 0x86, 0x19, 0x54, 0x66,
0xb6, 0x33, 0xcc, 0x0a, 0x7f, 0x66, 0x0d, 0xbf, 0x9d, 0xa1, 0xad, 0x06, 0x01, 0x54, 0x26, 0x08,
0x76, 0x96, 0x20, 0x28, 0xfc, 0x20, 0xb0, 0xa6, 0xaf, 0xc7, 0xfd, 0xdf, 0x5d, 0xfe, 0x0b, 0x00,
0x00, 0xff, 0xff, 0x0c, 0xe0, 0x92, 0x88, 0xee, 0x02, 0x00, 0x00,
}
| ptinsley/fling | vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_action_error.pb.go | GO | apache-2.0 | 8,831 |
/*
* Copyright 2017 Mirko Sertic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.mirkosertic.bytecoder.integrationtest;
import de.mirkosertic.bytecoder.api.Import;
import de.mirkosertic.bytecoder.api.web.AnimationFrameCallback;
import de.mirkosertic.bytecoder.api.web.CanvasRenderingContext2D;
import de.mirkosertic.bytecoder.api.web.MouseEvent;
import de.mirkosertic.bytecoder.api.web.Document;
import de.mirkosertic.bytecoder.api.web.EventListener;
import de.mirkosertic.bytecoder.api.web.HTMLButton;
import de.mirkosertic.bytecoder.api.web.HTMLCanvasElement;
import de.mirkosertic.bytecoder.api.web.Window;
import org.jbox2d.collision.shapes.CircleShape;
import org.jbox2d.collision.shapes.PolygonShape;
import org.jbox2d.collision.shapes.Shape;
import org.jbox2d.collision.shapes.ShapeType;
import org.jbox2d.common.Vec2;
import org.jbox2d.dynamics.Body;
import org.jbox2d.dynamics.BodyDef;
import org.jbox2d.dynamics.BodyType;
import org.jbox2d.dynamics.Fixture;
import org.jbox2d.dynamics.FixtureDef;
import org.jbox2d.dynamics.World;
import org.jbox2d.dynamics.joints.RevoluteJointDef;
public class JBox2DSimulation {
    /**
     * Physics model for the benchmark: a rotating reel (a ring of thin
     * polygon segments) pinned to a static axis and filled with 144 small
     * balls that get stirred as the reel turns.
     */
    public static class Scene {

        private final World world;
        private Body axis;            // static pivot the reel is joined to
        private Body reel;            // rotating ring that stirs the balls
        private long lastCalculated;  // wall-clock time the simulation has been advanced to
        private final long startTime; // wall-clock time the scene was created

        public Scene() {
            // Standard gravity, pointing downwards.
            world = new World(new Vec2(0, -9.8f));
            initAxis();
            initReel();
            joinReelToAxis();
            initBalls();
            lastCalculated = System.currentTimeMillis();
            startTime = lastCalculated;
        }

        /** Creates the tiny static circle body that the reel revolves around. */
        private void initAxis() {
            final BodyDef axisDef = new BodyDef();
            axisDef.type = BodyType.STATIC;
            axisDef.position = new Vec2(3, 3);
            axis = world.createBody(axisDef);

            final CircleShape axisShape = new CircleShape();
            axisShape.setRadius(0.02f);
            axisShape.m_p.set(0, 0);

            final FixtureDef axisFixture = new FixtureDef();
            axisFixture.shape = axisShape;
            axis.createFixture(axisFixture);
        }

        /**
         * Builds the reel as a ring of 30 thin quadrilateral segments of
         * radius 2.7 centered on the axis position.
         */
        private void initReel() {
            final BodyDef reelDef = new BodyDef();
            reelDef.type = BodyType.DYNAMIC;
            reelDef.position = new Vec2(3, 3);
            reel = world.createBody(reelDef);

            final FixtureDef fixture = new FixtureDef();
            fixture.friction = 0.5f;
            fixture.restitution = 0.4f;
            fixture.density = 1;

            final int parts = 30;
            for (int i = 0; i < parts; ++i) {
                final PolygonShape shape = new PolygonShape();
                // Outer edge of this segment on the 2.7-radius circle...
                final double angle1 = i / (double) parts * 2 * Math.PI;
                final double x1 = 2.7 * Math.cos(angle1);
                final double y1 = 2.7 * Math.sin(angle1);
                final double angle2 = (i + 1) / (double) parts * 2 * Math.PI;
                final double x2 = 2.7 * Math.cos(angle2);
                final double y2 = 2.7 * Math.sin(angle2);
                // ...and a 0.01 inward offset that gives the segment its thickness.
                final double angle = (angle1 + angle2) / 2;
                final double x = 0.01 * Math.cos(angle);
                final double y = 0.01 * Math.sin(angle);
                shape.set(new Vec2[] { new Vec2((float) x1, (float) y1), new Vec2((float) x2, (float) y2),
                        new Vec2((float) (x2 - x), (float) (y2 - y)), new Vec2((float) (x1 - x), (float) (y1 - y)) }, 4);
                fixture.shape = shape;
                reel.createFixture(fixture);
            }
        }

        /** Drops a 12x12 grid of balls, mirrored in all four quadrants around (3, 3). */
        private void initBalls() {
            final float ballRadius = 0.15f;

            final BodyDef ballDef = new BodyDef();
            ballDef.type = BodyType.DYNAMIC;
            final FixtureDef fixtureDef = new FixtureDef();
            fixtureDef.friction = 0.3f;
            fixtureDef.restitution = 0.3f;
            fixtureDef.density = 0.2f;
            final CircleShape shape = new CircleShape();
            shape.m_radius = ballRadius;
            fixtureDef.shape = shape;

            for (int i = 0; i < 6; ++i) {
                for (int j = 0; j < 6; ++j) {
                    // Spacing leaves a 0.01 gap between neighbouring balls.
                    final float x = (j + 0.5f) * (ballRadius * 2 + 0.01f);
                    final float y = (i + 0.5f) * (ballRadius * 2 + 0.01f);
                    ballDef.position.x = 3 + x;
                    ballDef.position.y = 3 + y;
                    Body body = world.createBody(ballDef);
                    body.createFixture(fixtureDef);
                    ballDef.position.x = 3 - x;
                    ballDef.position.y = 3 + y;
                    body = world.createBody(ballDef);
                    body.createFixture(fixtureDef);
                    ballDef.position.x = 3 + x;
                    ballDef.position.y = 3 - y;
                    body = world.createBody(ballDef);
                    body.createFixture(fixtureDef);
                    ballDef.position.x = 3 - x;
                    ballDef.position.y = 3 - y;
                    body = world.createBody(ballDef);
                    body.createFixture(fixtureDef);
                }
            }
        }

        /** Pins the reel to the static axis with a revolute joint so it can spin in place. */
        private void joinReelToAxis() {
            final RevoluteJointDef jointDef = new RevoluteJointDef();
            jointDef.bodyA = axis;
            jointDef.bodyB = reel;
            world.createJoint(jointDef);
        }

        /**
         * Advances the simulation in fixed 10 ms steps until it catches up
         * with wall-clock time, reversing the reel's drive torque every
         * 10 seconds of elapsed time.
         */
        public void calculate() {
            final long currentTime = System.currentTimeMillis();
            int timeToCalculate = (int) (currentTime - lastCalculated);
            final long relativeTime = currentTime - startTime;
            while (timeToCalculate > 10) {
                // period flips parity every 10 s (offset by 5 s), reversing torque.
                final int period = (int) ((relativeTime + 5000) / 10000);
                reel.applyTorque(period % 2 == 0 ? 8f : -8f);
                world.step(0.01f, 20, 40);
                lastCalculated += 10;
                timeToCalculate -= 10;
            }
            // NOTE(review): resetting to "now" discards any remainder < 10 ms
            // left over from the loop above — confirm this drift is intended.
            lastCalculated = System.currentTimeMillis();
        }

        public World getWorld() {
            return world;
        }
    }
    // Simulation and rendering singletons shared across the static callbacks.
    private static Scene scene;
    private static CanvasRenderingContext2D renderingContext2D;
    private static AnimationFrameCallback animationCallback;
    private static Window window;
    /**
     * Entry point: builds the physics scene, looks up the canvas and start
     * button in the DOM, and begins the animation loop on button click.
     *
     * @param args unused
     */
    public static void main(final String[] args) {
        scene = new Scene();
        window = Window.window();
        final Document document = window.document();

        final HTMLCanvasElement theCanvas = document.getElementById("benchmark-canvas");
        renderingContext2D = theCanvas.getContext("2d");

        // One animation frame: advance physics, draw, and report timing.
        animationCallback = new AnimationFrameCallback() {
            @Override
            public void run(final int aElapsedTime) {
                final long theStart = System.currentTimeMillis();
                statsBegin();
                scene.calculate();
                render();
                statsEnd();
                final int theDuration = (int) (System.currentTimeMillis() - theStart);
                logRuntime(theDuration);
                // Re-schedule itself; the loop never stops once started.
                window.requestAnimationFrame(animationCallback);
            }
        };

        final HTMLButton button = document.getElementById("button");
        button.addEventListener("click", new EventListener<MouseEvent>() {
            @Override
            public void run(final MouseEvent aValue) {
                // Disable the button so the loop cannot be started twice.
                button.disabled(true);
                window.requestAnimationFrame(animationCallback);
            }
        });

        // Display which build of the page is running.
        window.fetch("versioninfo.txt").then(response -> {
            response.text().then(text -> {
                document.getElementById("versioninfo").innerHTML(text);
            });
        });
    }
    /** Reports one frame's duration (ms) to the host page's "debug" module. */
    @Import(module = "debug", name = "logRuntime")
    public static native void logRuntime(int aValue);

    /** Marks the start of a host-side stats measurement interval. */
    @Import(module = "stats", name = "begin")
    public static native void statsBegin();

    /** Marks the end of a host-side stats measurement interval. */
    @Import(module = "stats", name = "end")
    public static native void statsEnd();
    /**
     * Draws every body in the world onto the canvas as stroked outlines.
     * The canvas is flipped vertically and scaled by 100 so world
     * coordinates (y-up) map onto the 600x600 pixel canvas (y-down).
     */
    private static void render() {
        renderingContext2D.setFillStyle("white");
        renderingContext2D.setStrokeStyle("black");
        renderingContext2D.fillRect(0, 0, 600, 600);
        renderingContext2D.save();
        // Flip the y-axis and scale: 1 world unit == 100 px.
        renderingContext2D.translate(0, 600);
        renderingContext2D.scale(1, -1);
        renderingContext2D.scale(100, 100);
        renderingContext2D.setLineWidth(0.01f);
        for (Body body = scene.getWorld().getBodyList(); body != null; body = body.getNext()) {
            final Vec2 center = body.getPosition();
            renderingContext2D.save();
            // Move into the body's local coordinate system.
            renderingContext2D.translate(center.x, center.y);
            renderingContext2D.rotate(body.getAngle());
            for (Fixture fixture = body.getFixtureList(); fixture != null; fixture = fixture.getNext()) {
                final Shape shape = fixture.getShape();
                if (shape.getType() == ShapeType.CIRCLE) {
                    final CircleShape circle = (CircleShape) shape;
                    renderingContext2D.beginPath();
                    renderingContext2D.arc(circle.m_p.x, circle.m_p.y, circle.getRadius(), 0, Math.PI * 2, true);
                    renderingContext2D.closePath();
                    renderingContext2D.stroke();
                } else if (shape.getType() == ShapeType.POLYGON) {
                    final PolygonShape poly = (PolygonShape) shape;
                    final Vec2[] vertices = poly.getVertices();
                    renderingContext2D.beginPath();
                    renderingContext2D.moveTo(vertices[0].x, vertices[0].y);
                    for (int i = 1; i < poly.getVertexCount(); ++i) {
                        renderingContext2D.lineTo(vertices[i].x, vertices[i].y);
                    }
                    renderingContext2D.closePath();
                    renderingContext2D.stroke();
                }
                // Other shape types (edges, chains) are not drawn; this scene
                // only contains circles and polygons.
            }
            renderingContext2D.restore();
        }
        renderingContext2D.restore();
    }
} | mirkosertic/Bytecoder | integrationtest/src/main/java/de/mirkosertic/bytecoder/integrationtest/JBox2DSimulation.java | Java | apache-2.0 | 10,500 |
// +build linux
package node
import (
"fmt"
"net"
"sync"
"syscall"
"github.com/golang/glog"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"github.com/openshift/origin/pkg/network/common"
networkinformers "github.com/openshift/origin/pkg/network/generated/informers/internalversion"
"github.com/vishvananda/netlink"
)
// egressIPWatcher reacts to egress-IP assignment changes for this node:
// it claims/releases local egress IPs (via netlink and iptables) and
// monitors remote egress nodes over VXLAN.
type egressIPWatcher struct {
	sync.Mutex

	tracker *common.EgressIPTracker
	oc      *ovsController

	localIP       string // this node's primary IP
	masqueradeBit uint32 // kubelet masquerade mark bit, stored pre-shifted (1 << bit)

	iptables     *NodeIPTables
	iptablesMark map[string]string // egress IP -> iptables mark string

	vxlanMonitor *egressVXLANMonitor

	localEgressLink netlink.Link // interface carrying localIP
	localEgressNet  *net.IPNet   // subnet of that interface

	testModeChan chan string // when non-nil, claim/release actions are reported here instead of applied
}
// newEgressIPWatcher builds an egress-IP watcher for this node.
// masqueradeBit is the kubelet iptables masquerade bit number (may be nil);
// it is stored pre-shifted so it can be compared against VNID marks directly.
func newEgressIPWatcher(oc *ovsController, localIP string, masqueradeBit *int32) *egressIPWatcher {
	eip := &egressIPWatcher{
		oc:           oc,
		localIP:      localIP,
		iptablesMark: make(map[string]string),
	}
	if masqueradeBit != nil {
		eip.masqueradeBit = 1 << uint32(*masqueradeBit)
	}

	// The watcher registers itself as the tracker's callback receiver
	// (ClaimEgressIP, ReleaseEgressIP, SetNamespaceEgress*).
	eip.tracker = common.NewEgressIPTracker(eip)
	return eip
}
// Start wires the watcher into the informer framework: it resolves the local
// egress interface, starts the VXLAN health monitor, and begins watching
// HostSubnets/NetNamespaces via the tracker.
func (eip *egressIPWatcher) Start(networkInformers networkinformers.SharedInformerFactory, iptables *NodeIPTables) error {
	var err error
	if eip.localEgressLink, eip.localEgressNet, err = GetLinkDetails(eip.localIP); err != nil {
		// Not expected, should already be caught by node.New(). The error is
		// deliberately swallowed here, effectively disabling egress-IP support.
		return nil
	}
	eip.iptables = iptables

	updates := make(chan *egressVXLANNode)
	eip.vxlanMonitor = newEgressVXLANMonitor(eip.oc.ovs, eip.tracker, updates)
	// watchVXLAN runs until the updates channel is closed.
	go eip.watchVXLAN(updates)

	eip.tracker.Start(networkInformers.Network().InternalVersion().HostSubnets(), networkInformers.Network().InternalVersion().NetNamespaces())
	return nil
}
// getMarkForVNID converts a VNID to the hex packet-mark string used in
// iptables rules. The result is never 0, never has the kubelet masquerade
// bit set, and is distinct from the mark of every other valid VNID.
func getMarkForVNID(vnid, masqueradeBit uint32) string {
	mark := vnid
	if mark == 0 {
		// VNID 0 gets a reserved, non-zero mark.
		mark = 0xff000000
	}
	if mark&masqueradeBit != 0 {
		// Clear the masquerade bit, setting a high bit instead so the
		// result remains unique among valid VNID marks.
		mark = (mark | 0x01000000) ^ masqueradeBit
	}
	return fmt.Sprintf("0x%08x", mark)
}
// ClaimEgressIP is invoked by the tracker when egressIP is assigned to the
// node with IP nodeIP. If that is this node, the IP is configured locally;
// otherwise the remote node is added to the VXLAN health monitor.
// NOTE(review): the embedded mutex is not taken here — presumably the
// tracker serializes these callbacks; confirm before calling from elsewhere.
func (eip *egressIPWatcher) ClaimEgressIP(vnid uint32, egressIP, nodeIP string) {
	if nodeIP == eip.localIP {
		// Remember the mark so Release/SetNamespaceEgressViaEgressIP can reuse it.
		mark := getMarkForVNID(vnid, eip.masqueradeBit)
		eip.iptablesMark[egressIP] = mark
		if err := eip.assignEgressIP(egressIP, mark); err != nil {
			utilruntime.HandleError(fmt.Errorf("Error assigning Egress IP %q: %v", egressIP, err))
		}
	} else if eip.vxlanMonitor != nil {
		eip.vxlanMonitor.AddNode(nodeIP)
	}
}
// ReleaseEgressIP is invoked by the tracker when egressIP is no longer
// assigned to the node with IP nodeIP. Mirrors ClaimEgressIP: local IPs are
// torn down, remote nodes are dropped from the VXLAN monitor.
func (eip *egressIPWatcher) ReleaseEgressIP(egressIP, nodeIP string) {
	if nodeIP == eip.localIP {
		mark := eip.iptablesMark[egressIP]
		delete(eip.iptablesMark, egressIP)
		if err := eip.releaseEgressIP(egressIP, mark); err != nil {
			utilruntime.HandleError(fmt.Errorf("Error releasing Egress IP %q: %v", egressIP, err))
		}
	} else if eip.vxlanMonitor != nil {
		eip.vxlanMonitor.RemoveNode(nodeIP)
	}
}
// SetNamespaceEgressNormal restores default (non-egress-IP) routing for the
// namespace identified by vnid; errors are reported, not returned.
func (eip *egressIPWatcher) SetNamespaceEgressNormal(vnid uint32) {
	if err := eip.oc.SetNamespaceEgressNormal(vnid); err != nil {
		utilruntime.HandleError(fmt.Errorf("Error updating Namespace egress rules for VNID %d: %v", vnid, err))
	}
}
// SetNamespaceEgressDropped makes the namespace identified by vnid drop all
// egress traffic (e.g. while its egress IP is unavailable); errors are
// reported, not returned.
func (eip *egressIPWatcher) SetNamespaceEgressDropped(vnid uint32) {
	if err := eip.oc.SetNamespaceEgressDropped(vnid); err != nil {
		utilruntime.HandleError(fmt.Errorf("Error updating Namespace egress rules for VNID %d: %v", vnid, err))
	}
}
// SetNamespaceEgressViaEgressIP routes the namespace's egress traffic via
// egressIP on the node with IP nodeIP, using the mark recorded at claim
// time; errors are reported, not returned.
func (eip *egressIPWatcher) SetNamespaceEgressViaEgressIP(vnid uint32, egressIP, nodeIP string) {
	// The mark is only present when this node claimed the IP; otherwise it
	// is the empty string.
	mark := eip.iptablesMark[egressIP]
	if err := eip.oc.SetNamespaceEgressViaEgressIP(vnid, nodeIP, mark); err != nil {
		utilruntime.HandleError(fmt.Errorf("Error updating Namespace egress rules for VNID %d: %v", vnid, err))
	}
}
// assignEgressIP adds egressIP as a secondary address on the local egress
// interface and installs the iptables rules tying traffic with the given
// mark to it. In test mode the claim is only reported on testModeChan.
func (eip *egressIPWatcher) assignEgressIP(egressIP, mark string) error {
	if egressIP == eip.localIP {
		return fmt.Errorf("desired egress IP %q is the node IP", egressIP)
	}

	if eip.testModeChan != nil {
		eip.testModeChan <- fmt.Sprintf("claim %s", egressIP)
		return nil
	}

	// Give the egress IP the same prefix length as the node's own address.
	localEgressIPMaskLen, _ := eip.localEgressNet.Mask.Size()
	egressIPNet := fmt.Sprintf("%s/%d", egressIP, localEgressIPMaskLen)
	addr, err := netlink.ParseAddr(egressIPNet)
	if err != nil {
		return fmt.Errorf("could not parse egress IP %q: %v", egressIPNet, err)
	}
	if !eip.localEgressNet.Contains(addr.IP) {
		return fmt.Errorf("egress IP %q is not in local network %s of interface %s", egressIP, eip.localEgressNet.String(), eip.localEgressLink.Attrs().Name)
	}
	err = netlink.AddrAdd(eip.localEgressLink, addr)
	if err != nil {
		if err == syscall.EEXIST {
			// Already present (e.g. after a restart) — not an error.
			glog.V(2).Infof("Egress IP %q already exists on %s", egressIPNet, eip.localEgressLink.Attrs().Name)
		} else {
			return fmt.Errorf("could not add egress IP %q to %s: %v", egressIPNet, eip.localEgressLink.Attrs().Name, err)
		}
	}

	if err := eip.iptables.AddEgressIPRules(egressIP, mark); err != nil {
		return fmt.Errorf("could not add egress IP iptables rule: %v", err)
	}

	return nil
}
// releaseEgressIP is the inverse of assignEgressIP: it removes the address
// from the local egress interface and deletes the matching iptables rules.
// In test mode the release is only reported on testModeChan.
func (eip *egressIPWatcher) releaseEgressIP(egressIP, mark string) error {
	if egressIP == eip.localIP {
		// The node IP was never added by us, so there is nothing to remove.
		return nil
	}

	if eip.testModeChan != nil {
		eip.testModeChan <- fmt.Sprintf("release %s", egressIP)
		return nil
	}

	// Reconstruct the same CIDR form that assignEgressIP added.
	localEgressIPMaskLen, _ := eip.localEgressNet.Mask.Size()
	egressIPNet := fmt.Sprintf("%s/%d", egressIP, localEgressIPMaskLen)
	addr, err := netlink.ParseAddr(egressIPNet)
	if err != nil {
		return fmt.Errorf("could not parse egress IP %q: %v", egressIPNet, err)
	}
	err = netlink.AddrDel(eip.localEgressLink, addr)
	if err != nil {
		if err == syscall.EADDRNOTAVAIL {
			// Already gone — log and continue so the iptables cleanup still runs.
			glog.V(2).Infof("Could not delete egress IP %q from %s: no such address", egressIPNet, eip.localEgressLink.Attrs().Name)
		} else {
			return fmt.Errorf("could not delete egress IP %q from %s: %v", egressIPNet, eip.localEgressLink.Attrs().Name, err)
		}
	}

	if err := eip.iptables.DeleteEgressIPRules(egressIP, mark); err != nil {
		return fmt.Errorf("could not delete egress IP iptables rule: %v", err)
	}

	return nil
}
// watchVXLAN forwards online/offline transitions detected by the VXLAN
// monitor into the tracker; runs until the updates channel is closed.
func (eip *egressIPWatcher) watchVXLAN(updates chan *egressVXLANNode) {
	for node := range updates {
		eip.tracker.SetNodeOffline(node.nodeIP, node.offline)
	}
}
| legionus/origin | pkg/network/node/egressip.go | GO | apache-2.0 | 6,103 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.util.SqlBasicVisitor;
import org.apache.calcite.sql.util.SqlVisitor;
import java.util.ArrayList;
import java.util.List;
/**
 * An operator describing a query. (Not a query itself.)
 *
 * <p>Operands (in the order consumed by
 * {@link #createCall(SqlLiteral, SqlParserPos, SqlNode...)}) are:</p>
 *
 * <ul>
 * <li>0: keyword list, e.g. DISTINCT/ALL ({@link SqlNodeList})</li>
 * <li>1: select clause ({@link SqlNodeList})</li>
 * <li>2: from clause ({@link SqlCall} to "join" operator)</li>
 * <li>3: where clause ({@link SqlNode})</li>
 * <li>4: group clause ({@link SqlNodeList})</li>
 * <li>5: having clause ({@link SqlNode})</li>
 * <li>6: qualify clause ({@link SqlNode})</li>
 * <li>7: window clause ({@link SqlNodeList})</li>
 * <li>8: order clause ({@link SqlNodeList})</li>
 * <li>9: offset ({@link SqlNode})</li>
 * <li>10: fetch ({@link SqlNode})</li>
 * <li>11: hints ({@link SqlNodeList})</li>
 * </ul>
 */
public class SqlSelectOperator extends SqlOperator {
  /** Singleton instance; the operator itself carries no per-call state. */
  public static final SqlSelectOperator INSTANCE =
      new SqlSelectOperator();

  //~ Constructors -----------------------------------------------------------

  private SqlSelectOperator() {
    super("SELECT", SqlKind.SELECT, 2, true, ReturnTypes.SCOPE, null, null);
  }

  //~ Methods ----------------------------------------------------------------

  public SqlSyntax getSyntax() {
    return SqlSyntax.SPECIAL;
  }

  /**
   * Creates a {@link SqlSelect} from positional operands; see the class
   * Javadoc for the expected operand order.
   */
  public SqlCall createCall(
      SqlLiteral functionQualifier,
      SqlParserPos pos,
      SqlNode... operands) {
    // SELECT takes no function qualifier (DISTINCT/ALL arrive as operand 0).
    assert functionQualifier == null;
    return new SqlSelect(pos,
        /*keywordList=*/ (SqlNodeList) operands[0],
        /*selectList=*/ (SqlNodeList) operands[1],
        /* from= */ operands[2],
        /* where= */ operands[3],
        /*groupBy=*/ (SqlNodeList) operands[4],
        /*having=*/ operands[5],
        /*qualify=*/ operands[6],
        /*windowDecls=*/ (SqlNodeList) operands[7],
        /*orderBy=*/ (SqlNodeList) operands[8],
        /*offset=*/ operands[9],
        /*fetch=*/ operands[10],
        /*hints=*/ (SqlNodeList) operands[11]);
  }

  /**
   * Creates a call to the <code>SELECT</code> operator.
   *
   * @param keywordList List of keywords such DISTINCT and ALL, or null
   * @param selectList  The SELECT clause, or null if empty
   * @param fromClause  The FROM clause
   * @param whereClause The WHERE clause, or null if not present
   * @param groupBy     The GROUP BY clause, or null if not present
   * @param having      The HAVING clause, or null if not present
   * @param qualify     The QUALIFY clause, or null if not present
   * @param windowDecls The WINDOW clause, or null if not present
   * @param orderBy     The ORDER BY clause, or null if not present
   * @param offset      Expression for number of rows to discard before
   *                    returning first row
   * @param fetch       Expression for number of rows to fetch
   * @param hints       The hint list, or null if not present
   * @param pos         The parser position, or
   *                    {@link org.apache.calcite.sql.parser.SqlParserPos#ZERO}
   *                    if not specified; must not be null.
   * @return A {@link SqlSelect}, never null
   */
  public SqlSelect createCall(
      SqlNodeList keywordList,
      SqlNodeList selectList,
      SqlNode fromClause,
      SqlNode whereClause,
      SqlNodeList groupBy,
      SqlNode having,
      SqlNode qualify,
      SqlNodeList windowDecls,
      SqlNodeList orderBy,
      SqlNode offset,
      SqlNode fetch,
      SqlNodeList hints,
      SqlParserPos pos) {
    return new SqlSelect(
        pos,
        keywordList,
        selectList,
        fromClause,
        whereClause,
        groupBy,
        having,
        qualify,
        windowDecls,
        orderBy,
        offset,
        fetch,
        hints);
  }

  public <R> void acceptCall(
      SqlVisitor<R> visitor,
      SqlCall call,
      boolean onlyExpressions,
      SqlBasicVisitor.ArgHandler<R> argHandler) {
    if (!onlyExpressions) {
      // None of the arguments to the SELECT operator are expressions.
      super.acceptCall(visitor, call, onlyExpressions, argHandler);
    }
  }

  /** Unparses a {@link SqlSelect} back into SQL text, clause by clause. */
  @SuppressWarnings("deprecation")
  public void unparse(
      SqlWriter writer,
      SqlCall call,
      int leftPrec,
      int rightPrec) {
    SqlSelect select = (SqlSelect) call;
    final SqlWriter.Frame selectFrame =
        writer.startList(SqlWriter.FrameTypeEnum.SELECT);
    writer.sep("SELECT");

    if (select.hasHints()) {
      writer.sep("/*+");
      select.hints.unparse(writer, 0, 0);
      writer.print("*/");
      writer.newlineAndIndent();
    }

    // Keywords such as DISTINCT / ALL.
    for (int i = 0; i < select.keywordList.size(); i++) {
      final SqlNode keyword = select.keywordList.get(i);
      keyword.unparse(writer, 0, 0);
    }
    // Dialects with TOP-style limits emit them here instead of FETCH/OFFSET.
    if (select.topN != null) {
      writer.topN(select.topN);
    } else {
      writer.fetchAsTopN(select.fetch, select.offset);
    }
    // A missing select list unparses as "*".
    final SqlNodeList selectClause =
        select.selectList != null
            ? select.selectList
            : SqlNodeList.of(SqlIdentifier.star(SqlParserPos.ZERO));
    writer.list(SqlWriter.FrameTypeEnum.SELECT_LIST, SqlWriter.COMMA,
        selectClause);

    if (select.from != null) {
      // Calcite SQL requires FROM but MySQL does not.
      writer.sep("FROM");

      // for FROM clause, use precedence just below join operator to make
      // sure that an un-joined nested select will be properly
      // parenthesized
      final SqlWriter.Frame fromFrame =
          writer.startList(SqlWriter.FrameTypeEnum.FROM_LIST);
      select.from.unparse(
          writer,
          SqlJoin.OPERATOR.getLeftPrec() - 1,
          SqlJoin.OPERATOR.getRightPrec() - 1);
      writer.endList(fromFrame);
    }

    if (select.where != null) {
      writer.sep("WHERE");

      if (!writer.isAlwaysUseParentheses()) {
        SqlNode node = select.where;

        // decide whether to split on ORs or ANDs
        SqlBinaryOperator whereSep = SqlStdOperatorTable.AND;
        if ((node instanceof SqlCall)
            && node.getKind() == SqlKind.OR) {
          whereSep = SqlStdOperatorTable.OR;
        }

        // unroll whereClause into a flat list of operands so the writer
        // can break lines between the top-level AND/OR terms
        final List<SqlNode> list = new ArrayList<>(0);
        while (node.getKind() == whereSep.kind) {
          assert node instanceof SqlCall;
          final SqlCall call1 = (SqlCall) node;
          list.add(0, call1.operand(1));
          node = call1.operand(0);
        }
        list.add(0, node);

        // unparse in a WHERE_LIST frame
        writer.list(SqlWriter.FrameTypeEnum.WHERE_LIST, whereSep,
            new SqlNodeList(list, select.where.getParserPosition()));
      } else {
        select.where.unparse(writer, 0, 0);
      }
    }
    if (select.groupBy != null) {
      writer.sep("GROUP BY");
      // "GROUP BY ()" is represented by an empty, non-null group list.
      final SqlNodeList groupBy =
          select.groupBy.size() == 0 ? SqlNodeList.SINGLETON_EMPTY
              : select.groupBy;
      writer.list(SqlWriter.FrameTypeEnum.GROUP_BY_LIST, SqlWriter.COMMA,
          groupBy);
    }
    if (select.having != null) {
      writer.sep("HAVING");
      select.having.unparse(writer, 0, 0);
    }
    if (select.getQualify() != null) {
      writer.sep("QUALIFY");
      select.getQualify().unparse(writer, 0, 0);
    }
    if (select.windowDecls.size() > 0) {
      writer.sep("WINDOW");
      writer.list(SqlWriter.FrameTypeEnum.WINDOW_DECL_LIST, SqlWriter.COMMA,
          select.windowDecls);
    }
    if (select.orderBy != null && select.orderBy.size() > 0) {
      writer.sep("ORDER BY");
      writer.list(SqlWriter.FrameTypeEnum.ORDER_BY_LIST, SqlWriter.COMMA,
          select.orderBy);
    }
    // No-op when the dialect already emitted these via fetchAsTopN above.
    writer.fetchOffset(select.fetch, select.offset);
    writer.endList(selectFrame);
  }

  public boolean argumentMustBeScalar(int ordinal) {
    return ordinal == SqlSelect.WHERE_OPERAND;
  }
}
| googleinterns/calcite | core/src/main/java/org/apache/calcite/sql/SqlSelectOperator.java | Java | apache-2.0 | 8,563 |
package org.docksidestage.hangar.dbflute.dtomapper;
import java.util.Map;
import org.dbflute.Entity;
import org.docksidestage.hangar.dbflute.dtomapper.bs.BsMemberServiceDtoMapper;
/**
* The DTO mapper of MEMBER_SERVICE.
* <p>
* You can implement your original methods here.
* This class remains when re-generating.
* </p>
* @author DBFlute(AutoGenerator)
*/
public class MemberServiceDtoMapper extends BsMemberServiceDtoMapper {

    /** Serial version UID. (Default) */
    private static final long serialVersionUID = 1L;

    /**
     * Creates a mapper with its own (fresh) relation caches.
     */
    public MemberServiceDtoMapper() {
        super();
    }

    /**
     * Creates a mapper that shares the given relation caches with other
     * mappers (see the behavior-class {@code BsMemberServiceDtoMapper}).
     * @param relationDtoMap The shared map from entity to relation DTO.
     * @param relationEntityMap The shared map from relation DTO to entity.
     */
    public MemberServiceDtoMapper(Map<Entity, Object> relationDtoMap, Map<Object, Entity> relationEntityMap) {
        super(relationDtoMap, relationEntityMap);
    }
}
| dbflute-test/dbflute-test-active-hangar | src/main/java/org/docksidestage/hangar/dbflute/dtomapper/MemberServiceDtoMapper.java | Java | apache-2.0 | 776 |
/*
* @Author: aaronpmishkin
* @Date: 2016-06-17 09:05:15
* @Last Modified by: aaronpmishkin
* @Last Modified time: 2017-06-02 17:48:12
*/
// Import Angular Classes:
import { Injectable } from '@angular/core';
import { NgZone } from '@angular/core';
// Import Libraries:
import * as d3 from 'd3';
import { Subject } from 'rxjs/Subject';
import '../../utilities/rxjs-operators';
// Import Application Classes
import { RendererService } from '../services/Renderer.service';
import { ChartUndoRedoService } from '../services/ChartUndoRedo.service';
import { LabelDefinitions } from '../definitions/Label.definitions';
// Import Model Classes:
import { Objective } from '../../../model/Objective';
import { PrimitiveObjective } from '../../../model/PrimitiveObjective';
import { AbstractObjective } from '../../../model/AbstractObjective';
import { RowData, CellData, LabelData, RendererConfig } from '../../../types/RendererData.types';
import { RendererUpdate } from '../../../types/RendererData.types';
import { ObjectivesRecord } from '../../../types/Record.types';
/*
This class contains all the logic for dragging objective labels change the order of objectives in the objective and summary charts.
Any objective label can be dragged within the confines of its parent label's Dimension Two so that it may be reordered with respect
to its siblings (ie. the other children of the parent). The rows of the objective and summary charts are reordered to reflect the change
in the label ordering when a label is released.
*/
@Injectable()
export class ReorderObjectivesInteraction {

	// ========================================================================================
	// 									Fields
	// ========================================================================================

	public lastRendererUpdate: RendererUpdate;								// The most recent RendererUpdate; source of the ValueChart, renderer config, and label data used during dragging.
	private labelRootContainer: d3.Selection<any,any,any,any>;				// The root 'g' element of the label area whose descendants receive the drag behavior.
	private reorderSubject: Subject<boolean>;								// Emits true whenever a drag gesture completes; subscribers re-render the chart.

	private ignoreReorder: boolean;					// Whether the drag events should be ignored. If true, all dragging of the current label is ignored.

	// Fields used to store information about the active dragging event chain.
	private reorderObjectiveMouseOffset: number; // Offset of the mouse from the Coordinate Two position of the label that is to be dragged. This is set when dragging first begins.
	private totalCoordTwoChange: number = 0; // The Coordinate Two distance that the label has been moved so far.
	private containerToReorder: d3.Selection<any, any, any, any>;	// The the d3 selection of the 'g' element that holds the label being reordered.
	private parentObjectiveName: string;	// The name of the parent objective for the label being reordered.
	private parentContainer: d3.Selection<any, any, any, any>;	// The selection of the container that holds the container for the label being reordered.
	private siblingContainers: d3.Selection<any, any, any, any>;	// The selection of label containers s.t. every label container is at the same level in the label hierarchy as containerToReorder and also has the same parent label.
	private objectiveDimensionTwo: number;	// The Dimension Two (height if vertical, width of horizontal) of the label being dragged.
	private objectiveCoordTwoOffset: number;	// The initial Coordinate Two position (y if vertical, x if horizontal) of the label being reordered.
	private maxCoordinateTwo: number;	// The maximum Coordinate Two that label being reordered can have before it exits the label area.
	private currentObjectiveIndex: number;	// The index of the label being reordered in the list of siblings.
	private newObjectiveIndex: number;	// The new index of the label as a result of the dragging.
	private jumpPoints: number[];	// The list of points that define what position the label being reordered has been moved to.

	// ========================================================================================
	// 									Constructor
	// ========================================================================================

	/*
		@returns {void}
		@description 	Used for Angular's dependency injection ONLY. It should not be used to do any initialization of the class.
						This constructor will be called automatically when Angular constructs an instance of this class prior to dependency injection.
						It also registers changeRowOrder as the handler for OBJECTIVES_CHANGE undo/redo events.
	*/
	constructor(
		private rendererService: RendererService,
		private chartUndoRedoService: ChartUndoRedoService) {
		this.chartUndoRedoService.undoRedoDispatcher.on(this.chartUndoRedoService.OBJECTIVES_CHANGE, this.changeRowOrder);
	}

	// ========================================================================================
	// 									Methods
	// ========================================================================================

	/*
		@param enableReordering - Whether or not to enable dragging to reorder objectives.
		@param labelRootContainer - The root selection of the label area.
		@param rendererUpdate - The most recent renderer update; cached for use by the drag handlers.
		@returns {Subject} - Emits true each time a reorder gesture finishes; callers subscribe to trigger re-rendering.
		@description	Toggles clicking and dragging labels in the label area to reorder objectives. Both abstract and primitive objectives
						can be reordered via label dragging when the user interaction is enabled. Dragging is implemented using d3's dragging
						system and makes use of all three drag events. 'start' is used to perform setup that is required to for dragging to work
						properly and is called before the 'drag' events fire. 'drag' is used to implement the visual dragging mechanism. Note that
						the handler for these events, reorderObjectives, only updates the visual display of the objective area. 'end' is used to
						actually reorder the objectives within the objective hierarchy, and then re-render the ValueChart via the ValueChartDirective.
						When enableReordering is false, a drag behavior with no handlers is still attached, which replaces (and thereby disables)
						any previously registered reordering handlers.
	*/
	public toggleObjectiveReordering(enableReordering: boolean, labelRootContainer: d3.Selection<any, any, any, any>, rendererUpdate: RendererUpdate): Subject<boolean> {
		this.lastRendererUpdate = rendererUpdate;
		this.labelRootContainer = labelRootContainer;

		var labelOutlines: d3.Selection<any, any, any, any> = labelRootContainer.selectAll('.' + LabelDefinitions.SUBCONTAINER_OUTLINE);
		var labelTexts: d3.Selection<any, any, any, any> = labelRootContainer.selectAll('.' + LabelDefinitions.SUBCONTAINER_TEXT);

		var dragToReorder: d3.DragBehavior<any, any, any> = d3.drag();

		if (enableReordering) {
			dragToReorder
				.on('start', this.startReorderObjectives)
				.on('drag', this.reorderObjectives)
				.on('end', this.endReorderObjectives);
		}

		// Attach the drag behavior to both label outlines and label text so dragging works from either.
		labelOutlines.call(dragToReorder);
		labelTexts.call(dragToReorder);

		// A fresh subject is created on every toggle; previous subscriptions do not carry over.
		this.reorderSubject = new Subject();
		return this.reorderSubject;
	}

	// This function is called when a user first begins to drag a label to rearrange the ordering of objectives. It contains all the logic required to initialize the drag,
	// including determining the bounds that the label can be dragged in, the points where the label is considered to have switched positions, etc.
	private startReorderObjectives = (d: LabelData, i: number) => {
		// Reset variables.
		this.ignoreReorder = false;	// Whether the drag events should be ignored. If true, all further dragging of the current label will be ignored.
		this.reorderObjectiveMouseOffset = undefined; // Offset of the mouse from the Coordinate Two position of the label that is to be dragged.
		this.totalCoordTwoChange = 0;	// The Coordinate Two distance that the label has been moved so far.

		this.containerToReorder = d3.select('#label-' + d.objective.getId() + '-container');	// The container that holds the label being reordered.
		this.parentObjectiveName = (<Element>this.containerToReorder.node()).getAttribute('parent');	// The name of the parent objective for the label being reordered.

		// If the selected label is the root label, then it is not possible to reorder, and all further drag events for this selection should be ignored.
		if (this.parentObjectiveName === LabelDefinitions.ROOT_CONTAINER_NAME) {
			this.ignoreReorder = true;
			return;
		}

		// Snapshot the objective hierarchy so this gesture can be undone; deleted later if nothing changes.
		this.chartUndoRedoService.saveObjectivesRecord(this.lastRendererUpdate.valueChart.getRootObjectives());

		this.parentContainer = d3.select('#label-' + this.parentObjectiveName + '-container');	// The container that holds the container for the label being reordered.
		this.siblingContainers = this.parentContainer.selectAll('g[parent="' + this.parentObjectiveName + '"]');	// The selection of label containers s.t. every label container is at the same level as containerToReorder, with the same parent.
																														// Note: siblingsConatiners includes containerToReorder.
		// Set all the siblings that are NOT being moved to be partially transparent.
		this.siblingContainers.style('opacity', 0.5);
		this.containerToReorder.style('opacity', 1);

		var parentOutline: d3.Selection<any, any, any, any> = this.parentContainer.select('rect');	// Select the rect that outlines the parent label of the label being reordered.
		var currentOutline: d3.Selection<any, any, any, any> = this.containerToReorder.select('rect');	// Select the rect that outlines the label being reordered.

		this.objectiveDimensionTwo = +currentOutline.attr(this.lastRendererUpdate.rendererConfig.dimensionTwo);	// Determine the Dimension Two (height if vertical, width of horizontal) of the label being dragged.
		this.maxCoordinateTwo = +parentOutline.attr(this.lastRendererUpdate.rendererConfig.dimensionTwo) - this.objectiveDimensionTwo;	// Determine the maximum Coordinate Two of the label being reordered.
		this.objectiveCoordTwoOffset = +currentOutline.attr(this.lastRendererUpdate.rendererConfig.coordinateTwo);	// Determine the initial Coordinate Two position (y if vertical, x if horizontal) of the label being reordered.

		this.currentObjectiveIndex = this.siblingContainers.nodes().indexOf(this.containerToReorder.node());	// Determine the index of the label being reordered in the list of siblings.
		this.newObjectiveIndex = this.currentObjectiveIndex;
		this.jumpPoints = [0];	// Initialize the list of points which define what position the label being reordered has been moved to.

		this.siblingContainers.select('rect').nodes().forEach((el: Element) => {
			if (el !== undefined) {
				// For each of the labels that the label being reordered can be switched with, determine its Coordinate Two midpoint. This is used to determine what position the label being reordered has been moved to.
				let selection: d3.Selection<any, any, any, any> = d3.select(el);
				let jumpPoint: number = (+selection.attr(this.lastRendererUpdate.rendererConfig.dimensionTwo) / 2) + +selection.attr(this.lastRendererUpdate.rendererConfig.coordinateTwo);
				this.jumpPoints.push(jumpPoint);
			}
		});
		this.jumpPoints.push(this.lastRendererUpdate.rendererConfig.dimensionTwoSize);
	}

	// This function is called whenever a label that is being reordered is dragged by the user. It contains the logic which updates the
	// position of the label so the user knows where they have dragged it to as well as the code that determines what position the label will be in when dragging ends.
	private reorderObjectives = (d: LabelData, i: number) => {
		// Do nothing if we are ignoring the current dragging of the label.
		if (this.ignoreReorder) {
			return;
		}
		// Get the change in Coordinate Two from the d3 event. Note that although we are getting coordinateTwo, not dCoordinateTwo, this is the still the change.
		// The reason for this is because when a label is dragged, the transform of label container is changed, which can changes cooordinateTwo of the outline rectangle inside the container.
		// THis change is equal to deltaCoordinateTwo, meaning d3.event.cooordinateTwo is reset to 0 at the end of cooordinateTwo drag event, making cooordinateTwo really dCoordinateTwo
		var deltaCoordinateTwo: number = (<any>d3.event)[this.lastRendererUpdate.rendererConfig.coordinateTwo];

		// If we have not yet determined the mouse offset, then this is the first drag event that has been fired, and the mouse offset from 0 should the current mouse position.
		if (this.reorderObjectiveMouseOffset === undefined) {
			this.reorderObjectiveMouseOffset = deltaCoordinateTwo;
		}
		deltaCoordinateTwo = deltaCoordinateTwo - this.reorderObjectiveMouseOffset;	// Subtract the mouse offset to get the change in Coordinate Two from the 0 point.

		// Calculate the current Coordinate Two position of the label that is being reordered.
		// This is the recent change (deltaCoordinateTwo) + the totalChange so far (this.totalCoordTwoChange) + the offset of the label before dragging began (this.objectiveCoordTwoOffset)
		var currentCoordTwoPosition: number = deltaCoordinateTwo + this.totalCoordTwoChange + this.objectiveCoordTwoOffset;

		// Make sure that the label does not exit the bounds of the label area.
		if (currentCoordTwoPosition < 0) {
			deltaCoordinateTwo = 0 - this.totalCoordTwoChange - this.objectiveCoordTwoOffset;
		} else if (currentCoordTwoPosition > this.maxCoordinateTwo) {
			deltaCoordinateTwo = this.maxCoordinateTwo - this.totalCoordTwoChange - this.objectiveCoordTwoOffset;
		}
		// Add the most recent change in Coordinate Two to the total change so far.
		this.totalCoordTwoChange += deltaCoordinateTwo;

		// If we are dragging the label up, then we want to check the current position of the label from its top.
		// If we are dragging the label down, then we want to check the current position of the label form its bottom.
		var labelDimensionTwoOffset: number = (this.totalCoordTwoChange > 0) ? this.objectiveDimensionTwo : 0;

		// Determine which of the two jump points the label is current between, and assigned its new position accordingly.
		// NOTE(review): `var i` here shadows (and clobbers) the unused `i` parameter of this handler — harmless today, but worth renaming; confirm before relying on the parameter.
		for (var i = 0; i < this.jumpPoints.length; i++) {
			if (this.totalCoordTwoChange + labelDimensionTwoOffset > (this.jumpPoints[i] - this.objectiveCoordTwoOffset)
				&& this.totalCoordTwoChange + labelDimensionTwoOffset < (this.jumpPoints[i + 1] - this.objectiveCoordTwoOffset)) {
				this.newObjectiveIndex = i;
				break;
			}
		}
		// If we were dragging down, then the index is one off and must be decremented.
		if (this.totalCoordTwoChange > 0)
			this.newObjectiveIndex--;

		// Retrieved the previous transform of the label we are dragging so that it can be incremented properly.
		var previousTransform: string = this.containerToReorder.attr('transform');
		// Generate the new transform.
		var labelTransform: string = this.rendererService.incrementTransform(this.lastRendererUpdate.viewConfig, previousTransform, 0, deltaCoordinateTwo);
		// Apply the new transformation to the label.
		this.containerToReorder.attr('transform', labelTransform);
	}

	// This function is called when the label that is being reordered is released by the user, and dragging ends. It contains the logic for re-rendering the ValueChart according
	// to the labels new position.
	private endReorderObjectives = (d: LabelData, i: number) => {
		// Do nothing if we are ignoring the current dragging of the label.
		if (this.ignoreReorder) {
			return;
		}
		// Get the label data for the siblings of the label we arranged. Note that this contains the label data for the label we rearranged.
		var parentData: LabelData = this.parentContainer.datum();

		// Move the label data for the label we rearranged to its new position in the array of labels.
		if (this.newObjectiveIndex !== this.currentObjectiveIndex) {
			// Reorder the label data.
			let temp: LabelData = parentData.subLabelData.splice(this.currentObjectiveIndex, 1)[0];
			parentData.subLabelData.splice(this.newObjectiveIndex, 0, temp);

			// Reorder the Objectives
			let siblingObjectives: Objective[] = (<AbstractObjective>parentData.objective).getDirectSubObjectives()
			let tempObjective: Objective = siblingObjectives.splice(this.currentObjectiveIndex, 1)[0];
			siblingObjectives.splice(this.newObjectiveIndex, 0, tempObjective);
		} else {
			// No changes were made, so delete the change record that was created in startReorderObjectives.
			this.chartUndoRedoService.deleteNewestRecord();
			this.lastRendererUpdate.renderRequired.value = true;
		}

		// Select all the label data, not just the siblings of the label we moved.
		var labelData: LabelData[] = <any> d3.select('g[parent=' + LabelDefinitions.ROOT_CONTAINER_NAME + ']').data();

		// Re-arrange the rows of the objective and summary charts according to the new objective ordering. Note this triggers change detection in ValueChartDirective that
		// updates the object and summary charts. This is to avoid making the labelRenderer dependent on the other renderers.
		this.lastRendererUpdate.valueChart.setRootObjectives(this.getOrderedRootObjectives(labelData));

		// Restore full opacity on all sibling labels now that the drag is over.
		this.siblingContainers.style('opacity', 1);
		this.containerToReorder.style('opacity', 1);

		this.reorderSubject.next(true);
	}

	// This function extracts the ordering of objectives from the ordering of labels.
	// It recursively walks the label hierarchy, reordering each abstract objective's children to match its sub-label order.
	private getOrderedRootObjectives(labelData: LabelData[]): Objective[] {
		var rootObjectives: Objective[] = [];
		labelData.forEach((labelDatum: LabelData) => {
			var objective: Objective = labelDatum.objective;
			// depthOfChildren === 0 means a leaf (primitive) objective; only non-leaves have sub-labels to recurse into.
			if (labelDatum.depthOfChildren !== 0) {
				(<AbstractObjective>objective).setDirectSubObjectives(this.getOrderedRootObjectives(labelDatum.subLabelData));
			}
			rootObjectives.push(objective);
		});
		return rootObjectives;
	}

	// Undo/redo handler for OBJECTIVES_CHANGE events: restores the recorded objective hierarchy,
	// invalidates the cached label data, and notifies subscribers so the chart re-renders.
	changeRowOrder = (objectivesRecord: ObjectivesRecord) => {
		this.lastRendererUpdate.valueChart.setRootObjectives(objectivesRecord.rootObjectives);
		this.lastRendererUpdate.labelData[0] = undefined;

		this.reorderSubject.next(true);
	}
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* CdnConfigurationService.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202105;
public interface CdnConfigurationService extends javax.xml.rpc.Service {
    /** Returns the endpoint address (URL string) configured for the service port. */
    public java.lang.String getCdnConfigurationServiceInterfacePortAddress();

    /**
     * Returns a stub for the service port bound to its default endpoint address.
     * @throws javax.xml.rpc.ServiceException if the stub cannot be created (see the JAX-RPC Service contract)
     */
    public com.google.api.ads.admanager.axis.v202105.CdnConfigurationServiceInterface getCdnConfigurationServiceInterfacePort() throws javax.xml.rpc.ServiceException;

    /**
     * Returns a stub for the service port bound to the given endpoint address instead of the default.
     * @param portAddress the endpoint URL the returned stub should talk to
     * @throws javax.xml.rpc.ServiceException if the stub cannot be created (see the JAX-RPC Service contract)
     */
    public com.google.api.ads.admanager.axis.v202105.CdnConfigurationServiceInterface getCdnConfigurationServiceInterfacePort(java.net.URL portAddress) throws javax.xml.rpc.ServiceException;
}
| googleads/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202105/CdnConfigurationService.java | Java | apache-2.0 | 1,308 |
/* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.creole.command;
import net.sourceforge.plantuml.ISkinSimple;
import net.sourceforge.plantuml.command.regex.Matcher2;
import net.sourceforge.plantuml.command.regex.MyPattern;
import net.sourceforge.plantuml.command.regex.Pattern2;
import net.sourceforge.plantuml.creole.Parser;
import net.sourceforge.plantuml.creole.legacy.StripeSimple;
import net.sourceforge.plantuml.graphic.Splitter;
import net.sourceforge.plantuml.ugraphic.color.HColor;
public class CommandCreoleSprite implements Command {

	/** Compiled once; matches a sprite token anchored at the start of the line. */
	private static final Pattern2 pattern = MyPattern.cmpile("^(" + Splitter.spritePattern2 + ")");

	private CommandCreoleSprite() {
		// Instantiate via create() only.
	}

	public static Command create() {
		return new CommandCreoleSprite();
	}

	@Override
	public String startingChars() {
		// Sprite tokens always begin with '<'.
		return "<";
	}

	public int matchingSize(String line) {
		// Length of the matched sprite token, or 0 when the line does not start with one.
		final Matcher2 matcher = pattern.matcher(line);
		return matcher.find() ? matcher.group(1).length() : 0;
	}

	public String executeAndGetRemaining(String line, StripeSimple stripe) {
		final Matcher2 matcher = pattern.matcher(line);
		if (!matcher.find()) {
			// Callers are expected to have checked matchingSize() first.
			throw new IllegalStateException();
		}
		final String src = matcher.group(2);
		final double scale = Parser.getScale(matcher.group(3), 1);
		final String colorName = Parser.getColor(matcher.group(3));
		// Resolve the optional color name against the skin's palette.
		HColor color = null;
		if (colorName != null) {
			final ISkinSimple skinParam = stripe.getSkinParam();
			color = skinParam.getIHtmlColorSet().getColorOrWhite(skinParam.getThemeStyle(), colorName);
		}
		stripe.addSprite(src, scale, color);
		// Hand back whatever follows the sprite token.
		return line.substring(matcher.group(1).length());
	}
}
| talsma-ict/umldoclet | src/plantuml-asl/src/net/sourceforge/plantuml/creole/command/CommandCreoleSprite.java | Java | apache-2.0 | 2,672 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Threading;
using System.Diagnostics;
using TrashyWindowControl;
namespace SetCoordinatesExample
{
class Program
{
static void Main(string[] args)
{
//make sure a process called "notepad" is already running befor starting this programm!
WindowControl twc = new WindowControl();
twc.process = Process.GetProcessesByName("notepad").FirstOrDefault();
char input;
Console.WriteLine("Move the window with WASD!");
Console.WriteLine("You can change the height with R/F and the width with T/G");
do
{
input = Console.ReadKey().KeyChar;
input = Char.ToLower(input);
switch (input)
{
case 'w':
{
twc.y = twc.y - 5;
break;
}
case 's':
{
twc.y = twc.y + 5;
break;
}
case 'd':
{
twc.x = twc.x + 5;
break;
}
case 'a':
{
twc.x = twc.x - 5;
break;
}
case 'r':
{
twc.height = twc.height + 5;
break;
}
case 'f':
{
twc.height = twc.height - 5;
break;
}
case 't':
{
twc.width = twc.width + 5;
break;
}
case 'g':
{
twc.width = twc.width - 5;
break;
}
}
} while (true);
}
}
}
| sh1n1xs/TrashyWindowControl | TrashyWindowControlExamples/SetCoordinatesExample/Program.cs | C# | apache-2.0 | 2,416 |
/**
*/
package CIM15.IEC61968.Metering;
import CIM15.IEC61968.Customers.CustomerAgreement;
import CIM15.IEC61968.Customers.CustomersPackage;
import CIM15.IEC61970.Core.IdentifiedObject;
import CIM15.IEC61970.Domain.DateTimeInterval;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>End Device Control</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getDemandResponseProgram <em>Demand Response Program</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getDrProgramLevel <em>Dr Program Level</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#isDrProgramMandatory <em>Dr Program Mandatory</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getCustomerAgreement <em>Customer Agreement</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getType <em>Type</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getScheduledInterval <em>Scheduled Interval</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getEndDeviceGroup <em>End Device Group</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getEndDevice <em>End Device</em>}</li>
* <li>{@link CIM15.IEC61968.Metering.EndDeviceControl#getPriceSignal <em>Price Signal</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class EndDeviceControl extends IdentifiedObject {
/**
* The cached value of the '{@link #getDemandResponseProgram() <em>Demand Response Program</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getDemandResponseProgram()
* @generated
* @ordered
*/
protected DemandResponseProgram demandResponseProgram;
/**
* The default value of the '{@link #getDrProgramLevel() <em>Dr Program Level</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getDrProgramLevel()
* @generated
* @ordered
*/
protected static final int DR_PROGRAM_LEVEL_EDEFAULT = 0;
/**
* The cached value of the '{@link #getDrProgramLevel() <em>Dr Program Level</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getDrProgramLevel()
* @generated
* @ordered
*/
protected int drProgramLevel = DR_PROGRAM_LEVEL_EDEFAULT;
/**
* This is true if the Dr Program Level attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean drProgramLevelESet;
/**
* The default value of the '{@link #isDrProgramMandatory() <em>Dr Program Mandatory</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isDrProgramMandatory()
* @generated
* @ordered
*/
protected static final boolean DR_PROGRAM_MANDATORY_EDEFAULT = false;
/**
* The cached value of the '{@link #isDrProgramMandatory() <em>Dr Program Mandatory</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isDrProgramMandatory()
* @generated
* @ordered
*/
protected boolean drProgramMandatory = DR_PROGRAM_MANDATORY_EDEFAULT;
/**
* This is true if the Dr Program Mandatory attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean drProgramMandatoryESet;
/**
* The cached value of the '{@link #getCustomerAgreement() <em>Customer Agreement</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getCustomerAgreement()
* @generated
* @ordered
*/
protected CustomerAgreement customerAgreement;
/**
* The default value of the '{@link #getType() <em>Type</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getType()
* @generated
* @ordered
*/
protected static final String TYPE_EDEFAULT = null;
/**
* The cached value of the '{@link #getType() <em>Type</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getType()
* @generated
* @ordered
*/
protected String type = TYPE_EDEFAULT;
/**
* This is true if the Type attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean typeESet;
/**
* The cached value of the '{@link #getScheduledInterval() <em>Scheduled Interval</em>}' containment reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getScheduledInterval()
* @generated
* @ordered
*/
protected DateTimeInterval scheduledInterval;
/**
* The cached value of the '{@link #getEndDeviceGroup() <em>End Device Group</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getEndDeviceGroup()
* @generated
* @ordered
*/
protected EndDeviceGroup endDeviceGroup;
/**
* The cached value of the '{@link #getEndDevice() <em>End Device</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getEndDevice()
* @generated
* @ordered
*/
protected EndDevice endDevice;
/**
* The default value of the '{@link #getPriceSignal() <em>Price Signal</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPriceSignal()
* @generated
* @ordered
*/
protected static final float PRICE_SIGNAL_EDEFAULT = 0.0F;
/**
* The cached value of the '{@link #getPriceSignal() <em>Price Signal</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getPriceSignal()
* @generated
* @ordered
*/
protected float priceSignal = PRICE_SIGNAL_EDEFAULT;
/**
* This is true if the Price Signal attribute has been set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
* @ordered
*/
protected boolean priceSignalESet;
	/**
	 * <!-- begin-user-doc -->
	 * Creates an end device control with all attributes at their EMF defaults;
	 * the corresponding "...ESet" flags stay false until a setter runs.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected EndDeviceControl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the static EMF metaclass of this object, used by the EMF
	 * runtime for reflective operations.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return MeteringPackage.Literals.END_DEVICE_CONTROL;
	}
	/**
	 * Returns the value of the '<em><b>Demand Response Program</b></em>' reference.
	 * It is bidirectional and its opposite is '{@link CIM15.IEC61968.Metering.DemandResponseProgram#getEndDeviceControls <em>End Device Controls</em>}'.
	 * <!-- begin-user-doc -->
	 * <p>
	 * Resolves a cross-document proxy on first access, so the returned
	 * reference is always the real object (or null if unset).
	 * </p>
	 * <!-- end-user-doc -->
	 * @return the value of the '<em>Demand Response Program</em>' reference.
	 * @see #setDemandResponseProgram(DemandResponseProgram)
	 * @see CIM15.IEC61968.Metering.DemandResponseProgram#getEndDeviceControls
	 * @generated
	 */
	public DemandResponseProgram getDemandResponseProgram() {
		// Lazily resolve the proxy the first time the reference is read.
		if (demandResponseProgram != null && demandResponseProgram.eIsProxy()) {
			InternalEObject oldDemandResponseProgram = (InternalEObject)demandResponseProgram;
			demandResponseProgram = (DemandResponseProgram)eResolveProxy(oldDemandResponseProgram);
			if (demandResponseProgram != oldDemandResponseProgram) {
				// NOTE(review): stock EMF-generated code fires a RESOLVE notification
				// here; the empty body suggests notifications were deliberately
				// stripped from this model — confirm this is intentional.
			}
		}
		return demandResponseProgram;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the raw reference without resolving proxies; used internally
	 * by the EMF runtime (contrast with {@link #getDemandResponseProgram()}).
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DemandResponseProgram basicGetDemandResponseProgram() {
		return demandResponseProgram;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Low-level setter: swaps in the new reference without touching the
	 * opposite end ({@link #setDemandResponseProgram(DemandResponseProgram)}
	 * does the bidirectional bookkeeping) and passes the notification chain
	 * through unchanged.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetDemandResponseProgram(DemandResponseProgram newDemandResponseProgram, NotificationChain msgs) {
		DemandResponseProgram oldDemandResponseProgram = demandResponseProgram;
		demandResponseProgram = newDemandResponseProgram;
		// NOTE(review): oldDemandResponseProgram is unused — stock EMF code would
		// add a SET notification to msgs here; presumably stripped on purpose.
		return msgs;
	}
	/**
	 * Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getDemandResponseProgram <em>Demand Response Program</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <p>
	 * Maintains the bidirectional opposite: this control is removed from the
	 * old program's end-device-control list and added to the new one's.
	 * A no-op when the new value is identical to the current one.
	 * </p>
	 * <!-- end-user-doc -->
	 * @param value the new value of the '<em>Demand Response Program</em>' reference.
	 * @see #getDemandResponseProgram()
	 * @generated
	 */
	public void setDemandResponseProgram(DemandResponseProgram newDemandResponseProgram) {
		if (newDemandResponseProgram != demandResponseProgram) {
			NotificationChain msgs = null;
			// Detach from the old opposite, attach to the new one, then apply.
			if (demandResponseProgram != null)
				msgs = ((InternalEObject)demandResponseProgram).eInverseRemove(this, MeteringPackage.DEMAND_RESPONSE_PROGRAM__END_DEVICE_CONTROLS, DemandResponseProgram.class, msgs);
			if (newDemandResponseProgram != null)
				msgs = ((InternalEObject)newDemandResponseProgram).eInverseAdd(this, MeteringPackage.DEMAND_RESPONSE_PROGRAM__END_DEVICE_CONTROLS, DemandResponseProgram.class, msgs);
			msgs = basicSetDemandResponseProgram(newDemandResponseProgram, msgs);
			if (msgs != null) msgs.dispatch();
		}
	}
/**
* Returns the value of the '<em><b>Dr Program Level</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Dr Program Level</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Dr Program Level</em>' attribute.
* @see #isSetDrProgramLevel()
* @see #unsetDrProgramLevel()
* @see #setDrProgramLevel(int)
* @generated
*/
	public int getDrProgramLevel() {
		// Plain attribute getter; use isSetDrProgramLevel() to distinguish
		// an explicit value from the default.
		return drProgramLevel;
	}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getDrProgramLevel <em>Dr Program Level</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Dr Program Level</em>' attribute.
* @see #isSetDrProgramLevel()
* @see #unsetDrProgramLevel()
* @see #getDrProgramLevel()
* @generated
*/
	public void setDrProgramLevel(int newDrProgramLevel) {
		drProgramLevel = newDrProgramLevel;
		// Mark the attribute as explicitly set (EMF "unsettable" semantics).
		drProgramLevelESet = true;
	}
/**
* Unsets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getDrProgramLevel <em>Dr Program Level</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetDrProgramLevel()
* @see #getDrProgramLevel()
* @see #setDrProgramLevel(int)
* @generated
*/
	public void unsetDrProgramLevel() {
		// Restore the default and clear the "explicitly set" flag.
		drProgramLevel = DR_PROGRAM_LEVEL_EDEFAULT;
		drProgramLevelESet = false;
	}
/**
* Returns whether the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getDrProgramLevel <em>Dr Program Level</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Dr Program Level</em>' attribute is set.
* @see #unsetDrProgramLevel()
* @see #getDrProgramLevel()
* @see #setDrProgramLevel(int)
* @generated
*/
	public boolean isSetDrProgramLevel() {
		// True only after setDrProgramLevel() and before unsetDrProgramLevel().
		return drProgramLevelESet;
	}
/**
* Returns the value of the '<em><b>Dr Program Mandatory</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Dr Program Mandatory</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Dr Program Mandatory</em>' attribute.
* @see #isSetDrProgramMandatory()
* @see #unsetDrProgramMandatory()
* @see #setDrProgramMandatory(boolean)
* @generated
*/
	public boolean isDrProgramMandatory() {
		// Plain attribute getter; see isSetDrProgramMandatory() for set-state.
		return drProgramMandatory;
	}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#isDrProgramMandatory <em>Dr Program Mandatory</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Dr Program Mandatory</em>' attribute.
* @see #isSetDrProgramMandatory()
* @see #unsetDrProgramMandatory()
* @see #isDrProgramMandatory()
* @generated
*/
	public void setDrProgramMandatory(boolean newDrProgramMandatory) {
		drProgramMandatory = newDrProgramMandatory;
		// Mark the attribute as explicitly set (EMF "unsettable" semantics).
		drProgramMandatoryESet = true;
	}
/**
* Unsets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#isDrProgramMandatory <em>Dr Program Mandatory</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetDrProgramMandatory()
* @see #isDrProgramMandatory()
* @see #setDrProgramMandatory(boolean)
* @generated
*/
	public void unsetDrProgramMandatory() {
		// Restore the default and clear the "explicitly set" flag.
		drProgramMandatory = DR_PROGRAM_MANDATORY_EDEFAULT;
		drProgramMandatoryESet = false;
	}
/**
* Returns whether the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#isDrProgramMandatory <em>Dr Program Mandatory</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Dr Program Mandatory</em>' attribute is set.
* @see #unsetDrProgramMandatory()
* @see #isDrProgramMandatory()
* @see #setDrProgramMandatory(boolean)
* @generated
*/
	public boolean isSetDrProgramMandatory() {
		// True only after setDrProgramMandatory() and before unsetDrProgramMandatory().
		return drProgramMandatoryESet;
	}
/**
* Returns the value of the '<em><b>Customer Agreement</b></em>' reference.
* It is bidirectional and its opposite is '{@link CIM15.IEC61968.Customers.CustomerAgreement#getEndDeviceControls <em>End Device Controls</em>}'.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Customer Agreement</em>' reference isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Customer Agreement</em>' reference.
* @see #setCustomerAgreement(CustomerAgreement)
* @see CIM15.IEC61968.Customers.CustomerAgreement#getEndDeviceControls
* @generated
*/
public CustomerAgreement getCustomerAgreement() {
if (customerAgreement != null && customerAgreement.eIsProxy()) {
InternalEObject oldCustomerAgreement = (InternalEObject)customerAgreement;
customerAgreement = (CustomerAgreement)eResolveProxy(oldCustomerAgreement);
if (customerAgreement != oldCustomerAgreement) {
}
}
return customerAgreement;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	public CustomerAgreement basicGetCustomerAgreement() {
		// Raw accessor used by eGet(..., resolve=false); never resolves proxies.
		return customerAgreement;
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetCustomerAgreement(CustomerAgreement newCustomerAgreement, NotificationChain msgs) {
CustomerAgreement oldCustomerAgreement = customerAgreement;
customerAgreement = newCustomerAgreement;
return msgs;
}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getCustomerAgreement <em>Customer Agreement</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Customer Agreement</em>' reference.
* @see #getCustomerAgreement()
* @generated
*/
public void setCustomerAgreement(CustomerAgreement newCustomerAgreement) {
if (newCustomerAgreement != customerAgreement) {
NotificationChain msgs = null;
if (customerAgreement != null)
msgs = ((InternalEObject)customerAgreement).eInverseRemove(this, CustomersPackage.CUSTOMER_AGREEMENT__END_DEVICE_CONTROLS, CustomerAgreement.class, msgs);
if (newCustomerAgreement != null)
msgs = ((InternalEObject)newCustomerAgreement).eInverseAdd(this, CustomersPackage.CUSTOMER_AGREEMENT__END_DEVICE_CONTROLS, CustomerAgreement.class, msgs);
msgs = basicSetCustomerAgreement(newCustomerAgreement, msgs);
if (msgs != null) msgs.dispatch();
}
}
/**
* Returns the value of the '<em><b>Type</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Type</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Type</em>' attribute.
* @see #isSetType()
* @see #unsetType()
* @see #setType(String)
* @generated
*/
	public String getType() {
		// Plain attribute getter; use isSetType() to distinguish an explicit
		// value from the default.
		return type;
	}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getType <em>Type</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Type</em>' attribute.
* @see #isSetType()
* @see #unsetType()
* @see #getType()
* @generated
*/
	public void setType(String newType) {
		type = newType;
		// Mark the attribute as explicitly set (EMF "unsettable" semantics).
		typeESet = true;
	}
/**
* Unsets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getType <em>Type</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetType()
* @see #getType()
* @see #setType(String)
* @generated
*/
	public void unsetType() {
		// Restore the default and clear the "explicitly set" flag.
		type = TYPE_EDEFAULT;
		typeESet = false;
	}
/**
* Returns whether the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getType <em>Type</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Type</em>' attribute is set.
* @see #unsetType()
* @see #getType()
* @see #setType(String)
* @generated
*/
	public boolean isSetType() {
		// True only after setType() and before unsetType().
		return typeESet;
	}
/**
* Returns the value of the '<em><b>Scheduled Interval</b></em>' containment reference.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Scheduled Interval</em>' containment reference isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Scheduled Interval</em>' containment reference.
* @see #setScheduledInterval(DateTimeInterval)
* @generated
*/
	public DateTimeInterval getScheduledInterval() {
		// Containment reference: never a proxy, so no resolution is needed.
		return scheduledInterval;
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetScheduledInterval(DateTimeInterval newScheduledInterval, NotificationChain msgs) {
DateTimeInterval oldScheduledInterval = scheduledInterval;
scheduledInterval = newScheduledInterval;
return msgs;
}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getScheduledInterval <em>Scheduled Interval</em>}' containment reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Scheduled Interval</em>' containment reference.
* @see #getScheduledInterval()
* @generated
*/
public void setScheduledInterval(DateTimeInterval newScheduledInterval) {
if (newScheduledInterval != scheduledInterval) {
NotificationChain msgs = null;
if (scheduledInterval != null)
msgs = ((InternalEObject)scheduledInterval).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - MeteringPackage.END_DEVICE_CONTROL__SCHEDULED_INTERVAL, null, msgs);
if (newScheduledInterval != null)
msgs = ((InternalEObject)newScheduledInterval).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - MeteringPackage.END_DEVICE_CONTROL__SCHEDULED_INTERVAL, null, msgs);
msgs = basicSetScheduledInterval(newScheduledInterval, msgs);
if (msgs != null) msgs.dispatch();
}
}
/**
* Returns the value of the '<em><b>End Device Group</b></em>' reference.
* It is bidirectional and its opposite is '{@link CIM15.IEC61968.Metering.EndDeviceGroup#getEndDeviceControls <em>End Device Controls</em>}'.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>End Device Group</em>' reference isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>End Device Group</em>' reference.
* @see #setEndDeviceGroup(EndDeviceGroup)
* @see CIM15.IEC61968.Metering.EndDeviceGroup#getEndDeviceControls
* @generated
*/
public EndDeviceGroup getEndDeviceGroup() {
if (endDeviceGroup != null && endDeviceGroup.eIsProxy()) {
InternalEObject oldEndDeviceGroup = (InternalEObject)endDeviceGroup;
endDeviceGroup = (EndDeviceGroup)eResolveProxy(oldEndDeviceGroup);
if (endDeviceGroup != oldEndDeviceGroup) {
}
}
return endDeviceGroup;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	public EndDeviceGroup basicGetEndDeviceGroup() {
		// Raw accessor used by eGet(..., resolve=false); never resolves proxies.
		return endDeviceGroup;
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetEndDeviceGroup(EndDeviceGroup newEndDeviceGroup, NotificationChain msgs) {
EndDeviceGroup oldEndDeviceGroup = endDeviceGroup;
endDeviceGroup = newEndDeviceGroup;
return msgs;
}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getEndDeviceGroup <em>End Device Group</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>End Device Group</em>' reference.
* @see #getEndDeviceGroup()
* @generated
*/
public void setEndDeviceGroup(EndDeviceGroup newEndDeviceGroup) {
if (newEndDeviceGroup != endDeviceGroup) {
NotificationChain msgs = null;
if (endDeviceGroup != null)
msgs = ((InternalEObject)endDeviceGroup).eInverseRemove(this, MeteringPackage.END_DEVICE_GROUP__END_DEVICE_CONTROLS, EndDeviceGroup.class, msgs);
if (newEndDeviceGroup != null)
msgs = ((InternalEObject)newEndDeviceGroup).eInverseAdd(this, MeteringPackage.END_DEVICE_GROUP__END_DEVICE_CONTROLS, EndDeviceGroup.class, msgs);
msgs = basicSetEndDeviceGroup(newEndDeviceGroup, msgs);
if (msgs != null) msgs.dispatch();
}
}
/**
* Returns the value of the '<em><b>End Device</b></em>' reference.
* It is bidirectional and its opposite is '{@link CIM15.IEC61968.Metering.EndDevice#getEndDeviceControls <em>End Device Controls</em>}'.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>End Device</em>' reference isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>End Device</em>' reference.
* @see #setEndDevice(EndDevice)
* @see CIM15.IEC61968.Metering.EndDevice#getEndDeviceControls
* @generated
*/
public EndDevice getEndDevice() {
if (endDevice != null && endDevice.eIsProxy()) {
InternalEObject oldEndDevice = (InternalEObject)endDevice;
endDevice = (EndDevice)eResolveProxy(oldEndDevice);
if (endDevice != oldEndDevice) {
}
}
return endDevice;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	public EndDevice basicGetEndDevice() {
		// Raw accessor used by eGet(..., resolve=false); never resolves proxies.
		return endDevice;
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetEndDevice(EndDevice newEndDevice, NotificationChain msgs) {
EndDevice oldEndDevice = endDevice;
endDevice = newEndDevice;
return msgs;
}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getEndDevice <em>End Device</em>}' reference.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>End Device</em>' reference.
* @see #getEndDevice()
* @generated
*/
public void setEndDevice(EndDevice newEndDevice) {
if (newEndDevice != endDevice) {
NotificationChain msgs = null;
if (endDevice != null)
msgs = ((InternalEObject)endDevice).eInverseRemove(this, MeteringPackage.END_DEVICE__END_DEVICE_CONTROLS, EndDevice.class, msgs);
if (newEndDevice != null)
msgs = ((InternalEObject)newEndDevice).eInverseAdd(this, MeteringPackage.END_DEVICE__END_DEVICE_CONTROLS, EndDevice.class, msgs);
msgs = basicSetEndDevice(newEndDevice, msgs);
if (msgs != null) msgs.dispatch();
}
}
/**
* Returns the value of the '<em><b>Price Signal</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Price Signal</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Price Signal</em>' attribute.
* @see #isSetPriceSignal()
* @see #unsetPriceSignal()
* @see #setPriceSignal(float)
* @generated
*/
	public float getPriceSignal() {
		// Plain attribute getter; use isSetPriceSignal() to distinguish an
		// explicit value from the default.
		return priceSignal;
	}
/**
* Sets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getPriceSignal <em>Price Signal</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Price Signal</em>' attribute.
* @see #isSetPriceSignal()
* @see #unsetPriceSignal()
* @see #getPriceSignal()
* @generated
*/
	public void setPriceSignal(float newPriceSignal) {
		priceSignal = newPriceSignal;
		// Mark the attribute as explicitly set (EMF "unsettable" semantics).
		priceSignalESet = true;
	}
/**
* Unsets the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getPriceSignal <em>Price Signal</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #isSetPriceSignal()
* @see #getPriceSignal()
* @see #setPriceSignal(float)
* @generated
*/
	public void unsetPriceSignal() {
		// Restore the default and clear the "explicitly set" flag.
		priceSignal = PRICE_SIGNAL_EDEFAULT;
		priceSignalESet = false;
	}
/**
* Returns whether the value of the '{@link CIM15.IEC61968.Metering.EndDeviceControl#getPriceSignal <em>Price Signal</em>}' attribute is set.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @return whether the value of the '<em>Price Signal</em>' attribute is set.
* @see #unsetPriceSignal()
* @see #getPriceSignal()
* @see #setPriceSignal(float)
* @generated
*/
	public boolean isSetPriceSignal() {
		// True only after setPriceSignal() and before unsetPriceSignal().
		return priceSignalESet;
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		// Reflective hook: the opposite end of a bidirectional reference is
		// attaching itself to this object. For these single-valued references
		// the current value (if any) is detached first so its inverse list
		// stays consistent, then the raw basicSet* stores the new value.
		switch (featureID) {
			case MeteringPackage.END_DEVICE_CONTROL__DEMAND_RESPONSE_PROGRAM:
				if (demandResponseProgram != null)
					msgs = ((InternalEObject)demandResponseProgram).eInverseRemove(this, MeteringPackage.DEMAND_RESPONSE_PROGRAM__END_DEVICE_CONTROLS, DemandResponseProgram.class, msgs);
				return basicSetDemandResponseProgram((DemandResponseProgram)otherEnd, msgs);
			case MeteringPackage.END_DEVICE_CONTROL__CUSTOMER_AGREEMENT:
				if (customerAgreement != null)
					msgs = ((InternalEObject)customerAgreement).eInverseRemove(this, CustomersPackage.CUSTOMER_AGREEMENT__END_DEVICE_CONTROLS, CustomerAgreement.class, msgs);
				return basicSetCustomerAgreement((CustomerAgreement)otherEnd, msgs);
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE_GROUP:
				if (endDeviceGroup != null)
					msgs = ((InternalEObject)endDeviceGroup).eInverseRemove(this, MeteringPackage.END_DEVICE_GROUP__END_DEVICE_CONTROLS, EndDeviceGroup.class, msgs);
				return basicSetEndDeviceGroup((EndDeviceGroup)otherEnd, msgs);
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE:
				if (endDevice != null)
					msgs = ((InternalEObject)endDevice).eInverseRemove(this, MeteringPackage.END_DEVICE__END_DEVICE_CONTROLS, EndDevice.class, msgs);
				return basicSetEndDevice((EndDevice)otherEnd, msgs);
		}
		// Unknown feature: defer to the superclass.
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		// Reflective hook: the opposite end of a bidirectional (or containment)
		// reference is detaching itself; clear the local slot via basicSet*(null).
		switch (featureID) {
			case MeteringPackage.END_DEVICE_CONTROL__DEMAND_RESPONSE_PROGRAM:
				return basicSetDemandResponseProgram(null, msgs);
			case MeteringPackage.END_DEVICE_CONTROL__CUSTOMER_AGREEMENT:
				return basicSetCustomerAgreement(null, msgs);
			case MeteringPackage.END_DEVICE_CONTROL__SCHEDULED_INTERVAL:
				return basicSetScheduledInterval(null, msgs);
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE_GROUP:
				return basicSetEndDeviceGroup(null, msgs);
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE:
				return basicSetEndDevice(null, msgs);
		}
		// Unknown feature: defer to the superclass.
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		// Reflective getter. For proxy-resolvable references, "resolve" selects
		// between the resolving getter and the raw basicGet* accessor.
		switch (featureID) {
			case MeteringPackage.END_DEVICE_CONTROL__DEMAND_RESPONSE_PROGRAM:
				if (resolve) return getDemandResponseProgram();
				return basicGetDemandResponseProgram();
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_LEVEL:
				return getDrProgramLevel();
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_MANDATORY:
				return isDrProgramMandatory();
			case MeteringPackage.END_DEVICE_CONTROL__CUSTOMER_AGREEMENT:
				if (resolve) return getCustomerAgreement();
				return basicGetCustomerAgreement();
			case MeteringPackage.END_DEVICE_CONTROL__TYPE:
				return getType();
			case MeteringPackage.END_DEVICE_CONTROL__SCHEDULED_INTERVAL:
				return getScheduledInterval();
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE_GROUP:
				if (resolve) return getEndDeviceGroup();
				return basicGetEndDeviceGroup();
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE:
				if (resolve) return getEndDevice();
				return basicGetEndDevice();
			case MeteringPackage.END_DEVICE_CONTROL__PRICE_SIGNAL:
				return getPriceSignal();
		}
		// Unknown feature: defer to the superclass (inherited features).
		return super.eGet(featureID, resolve, coreType);
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	@Override
	public void eSet(int featureID, Object newValue) {
		// Reflective setter: dispatch to the typed setter, unboxing primitives.
		switch (featureID) {
			case MeteringPackage.END_DEVICE_CONTROL__DEMAND_RESPONSE_PROGRAM:
				setDemandResponseProgram((DemandResponseProgram)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_LEVEL:
				setDrProgramLevel((Integer)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_MANDATORY:
				setDrProgramMandatory((Boolean)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__CUSTOMER_AGREEMENT:
				setCustomerAgreement((CustomerAgreement)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__TYPE:
				setType((String)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__SCHEDULED_INTERVAL:
				setScheduledInterval((DateTimeInterval)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE_GROUP:
				setEndDeviceGroup((EndDeviceGroup)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE:
				setEndDevice((EndDevice)newValue);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__PRICE_SIGNAL:
				setPriceSignal((Float)newValue);
				return;
		}
		// Unknown feature: defer to the superclass (inherited features).
		super.eSet(featureID, newValue);
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	@Override
	public void eUnset(int featureID) {
		// Reflective unset: unsettable attributes use unset*(); plain references
		// are simply cleared by setting null.
		switch (featureID) {
			case MeteringPackage.END_DEVICE_CONTROL__DEMAND_RESPONSE_PROGRAM:
				setDemandResponseProgram((DemandResponseProgram)null);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_LEVEL:
				unsetDrProgramLevel();
				return;
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_MANDATORY:
				unsetDrProgramMandatory();
				return;
			case MeteringPackage.END_DEVICE_CONTROL__CUSTOMER_AGREEMENT:
				setCustomerAgreement((CustomerAgreement)null);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__TYPE:
				unsetType();
				return;
			case MeteringPackage.END_DEVICE_CONTROL__SCHEDULED_INTERVAL:
				setScheduledInterval((DateTimeInterval)null);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE_GROUP:
				setEndDeviceGroup((EndDeviceGroup)null);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE:
				setEndDevice((EndDevice)null);
				return;
			case MeteringPackage.END_DEVICE_CONTROL__PRICE_SIGNAL:
				unsetPriceSignal();
				return;
		}
		// Unknown feature: defer to the superclass (inherited features).
		super.eUnset(featureID);
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
	@Override
	public boolean eIsSet(int featureID) {
		// Reflective set-check: unsettable attributes consult their ESet flag;
		// references are "set" whenever non-null.
		switch (featureID) {
			case MeteringPackage.END_DEVICE_CONTROL__DEMAND_RESPONSE_PROGRAM:
				return demandResponseProgram != null;
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_LEVEL:
				return isSetDrProgramLevel();
			case MeteringPackage.END_DEVICE_CONTROL__DR_PROGRAM_MANDATORY:
				return isSetDrProgramMandatory();
			case MeteringPackage.END_DEVICE_CONTROL__CUSTOMER_AGREEMENT:
				return customerAgreement != null;
			case MeteringPackage.END_DEVICE_CONTROL__TYPE:
				return isSetType();
			case MeteringPackage.END_DEVICE_CONTROL__SCHEDULED_INTERVAL:
				return scheduledInterval != null;
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE_GROUP:
				return endDeviceGroup != null;
			case MeteringPackage.END_DEVICE_CONTROL__END_DEVICE:
				return endDevice != null;
			case MeteringPackage.END_DEVICE_CONTROL__PRICE_SIGNAL:
				return isSetPriceSignal();
		}
		// Unknown feature: defer to the superclass (inherited features).
		return super.eIsSet(featureID);
	}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuffer result = new StringBuffer(super.toString());
result.append(" (drProgramLevel: ");
if (drProgramLevelESet) result.append(drProgramLevel); else result.append("<unset>");
result.append(", drProgramMandatory: ");
if (drProgramMandatoryESet) result.append(drProgramMandatory); else result.append("<unset>");
result.append(", type: ");
if (typeESet) result.append(type); else result.append("<unset>");
result.append(", priceSignal: ");
if (priceSignalESet) result.append(priceSignal); else result.append("<unset>");
result.append(')');
return result.toString();
}
} // EndDeviceControl
| SES-fortiss/SmartGridCoSimulation | core/cim15/src/CIM15/IEC61968/Metering/EndDeviceControl.java | Java | apache-2.0 | 33,206 |
#region License and Terms
// MoreLINQ - Extensions to LINQ to Objects
// Copyright (c) 2017 Atif Aziz. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
namespace MoreLinq
{
using System;
using System.Collections.Generic;
using System.Linq;
static partial class MoreEnumerable
{
/// <summary>
/// Performs a full outer join on two homogeneous sequences.
/// Additional arguments specify key selection functions and result
/// projection functions.
/// </summary>
/// <typeparam name="TSource">
/// The type of elements in the source sequence.</typeparam>
/// <typeparam name="TKey">
/// The type of the key returned by the key selector function.</typeparam>
/// <typeparam name="TResult">
/// The type of the result elements.</typeparam>
/// <param name="first">
/// The first sequence to join fully.</param>
/// <param name="second">
/// The second sequence to join fully.</param>
/// <param name="keySelector">
/// Function that projects the key given an element of one of the
/// sequences to join.</param>
/// <param name="firstSelector">
/// Function that projects the result given just an element from
/// <paramref name="first"/> where there is no corresponding element
/// in <paramref name="second"/>.</param>
/// <param name="secondSelector">
/// Function that projects the result given just an element from
/// <paramref name="second"/> where there is no corresponding element
/// in <paramref name="first"/>.</param>
/// <param name="bothSelector">
/// Function that projects the result given an element from
/// <paramref name="first"/> and an element from <paramref name="second"/>
/// that match on a common key.</param>
/// <returns>A sequence containing results projected from a full
/// outer join of the two input sequences.</returns>
public static IEnumerable<TResult> FullJoin<TSource, TKey, TResult>(
this IEnumerable<TSource> first,
IEnumerable<TSource> second,
Func<TSource, TKey> keySelector,
Func<TSource, TResult> firstSelector,
Func<TSource, TResult> secondSelector,
Func<TSource, TSource, TResult> bothSelector)
{
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
return first.FullJoin(second, keySelector,
firstSelector, secondSelector, bothSelector,
null);
}
/// <summary>
/// Performs a full outer join on two homogeneous sequences.
/// Additional arguments specify key selection functions, result
/// projection functions and a key comparer.
/// </summary>
/// <typeparam name="TSource">
/// The type of elements in the source sequence.</typeparam>
/// <typeparam name="TKey">
/// The type of the key returned by the key selector function.</typeparam>
/// <typeparam name="TResult">
/// The type of the result elements.</typeparam>
/// <param name="first">
/// The first sequence to join fully.</param>
/// <param name="second">
/// The second sequence to join fully.</param>
/// <param name="keySelector">
/// Function that projects the key given an element of one of the
/// sequences to join.</param>
/// <param name="firstSelector">
/// Function that projects the result given just an element from
/// <paramref name="first"/> where there is no corresponding element
/// in <paramref name="second"/>.</param>
/// <param name="secondSelector">
/// Function that projects the result given just an element from
/// <paramref name="second"/> where there is no corresponding element
/// in <paramref name="first"/>.</param>
/// <param name="bothSelector">
/// Function that projects the result given an element from
/// <paramref name="first"/> and an element from <paramref name="second"/>
/// that match on a common key.</param>
/// <param name="comparer">
/// The <see cref="IEqualityComparer{T}"/> instance used to compare
/// keys for equality.</param>
/// <returns>A sequence containing results projected from a full
/// outer join of the two input sequences.</returns>
public static IEnumerable<TResult> FullJoin<TSource, TKey, TResult>(
this IEnumerable<TSource> first,
IEnumerable<TSource> second,
Func<TSource, TKey> keySelector,
Func<TSource, TResult> firstSelector,
Func<TSource, TResult> secondSelector,
Func<TSource, TSource, TResult> bothSelector,
IEqualityComparer<TKey>? comparer)
{
if (keySelector == null) throw new ArgumentNullException(nameof(keySelector));
return first.FullJoin(second,
keySelector, keySelector,
firstSelector, secondSelector, bothSelector,
comparer);
}
/// <summary>
/// Performs a full outer join on two heterogeneous sequences.
/// Additional arguments specify key selection functions and result
/// projection functions.
/// </summary>
/// <typeparam name="TFirst">
/// The type of elements in the first sequence.</typeparam>
/// <typeparam name="TSecond">
/// The type of elements in the second sequence.</typeparam>
/// <typeparam name="TKey">
/// The type of the key returned by the key selector functions.</typeparam>
/// <typeparam name="TResult">
/// The type of the result elements.</typeparam>
/// <param name="first">
/// The first sequence to join fully.</param>
/// <param name="second">
/// The second sequence to join fully.</param>
/// <param name="firstKeySelector">
/// Function that projects the key given an element from <paramref name="first"/>.</param>
/// <param name="secondKeySelector">
/// Function that projects the key given an element from <paramref name="second"/>.</param>
/// <param name="firstSelector">
/// Function that projects the result given just an element from
/// <paramref name="first"/> where there is no corresponding element
/// in <paramref name="second"/>.</param>
/// <param name="secondSelector">
/// Function that projects the result given just an element from
/// <paramref name="second"/> where there is no corresponding element
/// in <paramref name="first"/>.</param>
/// <param name="bothSelector">
/// Function that projects the result given an element from
/// <paramref name="first"/> and an element from <paramref name="second"/>
/// that match on a common key.</param>
/// <returns>A sequence containing results projected from a full
/// outer join of the two input sequences.</returns>
public static IEnumerable<TResult> FullJoin<TFirst, TSecond, TKey, TResult>(
this IEnumerable<TFirst> first,
IEnumerable<TSecond> second,
Func<TFirst, TKey> firstKeySelector,
Func<TSecond, TKey> secondKeySelector,
Func<TFirst, TResult> firstSelector,
Func<TSecond, TResult> secondSelector,
Func<TFirst, TSecond, TResult> bothSelector) =>
first.FullJoin(second,
firstKeySelector, secondKeySelector,
firstSelector, secondSelector, bothSelector,
null);
/// <summary>
/// Performs a full outer join on two heterogeneous sequences.
/// Additional arguments specify key selection functions, result
/// projection functions and a key comparer.
/// </summary>
/// <typeparam name="TFirst">
/// The type of elements in the first sequence.</typeparam>
/// <typeparam name="TSecond">
/// The type of elements in the second sequence.</typeparam>
/// <typeparam name="TKey">
/// The type of the key returned by the key selector functions.</typeparam>
/// <typeparam name="TResult">
/// The type of the result elements.</typeparam>
/// <param name="first">
/// The first sequence to join fully.</param>
/// <param name="second">
/// The second sequence to join fully.</param>
/// <param name="firstKeySelector">
/// Function that projects the key given an element from <paramref name="first"/>.</param>
/// <param name="secondKeySelector">
/// Function that projects the key given an element from <paramref name="second"/>.</param>
/// <param name="firstSelector">
/// Function that projects the result given just an element from
/// <paramref name="first"/> where there is no corresponding element
/// in <paramref name="second"/>.</param>
/// <param name="secondSelector">
/// Function that projects the result given just an element from
/// <paramref name="second"/> where there is no corresponding element
/// in <paramref name="first"/>.</param>
/// <param name="bothSelector">
/// Function that projects the result given an element from
/// <paramref name="first"/> and an element from <paramref name="second"/>
/// that match on a common key.</param>
/// <param name="comparer">
/// The <see cref="IEqualityComparer{T}"/> instance used to compare
/// keys for equality.</param>
/// <returns>A sequence containing results projected from a full
/// outer join of the two input sequences.</returns>
public static IEnumerable<TResult> FullJoin<TFirst, TSecond, TKey, TResult>(
    this IEnumerable<TFirst> first,
    IEnumerable<TSecond> second,
    Func<TFirst, TKey> firstKeySelector,
    Func<TSecond, TKey> secondKeySelector,
    Func<TFirst, TResult> firstSelector,
    Func<TSecond, TResult> secondSelector,
    Func<TFirst, TSecond, TResult> bothSelector,
    IEqualityComparer<TKey>? comparer)
{
    // Validate arguments eagerly, outside the local iterator below, so the
    // caller gets the exception at call time rather than on first enumeration.
    if (first == null) throw new ArgumentNullException(nameof(first));
    if (second == null) throw new ArgumentNullException(nameof(second));
    if (firstKeySelector == null) throw new ArgumentNullException(nameof(firstKeySelector));
    if (secondKeySelector == null) throw new ArgumentNullException(nameof(secondKeySelector));
    if (firstSelector == null) throw new ArgumentNullException(nameof(firstSelector));
    if (secondSelector == null) throw new ArgumentNullException(nameof(secondSelector));
    if (bothSelector == null) throw new ArgumentNullException(nameof(bothSelector));

    return _(); IEnumerable<TResult> _()
    {
        // Materialize the second sequence once (it is enumerated twice below:
        // via the lookup and again in the final pass) and index it by key.
        // A null comparer makes ToLookup/HashSet use the default comparer.
        var seconds = second.Select(e => new KeyValuePair<TKey, TSecond>(secondKeySelector(e), e)).ToArray();
        var secondLookup = seconds.ToLookup(e => e.Key, e => e.Value, comparer);
        var firstKeys = new HashSet<TKey>(comparer);

        foreach (var fe in first)
        {
            var key = firstKeySelector(fe);
            firstKeys.Add(key); // remember every key seen in "first" for the second pass

            using var se = secondLookup[key].GetEnumerator();

            if (se.MoveNext())
            {
                // At least one match: pair this "first" element with every
                // matching element from "second".
                do { yield return bothSelector(fe, se.Current); }
                while (se.MoveNext());
            }
            else
            {
                // No match. Dispose the enumerator explicitly before yielding:
                // the iterator may stay suspended at the yield below for a long
                // time, and the "using var" scope would not end until then.
                se.Dispose();
                yield return firstSelector(fe);
            }
        }

        // Second pass: elements of "second" whose key never occurred in
        // "first" are projected on their own.
        foreach (var se in seconds)
        {
            if (!firstKeys.Contains(se.Key))
                yield return secondSelector(se.Value);
        }
    }
}
}
}
| morelinq/MoreLINQ | MoreLinq/FullJoin.cs | C# | apache-2.0 | 12,941 |
// SPDX-License-Identifier: Apache-2.0
// Copyright Authors of Hubble
//go:build !privileged_tests
// +build !privileged_tests
package filters
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
flowpb "github.com/cilium/cilium/api/v1/flow"
v1 "github.com/cilium/cilium/pkg/hubble/api/v1"
)
// TestApply exercises every whitelist/blacklist combination: an event passes
// only when the whitelist matches (or is empty) and the blacklist does not.
func TestApply(t *testing.T) {
	// One filter list that accepts every event, one that rejects every event.
	alwaysTrue := FilterFuncs{func(_ *v1.Event) bool {
		return true
	}}
	alwaysFalse := FilterFuncs{func(_ *v1.Event) bool {
		return false
	}}
	type args struct {
		whitelist FilterFuncs
		blacklist FilterFuncs
		ev        *v1.Event
	}
	tests := []struct {
		name string
		args args
		want bool
	}{
		{args: args{whitelist: alwaysTrue}, want: true},
		{args: args{whitelist: alwaysFalse}, want: false},
		{args: args{blacklist: alwaysFalse}, want: true},
		{args: args{blacklist: alwaysTrue}, want: false},
		{args: args{whitelist: alwaysTrue, blacklist: alwaysTrue}, want: false},
		{args: args{whitelist: alwaysTrue, blacklist: alwaysFalse}, want: true},
		{args: args{whitelist: alwaysFalse, blacklist: alwaysTrue}, want: false},
		{args: args{whitelist: alwaysFalse, blacklist: alwaysFalse}, want: false},
		{args: args{}, want: true}, // no filters at all: everything passes
	}
	for _, tc := range tests {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			got := Apply(tc.args.whitelist, tc.args.blacklist, tc.args.ev)
			if got != tc.want {
				t.Errorf("Apply() = %v, want %v", got, tc.want)
			}
		})
	}
}
// TestMatch checks the MatchAll/MatchOne/MatchNone combinators, including the
// empty-list edge case where all three must report true.
func TestMatch(t *testing.T) {
	pass := func(_ *v1.Event) bool { return true }
	fail := func(_ *v1.Event) bool { return false }

	fs := FilterFuncs{pass, fail}
	assert.False(t, fs.MatchAll(nil))  // one filter fails
	assert.True(t, fs.MatchOne(nil))   // one filter passes
	assert.False(t, fs.MatchNone(nil)) // one filter passes

	// When no filter is specified, MatchAll(), MatchOne() and MatchNone() must
	// all return true
	fs = FilterFuncs{}
	assert.True(t, fs.MatchAll(nil))
	assert.True(t, fs.MatchOne(nil))
	assert.True(t, fs.MatchNone(nil))
}
// testFilterTrue is an OnBuildFilter whose built filter matches every event.
type testFilterTrue struct{}

func (t *testFilterTrue) OnBuildFilter(_ context.Context, ff *flowpb.FlowFilter) ([]FilterFunc, error) {
	matchAll := func(_ *v1.Event) bool { return true }
	return []FilterFunc{matchAll}, nil
}
// testFilterFalse is an OnBuildFilter whose built filter rejects every event.
type testFilterFalse struct{}

func (t *testFilterFalse) OnBuildFilter(_ context.Context, ff *flowpb.FlowFilter) ([]FilterFunc, error) {
	matchNone := func(_ *v1.Event) bool { return false }
	return []FilterFunc{matchNone}, nil
}
// TestOnBuildFilter verifies that filters built from FlowFilter protobufs are
// combined (ANDed) with the extra OnBuildFilter implementations passed to
// BuildFilterList: the event matches only when every component matches.
func TestOnBuildFilter(t *testing.T) {
	fl, err := BuildFilterList(context.Background(),
		[]*flowpb.FlowFilter{{SourceIdentity: []uint32{1, 2, 3}}}, // true
		[]OnBuildFilter{&testFilterTrue{}}) // true
	assert.NoError(t, err)
	// true AND true => match
	assert.Equal(t, true, fl.MatchAll(&v1.Event{Event: &flowpb.Flow{
		Source: &flowpb.Endpoint{Identity: 3},
	}}))
	fl, err = BuildFilterList(context.Background(),
		[]*flowpb.FlowFilter{{SourceIdentity: []uint32{1, 2, 3}}}, // true
		[]OnBuildFilter{&testFilterFalse{}}) // false
	assert.NoError(t, err)
	// true AND false => no match
	assert.Equal(t, false, fl.MatchAll(&v1.Event{Event: &flowpb.Flow{
		Source: &flowpb.Endpoint{Identity: 3},
	}}))
	fl, err = BuildFilterList(context.Background(),
		[]*flowpb.FlowFilter{{SourceIdentity: []uint32{1, 2, 3}}}, // true
		[]OnBuildFilter{
			&testFilterFalse{}, // false
			&testFilterTrue{}}) // true
	assert.NoError(t, err)
	// one false component is enough to reject the event
	assert.Equal(t, false, fl.MatchAll(&v1.Event{Event: &flowpb.Flow{
		Source: &flowpb.Endpoint{Identity: 3},
	}}))
}
| cilium/cilium | pkg/hubble/filters/filters_test.go | GO | apache-2.0 | 3,264 |
package pet
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"github.com/go-swagger/go-swagger/httpkit"
)
/*DeletePetBadRequest Invalid pet value
swagger:response deletePetBadRequest
*/
type DeletePetBadRequest struct {
	// Intentionally empty: this 400 response carries no headers and no body.
}
// NewDeletePetBadRequest creates DeletePetBadRequest with default headers values
func NewDeletePetBadRequest() *DeletePetBadRequest {
	// The response has no fields, so the zero value is the only value.
	response := new(DeletePetBadRequest)
	return response
}
// WriteResponse to the client
func (o *DeletePetBadRequest) WriteResponse(rw http.ResponseWriter, producer httpkit.Producer) {
	// 400 Bad Request, written via the named net/http constant.
	rw.WriteHeader(http.StatusBadRequest)
}
| Dataman-Cloud/drone | vendor/github.com/go-swagger/go-swagger/examples/generated/restapi/operations/pet/delete_pet_responses.go | GO | apache-2.0 | 655 |
package com.eftimoff.mapreduce.filtering.distinct;
import static com.eftimoff.mapreduce.utils.MRDPUtils.transformXmlToMap;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
 * MapReduce job that emits the distinct set of UserIds found in a Stack
 * Exchange style XML dump. Mapper emits each UserId as a key; the reducer
 * (also used as the combiner) collapses duplicates by writing each key once.
 */
public class DistinctUser extends Configured implements Tool {

	/**
	 * Emits each record's UserId attribute as the map output key with a null
	 * value, so the shuffle groups duplicate ids together.
	 */
	public static class DistinctUserMapper extends
			Mapper<Object, Text, Text, NullWritable> {

		// Reused output key to avoid allocating a new Text per record.
		private Text outUserId = new Text();

		public void map(Object key, Text value, Context context)
				throws IOException, InterruptedException {
			// Each input line is an XML element; parse its attributes into a map.
			Map<String, String> parsed = transformXmlToMap(value.toString());
			// Get the value for the UserId attribute
			String userId = parsed.get("UserId");
			if (userId == null) {
				// Record has no UserId attribute; skip it.
				return;
			}
			// Set our output key to the user's id
			outUserId.set(userId);
			// Write the user's id with a null value
			context.write(outUserId, NullWritable.get());
		}
	}

	/**
	 * Writes each distinct user id exactly once. Safe to use as a combiner as
	 * well, because emitting the key once per group is idempotent.
	 */
	public static class DistinctUserReducer extends
			Reducer<Text, NullWritable, Text, NullWritable> {

		public void reduce(Text key, Iterable<NullWritable> values,
				Context context) throws IOException, InterruptedException {
			// Write the user's id with a null value
			context.write(key, NullWritable.get());
		}
	}

	public static void main(String[] args) throws Exception {
		int res = ToolRunner.run(new Configuration(), new DistinctUser(), args);
		System.exit(res);
	}

	/**
	 * Configures and submits the job: expects exactly two arguments,
	 * the input path and the output path.
	 */
	@Override
	public int run(String[] args) throws Exception {
		Configuration conf = new Configuration();
		GenericOptionsParser parser = new GenericOptionsParser(conf, args);
		String[] otherArgs = parser.getRemainingArgs();
		if (otherArgs.length != 2) {
			System.err.println("Usage: DistinctUser <in> <out>");
			ToolRunner.printGenericCommandUsage(System.err);
			System.exit(2);
		}
		Job job = new Job(conf, "Distinct User");
		job.setJarByClass(DistinctUser.class);
		job.setMapperClass(DistinctUserMapper.class);
		job.setReducerClass(DistinctUserReducer.class);
		// The reducer doubles as the combiner to cut shuffle traffic.
		job.setCombinerClass(DistinctUserReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(NullWritable.class);
		FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
		FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
		boolean success = job.waitForCompletion(true);
		return success ? 0 : 1;
	}
}
| geftimov/MapReduce | src/main/java/com/eftimoff/mapreduce/filtering/distinct/DistinctUser.java | Java | apache-2.0 | 2,821 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using Microsoft.CodeAnalysis.PooledObjects;
namespace Microsoft.CodeAnalysis.CSharp.Symbols
{
/// <summary>
/// Compiler-synthesized System.Runtime.CompilerServices.NativeIntegerAttribute
/// embedded in the module: a public bool[] field "TransformFlags" plus two
/// constructors (parameterless, and one taking a bool[]).
/// </summary>
internal sealed class SynthesizedEmbeddedNativeIntegerAttributeSymbol : SynthesizedEmbeddedAttributeSymbolBase
{
    private readonly ImmutableArray<FieldSymbol> _fields;
    private readonly ImmutableArray<MethodSymbol> _constructors;
    private readonly TypeSymbol _boolType;

    // Name of the single public field carrying the transform flags.
    private const string FieldName = "TransformFlags";

    public SynthesizedEmbeddedNativeIntegerAttributeSymbol(
        string name,
        NamespaceSymbol containingNamespace,
        ModuleSymbol containingModule,
        NamedTypeSymbol systemAttributeType,
        TypeSymbol boolType)
        : base(name, containingNamespace, containingModule, baseType: systemAttributeType)
    {
        _boolType = boolType;

        // bool[] — the type of the TransformFlags field and of the second
        // constructor's single parameter.
        var boolArrayType = TypeWithAnnotations.Create(
            ArrayTypeSymbol.CreateSZArray(
                boolType.ContainingAssembly,
                TypeWithAnnotations.Create(boolType)));

        _fields = ImmutableArray.Create<FieldSymbol>(
            new SynthesizedFieldSymbol(
                this,
                boolArrayType.Type,
                FieldName,
                isPublic: true,
                isReadOnly: true,
                isStatic: false));

        // Two constructors: () initializes TransformFlags to { true };
        // (bool[]) stores the argument directly.
        _constructors = ImmutableArray.Create<MethodSymbol>(
            new SynthesizedEmbeddedAttributeConstructorWithBodySymbol(
                this,
                m => ImmutableArray<ParameterSymbol>.Empty,
                (f, s, p) => GenerateParameterlessConstructorBody(f, s)),
            new SynthesizedEmbeddedAttributeConstructorWithBodySymbol(
                this,
                m => ImmutableArray.Create(SynthesizedParameterSymbol.Create(m, boolArrayType, 0, RefKind.None)),
                (f, s, p) => GenerateBoolArrayConstructorBody(f, s, p)));

        // Ensure we never get out of sync with the description
        Debug.Assert(_constructors.Length == AttributeDescription.NativeIntegerAttribute.Signatures.Length);
    }

    internal override IEnumerable<FieldSymbol> GetFieldsToEmit() => _fields;

    public override ImmutableArray<MethodSymbol> Constructors => _constructors;

    internal override AttributeUsageInfo GetAttributeUsageInfo()
    {
        return new AttributeUsageInfo(
            AttributeTargets.Class | AttributeTargets.Event | AttributeTargets.Field | AttributeTargets.GenericParameter | AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.ReturnValue,
            allowMultiple: false,
            inherited: false);
    }

    // Synthesizes: this.TransformFlags = new bool[] { true };
    private void GenerateParameterlessConstructorBody(SyntheticBoundNodeFactory factory, ArrayBuilder<BoundStatement> statements)
    {
        statements.Add(
            factory.ExpressionStatement(
                factory.AssignmentExpression(
                    factory.Field(
                        factory.This(),
                        _fields.Single()),
                    factory.Array(
                        _boolType,
                        ImmutableArray.Create<BoundExpression>(factory.Literal(true))
                    )
                )
            )
        );
    }

    // Synthesizes: this.TransformFlags = <parameter>;
    private void GenerateBoolArrayConstructorBody(SyntheticBoundNodeFactory factory, ArrayBuilder<BoundStatement> statements, ImmutableArray<ParameterSymbol> parameters)
    {
        statements.Add(
            factory.ExpressionStatement(
                factory.AssignmentExpression(
                    factory.Field(
                        factory.This(),
                        _fields.Single()),
                    factory.Parameter(parameters.Single())
                )
            )
        );
    }
}
| brettfo/roslyn | src/Compilers/CSharp/Portable/Symbols/Synthesized/SynthesizedEmbeddedNativeIntegerAttributeSymbol.cs | C# | apache-2.0 | 4,340 |
import React, { FC, useState } from 'react';
import styled from 'styled-components';
import { Modal } from 'antd';
import { Z_INDEX_GREATER_THAN_HEADER } from 'components/Header';
import { useHistory } from 'react-router-dom';
import { useTranslation } from 'react-i18next';
import { useToggle } from 'react-use';
import { Steps, Row } from 'antd';
import StepOneBasic from './StepOneBasic';
import StepTwoAddBatch from './StepTwoAddBatch';
import { useResetCreateForm } from 'hooks/dataset';
import { forceToRefreshQuery } from 'shared/queryClient';
import { DATASET_LIST_QUERY_KEY } from '../DatasetList';
// Modal shell for the wizard: hides antd's default footer (each step renders
// its own action buttons) and tightens the body's bottom padding.
const ContainerModal = styled(Modal)`
.ant-modal-body {
padding-bottom: 14px;
}
.ant-modal-footer {
display: none;
}
`;
// Fixed-width, horizontally centered row holding the step indicator.
const StepRow = styled(Row)`
width: 340px;
margin: 10px auto 35px;
`;
// Two-step "create dataset" wizard rendered inside a modal:
// step 0 collects basic info, step 1 adds data batches and starts the import.
// Closing the modal resets the shared create-form state and navigates back
// to the dataset list.
const CreateDataset: FC = () => {
  const history = useHistory();
  const { t } = useTranslation();
  // Current wizard step: 0 = basic info, 1 = add batch.
  const [step, setStep] = useState(0);
  const [visible, toggleVisible] = useToggle(true);
  const resetForm = useResetCreateForm();

  return (
    <ContainerModal
      title={t('dataset.title_create')}
      visible={visible}
      style={{ top: '20%' }}
      width="fit-content"
      closable={false}
      maskClosable={false}
      maskStyle={{ backdropFilter: 'blur(4px)' }}
      keyboard={false}
      afterClose={afterClose}
      getContainer="body"
      zIndex={Z_INDEX_GREATER_THAN_HEADER}
      onCancel={() => toggleVisible(false)}
    >
      <StepRow justify="center">
        <Steps current={step} size="small">
          <Steps.Step title={t('dataset.step_basic')} />
          <Steps.Step title={t('dataset.step_add_batch')} />
        </Steps>
      </StepRow>
      {step === 0 && <StepOneBasic onSuccess={goAddBatch} onCancel={closeModal} />}
      {step === 1 && (
        <StepTwoAddBatch
          onSuccess={onCreateNStartImportSuccess}
          onPrevious={backToStepBasic}
          onCancel={closeModal}
        />
      )}
    </ContainerModal>
  );

  // Runs after the modal's close animation: leave the wizard route.
  function afterClose() {
    history.push('/datasets');
  }
  function goAddBatch() {
    setStep(1);
  }
  function backToStepBasic() {
    setStep(0);
  }
  // Clears the shared form state before hiding the modal.
  function closeModal() {
    resetForm();
    toggleVisible(false);
  }
  // On successful create+import: refresh the dataset list query, then close.
  function onCreateNStartImportSuccess() {
    forceToRefreshQuery([DATASET_LIST_QUERY_KEY]);
    closeModal();
  }
};

export default CreateDataset;
| bytedance/fedlearner | web_console_v2/client/src/views/Datasets/CreateDataset/index.tsx | TypeScript | apache-2.0 | 2,390 |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.hierarchy;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.ide.DeleteProvider;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.DeleteHandler;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.ui.PopupHandler;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;
public abstract class TypeHierarchyBrowserBase extends HierarchyBrowserBaseEx {
public static final String TYPE_HIERARCHY_TYPE = "Class {0}";
public static final String SUBTYPES_HIERARCHY_TYPE = "Subtypes of {0}";
public static final String SUPERTYPES_HIERARCHY_TYPE = "Supertypes of {0}";
private boolean myIsInterface;
private final MyDeleteProvider myDeleteElementProvider = new MyDeleteProvider();
public TypeHierarchyBrowserBase(Project project, PsiElement element) {
super(project, element);
}
protected abstract boolean isInterface(@NotNull PsiElement psiElement);
protected void createTreeAndSetupCommonActions(@NotNull Map<? super @Nls String, ? super JTree> trees, @NotNull String groupId) {
BaseOnThisTypeAction baseOnThisTypeAction = createBaseOnThisAction();
JTree tree1 = createTree(true);
PopupHandler.installPopupMenu(tree1, groupId, ActionPlaces.TYPE_HIERARCHY_VIEW_POPUP);
baseOnThisTypeAction
.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_TYPE_HIERARCHY).getShortcutSet(), tree1);
trees.put(getTypeHierarchyType(), tree1);
JTree tree2 = createTree(true);
PopupHandler.installPopupMenu(tree2, groupId, ActionPlaces.TYPE_HIERARCHY_VIEW_POPUP);
baseOnThisTypeAction
.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_TYPE_HIERARCHY).getShortcutSet(), tree2);
trees.put(getSupertypesHierarchyType(), tree2);
JTree tree3 = createTree(true);
PopupHandler.installPopupMenu(tree3, groupId, ActionPlaces.TYPE_HIERARCHY_VIEW_POPUP);
baseOnThisTypeAction
.registerCustomShortcutSet(ActionManager.getInstance().getAction(IdeActions.ACTION_TYPE_HIERARCHY).getShortcutSet(), tree3);
trees.put(getSubtypesHierarchyType(), tree3);
}
@NotNull
protected BaseOnThisTypeAction createBaseOnThisAction() {
return new BaseOnThisTypeAction();
}
protected abstract boolean canBeDeleted(PsiElement psiElement);
protected abstract String getQualifiedName(PsiElement psiElement);
@Override
protected @NotNull Map<String, Supplier<String>> getPresentableNameMap() {
HashMap<String, Supplier<String>> map = new HashMap<>();
map.put(TYPE_HIERARCHY_TYPE, TypeHierarchyBrowserBase::getTypeHierarchyType);
map.put(SUBTYPES_HIERARCHY_TYPE, TypeHierarchyBrowserBase::getSubtypesHierarchyType);
map.put(SUPERTYPES_HIERARCHY_TYPE, TypeHierarchyBrowserBase::getSupertypesHierarchyType);
return map;
}
public boolean isInterface() {
return myIsInterface;
}
@Override
protected void setHierarchyBase(@NotNull PsiElement element) {
super.setHierarchyBase(element);
myIsInterface = isInterface(element);
}
@Override
protected void prependActions(@NotNull DefaultActionGroup actionGroup) {
actionGroup.add(new ViewClassHierarchyAction());
actionGroup.add(new ViewSupertypesHierarchyAction());
actionGroup.add(new ViewSubtypesHierarchyAction());
actionGroup.add(new AlphaSortAction());
}
@Override
@NotNull
protected String getActionPlace() {
return ActionPlaces.TYPE_HIERARCHY_VIEW_TOOLBAR;
}
@Override
public final Object getData(@NotNull String dataId) {
if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
return myDeleteElementProvider;
}
return super.getData(dataId);
}
@Override
@NotNull
protected String getPrevOccurenceActionNameImpl() {
return IdeBundle.message("hierarchy.type.prev.occurence.name");
}
@Override
@NotNull
protected String getNextOccurenceActionNameImpl() {
return IdeBundle.message("hierarchy.type.next.occurence.name");
}
private final class MyDeleteProvider implements DeleteProvider {
@Override
public void deleteElement(@NotNull DataContext dataContext) {
PsiElement aClass = getSelectedElement();
if (!canBeDeleted(aClass)) return;
LocalHistoryAction a = LocalHistory.getInstance().startAction(IdeBundle.message("progress.deleting.class", getQualifiedName(aClass)));
try {
PsiElement[] elements = {aClass};
DeleteHandler.deletePsiElement(elements, myProject);
}
finally {
a.finish();
}
}
@Override
public boolean canDeleteElement(@NotNull DataContext dataContext) {
PsiElement aClass = getSelectedElement();
if (!canBeDeleted(aClass)) {
return false;
}
PsiElement[] elements = {aClass};
return DeleteHandler.shouldEnableDeleteAction(elements);
}
}
protected static class BaseOnThisTypeAction extends BaseOnThisElementAction {
public BaseOnThisTypeAction() {
super(IdeBundle.messagePointer("action.base.on.this.class"), LanguageTypeHierarchy.INSTANCE);
}
@Override
@Nls
protected String correctViewType(@NotNull HierarchyBrowserBaseEx browser, @Nls String viewType) {
if (((TypeHierarchyBrowserBase)browser).myIsInterface && getTypeHierarchyType().equals(viewType)) {
return getSubtypesHierarchyType();
}
return viewType;
}
}
public static @Nls String getTypeHierarchyType() {
//noinspection UnresolvedPropertyKey
return IdeBundle.message("title.hierarchy.class");
}
@Nls
public static String getSubtypesHierarchyType() {
//noinspection UnresolvedPropertyKey
return IdeBundle.message("title.hierarchy.subtypes");
}
public static @Nls String getSupertypesHierarchyType() {
//noinspection UnresolvedPropertyKey
return IdeBundle.message("title.hierarchy.supertypes");
}
}
| GunoH/intellij-community | platform/lang-impl/src/com/intellij/ide/hierarchy/TypeHierarchyBrowserBase.java | Java | apache-2.0 | 6,301 |
package exceptions;
/**
 * Thrown when a DAO fails to persist (save) an entity.
 */
public class IDaoSaveException extends Exception {

	private static final long serialVersionUID = 8041577551538125989L;

	/** Creates an exception with no detail message and no cause. */
	public IDaoSaveException() {
		super();
	}

	/**
	 * Creates an exception with a detail message.
	 *
	 * @param msg the detail message
	 */
	public IDaoSaveException(String msg) {
		super(msg);
	}

	/**
	 * Creates an exception wrapping an underlying cause, preserving it for
	 * callers that inspect {@link #getCause()}.
	 *
	 * @param cause the underlying failure
	 */
	public IDaoSaveException(Throwable cause) {
		super(cause);
	}

	/**
	 * Creates an exception with a detail message and an underlying cause.
	 *
	 * @param msg   the detail message
	 * @param cause the underlying failure
	 */
	public IDaoSaveException(String msg, Throwable cause) {
		super(msg, cause);
	}
}
| patrickfav/tuwien | master/SB_task2/src/exceptions/IDaoSaveException.java | Java | apache-2.0 | 325 |
//
//
// Copyright 2012 Kii Corporation
// http://kii.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//
package com.kii.cloud.engine;
/**
 * Integer constants identifying the kinds of asynchronous tasks the engine
 * runs, grouped by the domain they operate on.
 */
public class TaskType {

	/** Task codes for operations on cloud files. */
	public static class FileTask {
		public static final int UPLOAD = 0;
		public static final int DOWNLOAD = 1;
		public static final int UPDATE = 2;
		public static final int UPDATE_METADATA = 3;
		public static final int REFRESH = 4;
		public static final int DELETE = 5;
		public static final int MOVE_TRASH = 6;
		public static final int RESTORE_TRASH = 7;
		public static final int LIST_CLOUD = 8;
		public static final int LIST_TRASH = 9;
	}

	/** Task codes for operations on the user account. */
	public static class UserTask {
		public static final int LOGIN = 0;
		public static final int REGISTER = 1;
		public static final int CHANGE_PASSWORD = 2;
		public static final int UPDATE = 3;
		public static final int DELETE = 4;
		public static final int REFRESH = 5;
	}
}
| kii-dev-jenkins/KiiFileStorageSampleApp | src/com/kii/cloud/engine/TaskType.java | Java | apache-2.0 | 1,522 |
package com.xlg.forkids.chat.holder;
import android.app.Activity;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.xlg.forkids.R;
import com.xlg.forkids.baseitems.BaseHolder;
import com.xlg.forkids.baseitems.BaseVAccount;
import com.xlg.forkids.baseitems.BaseUser;
/** Holder for the user avatar/header area showing basic profile info.
 *  (Original comments were mojibake-encoded Chinese; translated to English.) */
public class BaseUserInfoHolder extends BaseHolder {

	/** Avatar image. */
	public ImageView Image;
	/** Name / nickname. */
	public TextView Name;
	/** Account id. */
	public TextView Account;
	/** Gender icon. */
	public ImageView Sex;
	/** Personal signature / description. */
	public TextView Memo;
	public ImageView EditMemo;

	// Size passed to the image fetcher when loading the avatar; set via setImageSize().
	private int imageSize;

	public BaseUserInfoHolder(Activity activity, View v) {
		super(activity, v);
		initView();
	}

	// Binds all view references from the root view; the memo edit button is hidden by default.
	private void initView() {
		Image = (ImageView) mRoot
				.findViewById(R.id.base_corner_masked_image_image);
		Name = (TextView) mRoot.findViewById(R.id.user_base_info_name);
		Account = (TextView) mRoot.findViewById(R.id.user_base_info_userid);
		Memo = (TextView) mRoot.findViewById(R.id.user_base_info_memo);
		EditMemo = (ImageView) mRoot
				.findViewById(R.id.user_base_info_memo_edit);
		EditMemo.setVisibility(View.GONE);
		Sex = (ImageView) mRoot.findViewById(R.id.user_base_info_sex);
	}

	public void setImageSize(int size) {
		imageSize = size;
	}

	/** Displays a regular user's information (name, gender icon, account id, avatar). */
	public void showContent(BaseUser user) {
		Name.setText(user.getName());
		// Gender 0 is rendered with the "f" icon, any other value with the "m" icon.
		Sex.setImageResource(user.getSex() == 0 ? R.drawable.personal_sex_f
				: R.drawable.personal_sex_m);
		Account.setText(mContext.getString(R.string.app_name) + ": "
				+ user.getCode());
		mFetcher.loadImage(user.getImage(), Image, imageSize, true);
	}

	/** Displays a V-account's information; memo text and gender icon are hidden. */
	public void showContent(BaseVAccount v) {
		mFetcher.loadImage(v.getImage(), Image, imageSize, true);
		Name.setText(v.getName());
		Account.setText(mContext.getString(R.string.unions_title_text) + ": "
				+ v.getVcode());
		Memo.setText(v.getDescription());
		Memo.setVisibility(View.GONE);
		Sex.setVisibility(View.GONE);
	}
}
| RyanTech/Forkids | ForKids/src/com/xlg/forkids/chat/holder/BaseUserInfoHolder.java | Java | apache-2.0 | 2,094 |
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
using SaanSoft.AspNet.Identity3.MongoDB;
using Web.MongoDBIdentitySample.Models;
// For more information on enabling MVC for empty projects, visit http://go.microsoft.com/fwlink/?LinkID=397860
namespace Web.MongoDBIdentitySample.Controllers
{
/// <summary>
/// Demonstration controller: its only purpose is to verify that every
/// ASP.NET Identity store interface implemented by the MongoDB UserStore and
/// RoleStore can be resolved from the dependency-injection container.
/// </summary>
public class DiTestController : Controller
{
    private readonly RoleManager<IdentityRole> _roleManager;
    private readonly UserManager<ApplicationUser> _userManager;
    private readonly IIdentityDatabaseContext<ApplicationUser, IdentityRole, string> _identityDatabaseContext;
    private readonly IUserStore<ApplicationUser> _userStore;
    private readonly IUserLoginStore<ApplicationUser> _userLoginStore;
    private readonly IUserRoleStore<ApplicationUser> _userRoleStore;
    private readonly IUserClaimStore<ApplicationUser> _userClaimStore;
    private readonly IUserPasswordStore<ApplicationUser> _userPasswordStore;
    private readonly IUserSecurityStampStore<ApplicationUser> _userSecurityStampStore;
    private readonly IUserEmailStore<ApplicationUser> _userEmailStore;
    private readonly IUserLockoutStore<ApplicationUser> _userLockoutStore;
    private readonly IUserPhoneNumberStore<ApplicationUser> _userPhoneNumberStore;
    private readonly IUserTwoFactorStore<ApplicationUser> _userTwoFactorStore;
    private readonly IQueryableUserStore<ApplicationUser> _queryableUserStore;
    private readonly IRoleStore<IdentityRole> _roleStore;
    private readonly IRoleClaimStore<IdentityRole> _roleClaimStore;
    private readonly IQueryableRoleStore<IdentityRole> _queryableRoleStore;

    /// <summary>
    /// Every parameter is constructor-injected; resolution failing for any of
    /// them would surface as a DI error when this controller is activated.
    /// </summary>
    public DiTestController(
        // the Microsoft.AspNetCore.Identity User and Role Manager classes
        RoleManager<IdentityRole> roleManager,
        UserManager<ApplicationUser> userManager,
        IIdentityDatabaseContext<ApplicationUser, IdentityRole, string> identityDatabaseContext,
        // if want to use with SOLID and Interface Segregation Principle, then can just use the specific interface that need
        // these interfaces are all implemented by UserStore
        IUserStore<ApplicationUser> userStore,
        IUserLoginStore<ApplicationUser> userLoginStore,
        IUserRoleStore<ApplicationUser> userRoleStore,
        IUserClaimStore<ApplicationUser> userClaimStore,
        IUserPasswordStore<ApplicationUser> userPasswordStore,
        IUserSecurityStampStore<ApplicationUser> userSecurityStampStore,
        IUserEmailStore<ApplicationUser> userEmailStore,
        IUserLockoutStore<ApplicationUser> userLockoutStore,
        IUserPhoneNumberStore<ApplicationUser> userPhoneNumberStore,
        IUserTwoFactorStore<ApplicationUser> userTwoFactorStore,
        IQueryableUserStore<ApplicationUser> queryableUserStore,
        // these interfaces are all implemented by RoleStore
        IRoleStore<IdentityRole> roleStore,
        IRoleClaimStore<IdentityRole> roleClaimStore,
        IQueryableRoleStore<IdentityRole> queryableRoleStore
        )
    {
        _roleManager = roleManager;
        _userManager = userManager;
        _identityDatabaseContext = identityDatabaseContext;
        _userStore = userStore;
        _userLoginStore = userLoginStore;
        _userRoleStore = userRoleStore;
        _userClaimStore = userClaimStore;
        _userPasswordStore = userPasswordStore;
        _userSecurityStampStore = userSecurityStampStore;
        _userEmailStore = userEmailStore;
        _userLockoutStore = userLockoutStore;
        _userPhoneNumberStore = userPhoneNumberStore;
        _userTwoFactorStore = userTwoFactorStore;
        _queryableUserStore = queryableUserStore;
        _roleStore = roleStore;
        _roleClaimStore = roleClaimStore;
        _queryableRoleStore = queryableRoleStore;
    }

    // GET: /<controller>/
    public IActionResult Index()
    {
        return View();
    }
}
| Amevacorp/SaanSoft.AspNet.Identity3.MongoDB | samples/Web.MongoDBIdentitySample/Controllers/DiTestController.cs | C# | apache-2.0 | 3,643 |
/*
* Copyright 2014,2016 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cherry.elemental.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.List;
import java.util.Map;
import org.junit.Test;
public class ToMapUtilTest { // NOTE(review): tests below assert hard-coded stack-trace line numbers, so inserting/removing LINES in this file breaks them; only end-of-line comments are added here
	@Test
	public void testFromThrowable_00() { // depth = MAX_VALUE: full stack trace, no truncation marker
		Exception ex = new Exception("MESSAGE");
		Map<String, Object> map = ToMapUtil.fromThrowable(ex, Integer.MAX_VALUE);
		assertEquals("MESSAGE", map.get("message"));
		assertNotNull(map.get("stackTrace"));
		assertTrue(map.get("stackTrace") instanceof List);
		@SuppressWarnings("unchecked")
		List<String> list = (List<String>) map.get("stackTrace");
		assertFalse(list.isEmpty());
		assertEquals("cherry.elemental.util.ToMapUtilTest.testFromThrowable_00(ToMapUtilTest.java:36)", list.get(0)); // fragile: exact source line of the fromThrowable call above
		assertNull(map.get("cause")); // no cause was set
	}
	@Test
	public void testFromThrowable_01() { // depth = 0: trace collapsed to the "..." marker
		Exception ex = new Exception("MESSAGE");
		Map<String, Object> map = ToMapUtil.fromThrowable(ex, 0);
		assertEquals("MESSAGE", map.get("message"));
		assertNotNull(map.get("stackTrace"));
		assertTrue(map.get("stackTrace") instanceof List);
		@SuppressWarnings("unchecked")
		List<String> list = (List<String>) map.get("stackTrace");
		assertFalse(list.isEmpty());
		assertEquals("...", list.get(0)); // depth 0 leaves only the truncation marker
		assertNull(map.get("cause"));
	}
	@Test
	public void testFromThrowable_02() { // depth = 1: one frame for the outer exception, cause truncated
		Exception cause = new Exception("CAUSE");
		Exception ex = new Exception("MESSAGE", cause);
		Map<String, Object> map = ToMapUtil.fromThrowable(ex, 1);
		assertEquals("MESSAGE", map.get("message"));
		assertNotNull(map.get("stackTrace"));
		assertTrue(map.get("stackTrace") instanceof List);
		@SuppressWarnings("unchecked")
		List<String> list = (List<String>) map.get("stackTrace");
		assertFalse(list.isEmpty());
		assertEquals("cherry.elemental.util.ToMapUtilTest.testFromThrowable_02(ToMapUtilTest.java:71)", list.get(0)); // fragile: exact source line of the fromThrowable call above
		assertNotNull(map.get("cause")); // cause is mapped recursively
		@SuppressWarnings("unchecked")
		Map<String, Object> map2 = (Map<String, Object>) map.get("cause");
		assertEquals("CAUSE", map2.get("message"));
		assertNotNull(map2.get("stackTrace"));
		assertTrue(map2.get("stackTrace") instanceof List);
		@SuppressWarnings("unchecked")
		List<String> list2 = (List<String>) map2.get("stackTrace");
		assertFalse(list2.isEmpty());
		assertEquals("...", list2.get(0)); // depth budget exhausted by the outer trace
	}
	@Test
	public void testInstantiate() { // the utility class still has a public constructor; this pins that it is callable
		try {
			new ToMapUtil();
		} catch (Exception ex) {
			fail("Exception must not be thrown");
		}
	}
}
| agwlvssainokuni/springapp2 | corelib/elemental/src/test/java/cherry/elemental/util/ToMapUtilTest.java | Java | apache-2.0 | 3,335 |
package com.hackerrank.test.ut;
import static org.junit.Assert.*;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Test;
import com.hackerrank.test.FillingJars;
/**
 * Tests for {@code FillingJars.getAllCandies}.
 */
public class FillingJarsTestCase {

	@AfterClass
	public static void tearDownAfterClass() throws Exception {
	}

	/**
	 * Filling jars 1..2 with 100 candies each must yield 200 candies total.
	 */
	@Test
	public void testGetAllCandies() {
		long a = 1, b = 2, k = 100;
		long total = FillingJars.getAllCandies(a, b, k);
		// Fix: the failure message was declared but never passed to the
		// assertion; use the assertEquals(String, long, long) overload.
		String message = "Get expected candy count.";
		assertEquals(message, 200, total);
	}
}
| haokaibo/HankerRank | AmazonTest/src/com/hackerrank/test/ut/FillingJarsTestCase.java | Java | apache-2.0 | 500 |
/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var gulp = require('gulp-help')(require('gulp'));
var path = require('path');
var srcGlobs = require('../config').presubmitGlobs;
var util = require('gulp-util');

// File types that must carry their own dedicated copyright/license header.
var dedicatedCopyrightNoteSources = /(\.js|\.css|\.go)$/;

// Shared explanation messages attached to forbidden-term matches below; they
// are printed next to the offending match to tell the author how to fix it.
var es6polyfill = 'Not available because we do not currently' +
    ' ship with a needed ES6 polyfill.';
var requiresReviewPrivacy =
    'Usage of this API requires dedicated review due to ' +
    'being privacy sensitive. Please file an issue asking for permission' +
    ' to use if you have not yet done so.';
var privateServiceFactory = 'This service should only be installed in ' +
    'the whitelisted files. Other modules should use a public function ' +
    'typically called serviceNameFor.';
var shouldNeverBeUsed =
    'Usage of this API is not allowed - only for internal purposes.';
// Terms that must not appear in our source files.
var forbiddenTerms = {
'DO NOT SUBMIT': '',
'describe\\.only': '',
'it\\.only': '',
'sinon\\.(spy|stub|mock)\\(\\w[^)]*\\)': {
message: 'Use a sandbox instead to avoid repeated `#restore` calls'
},
'(\\w*([sS]py|[sS]tub|[mM]ock|clock).restore)': {
message: 'Use a sandbox instead to avoid repeated `#restore` calls'
},
'sinon\\.useFake\\w+': {
message: 'Use a sandbox instead to avoid repeated `#restore` calls'
},
'console\\.\\w+\\(': {
message: 'If you run against this, use console/*OK*/.log to ' +
'whitelist a legit case.',
// TODO: temporary, remove when validator is up to date
whitelist: [
'validator/validator.js',
'validator/parse-css.js',
'validator/validator-in-browser.js',
]
},
'iframePing': {
message: 'This is only available in vendor config for ' +
'temporary workarounds.',
whitelist: [
'extensions/amp-analytics/0.1/amp-analytics.js',
],
},
// Service factories that should only be installed once.
'installActionService': {
message: privateServiceFactory,
whitelist: [
'src/service/action-impl.js',
'src/service/standard-actions-impl.js',
'src/amp-core-service.js',
],
},
'installActionHandler': {
message: privateServiceFactory,
whitelist: [
'src/service/action-impl.js',
'extensions/amp-access/0.1/amp-access.js',
],
},
'installActivityService': {
message: privateServiceFactory,
whitelist: [
'src/service/activity-impl.js',
'extensions/amp-analytics/0.1/amp-analytics.js'
]
},
'installCidService': {
message: privateServiceFactory,
whitelist: [
'src/service/cid-impl.js',
'extensions/amp-access/0.1/amp-access.js',
'extensions/amp-analytics/0.1/amp-analytics.js',
],
},
'installStorageService': {
message: privateServiceFactory,
whitelist: [
'extensions/amp-analytics/0.1/amp-analytics.js',
'src/service/storage-impl.js',
],
},
'installViewerService': {
message: privateServiceFactory,
whitelist: [
'src/amp-core-service.js',
'src/service/history-impl.js',
'src/service/resources-impl.js',
'src/service/viewer-impl.js',
'src/service/viewport-impl.js',
'src/service/vsync-impl.js',
],
},
'installViewportService': {
message: privateServiceFactory,
whitelist: [
'src/amp-core-service.js',
'src/service/resources-impl.js',
'src/service/viewport-impl.js',
],
},
'installVsyncService': {
message: privateServiceFactory,
whitelist: [
'src/amp-core-service.js',
'src/service/resources-impl.js',
'src/service/viewport-impl.js',
'src/service/vsync-impl.js',
],
},
'installResourcesService': {
message: privateServiceFactory,
whitelist: [
'src/amp-core-service.js',
'src/service/resources-impl.js',
'src/service/standard-actions-impl.js',
],
},
'sendMessage': {
message: privateServiceFactory,
whitelist: [
'src/service/viewer-impl.js',
'src/service/storage-impl.js',
'examples/viewer-integr-messaging.js',
'extensions/amp-access/0.1/login-dialog.js',
],
},
// Privacy sensitive
'cidFor': {
message: requiresReviewPrivacy,
whitelist: [
'builtins/amp-ad.js',
'src/cid.js',
'src/service/cid-impl.js',
'src/url-replacements.js',
'extensions/amp-access/0.1/amp-access.js',
'extensions/amp-user-notification/0.1/amp-user-notification.js',
],
},
'getBaseCid': {
message: requiresReviewPrivacy,
whitelist: [
'src/service/cid-impl.js',
'src/service/viewer-impl.js',
],
},
'cookie\\W': {
message: requiresReviewPrivacy,
whitelist: [
'src/cookies.js',
'src/service/cid-impl.js',
],
},
'getCookie\\W': {
message: requiresReviewPrivacy,
whitelist: [
'src/service/cid-impl.js',
'src/cookies.js',
'src/experiments.js',
'tools/experiments/experiments.js',
]
},
'setCookie\\W': {
message: requiresReviewPrivacy,
whitelist: [
'src/service/cid-impl.js',
'src/cookies.js',
'src/experiments.js',
'tools/experiments/experiments.js',
]
},
'isDevChannel\\W': {
message: requiresReviewPrivacy,
whitelist: [
'extensions/amp-access/0.1/amp-access.js',
'extensions/amp-user-notification/0.1/amp-user-notification.js',
'src/3p-frame.js',
'src/experiments.js',
'src/service/storage-impl.js',
'src/service/viewport-impl.js',
'tools/experiments/experiments.js',
]
},
'isDevChannelVersionDoNotUse_\\W': {
message: shouldNeverBeUsed,
whitelist: [
'src/experiments.js',
]
},
'isTrusted': {
message: requiresReviewPrivacy,
whitelist: [
'src/service/viewer-impl.js',
]
},
'eval\\(': '',
'storageFor': {
message: requiresReviewPrivacy,
whitelist: [
'src/storage.js',
'extensions/amp-user-notification/0.1/amp-user-notification.js',
],
},
'localStorage': {
message: requiresReviewPrivacy,
whitelist: [
'src/service/cid-impl.js',
'src/service/storage-impl.js',
],
},
'sessionStorage': requiresReviewPrivacy,
'indexedDB': requiresReviewPrivacy,
'openDatabase': requiresReviewPrivacy,
'requestFileSystem': requiresReviewPrivacy,
'webkitRequestFileSystem': requiresReviewPrivacy,
'getAccessReaderId': {
message: requiresReviewPrivacy,
whitelist: [
'extensions/amp-access/0.1/amp-access.js',
'src/url-replacements.js',
]
},
'getAuthdataField': {
message: requiresReviewPrivacy,
whitelist: [
'extensions/amp-access/0.1/amp-access.js',
'src/url-replacements.js',
]
},
'debugger': '',
// ES6. These are only the most commonly used.
'Array\\.of': es6polyfill,
// These currently depend on core-js/modules/web.dom.iterable which
// we don't want. That decision could be reconsidered.
'\\.startsWith': {
message: es6polyfill,
whitelist: [
'validator/tokenize-css.js',
'validator/validator.js'
]
},
'\\.endsWith': {
message: es6polyfill,
whitelist: [
// .endsWith occurs in babel generated code.
'dist.3p/current/integration.js',
],
},
// TODO: (erwinm) rewrite the destructure and spread warnings as
// eslint rules (takes more time than this quick regex fix).
// No destructuring allowed since we dont ship with Array polyfills.
'^\\s*(?:let|const|var) *(?:\\[[^\\]]+\\]|{[^}]+}) *=': es6polyfill,
// No spread (eg. test(...args) allowed since we dont ship with Array
// polyfills except `arguments` spread as babel does not polyfill
// it since it can assume that it can `slice` w/o the use of helpers.
'\\.\\.\\.(?!arguments\\))[_$A-Za-z0-9]*(?:\\)|])': {
message: es6polyfill,
whitelist: [
'extensions/amp-access/0.1/access-expr-impl.js',
],
},
// Overridden APIs.
'(doc.*)\\.referrer': {
message: 'Use Viewer.getReferrerUrl() instead.',
whitelist: [
'3p/integration.js',
'dist.3p/current/integration.js',
'src/service/viewer-impl.js',
'src/error.js',
],
},
'(doc[^.]*)\\.contains': {
message: 'Use dom.documentContains API.',
whitelist: [
'src/dom.js',
],
},
'\\sdocument(?![a-zA-Z0-9_])': {
message: 'Use `window.document` or similar to access document, the global' +
'`document` is forbidden',
whitelist: [
'validator/validator.js',
'testing/iframe.js',
'testing/screenshots/make-screenshot.js',
'tools/experiments/experiments.js',
'examples/viewer-integr.js',
],
},
'getUnconfirmedReferrerUrl': {
message: 'Use Viewer.getReferrerUrl() instead.',
whitelist: [
'extensions/amp-dynamic-css-classes/0.1/amp-dynamic-css-classes.js',
'src/3p-frame.js',
'src/service/viewer-impl.js',
],
},
'setTimeout.*throw': {
message: 'Use dev.error or user.error instead.',
whitelist: [
'src/log.js',
],
},
};
var ThreePTermsMessage = 'The 3p bootstrap iframe has no polyfills loaded and' +
' can thus not use most modern web APIs.';
var forbidden3pTerms = {
// We need to forbid promise usage because we don't have our own polyfill
// available. This whitelisting of callNext is a major hack to allow one
// usage in babel's external helpers that is in a code path that we do
// not use.
'\\.then\\((?!callNext)': ThreePTermsMessage,
'Math\\.sign' : ThreePTermsMessage,
};
var bannedTermsHelpString = 'Please review viewport.js for a helper method ' +
'or mark with `/*OK*/` or `/*REVIEW*/` and consult the AMP team. ' +
'Most of the forbidden property/method access banned on the ' +
'`forbiddenTermsSrcInclusive` object can be found in ' +
'[What forces layout / reflow gist by Paul Irish]' +
'(https://gist.github.com/paulirish/5d52fb081b3570c81e3a). ' +
'These properties/methods when read/used require the browser ' +
'to have the up-to-date value to return which might possibly be an ' +
'expensive computation and could also be triggered multiple times ' +
'if we are not careful. Please mark the call with ' +
'`object./*OK*/property` if you explicitly need to read or update the ' +
'forbidden property/method or mark it with `object./*REVIEW*/property` ' +
'if you are unsure and so that it stands out in code reviews.';
var forbiddenTermsSrcInclusive = {
'\\.innerHTML(?!_)': bannedTermsHelpString,
'\\.outerHTML(?!_)': bannedTermsHelpString,
'\\.postMessage(?!_)': bannedTermsHelpString,
'\\.offsetLeft(?!_)': bannedTermsHelpString,
'\\.offsetTop(?!_)': bannedTermsHelpString,
'\\.offsetWidth(?!_)': bannedTermsHelpString,
'\\.offsetHeight(?!_)': bannedTermsHelpString,
'\\.offsetParent(?!_)': bannedTermsHelpString,
'\\.clientLeft(?!_)(?!_)': bannedTermsHelpString,
'\\.clientTop(?!_)': bannedTermsHelpString,
'\\.clientWidth(?!_)': bannedTermsHelpString,
'\\.clientHeight(?!_)': bannedTermsHelpString,
'\\.getClientRects(?!_)': bannedTermsHelpString,
'\\.getBoundingClientRect(?!_)': bannedTermsHelpString,
'\\.scrollBy(?!_)': bannedTermsHelpString,
'\\.scrollTo(?!_|p|p_)': bannedTermsHelpString,
'\\.scrollIntoView(?!_)': bannedTermsHelpString,
'\\.scrollIntoViewIfNeeded(?!_)': bannedTermsHelpString,
'\\.scrollWidth(?!_)': 'please use `getScrollWidth()` from viewport',
'\\.scrollHeight(?!_)': bannedTermsHelpString,
'\\.scrollTop(?!_)': bannedTermsHelpString,
'\\.scrollLeft(?!_)': bannedTermsHelpString,
'\\.focus(?!_)': bannedTermsHelpString,
'\\.computedRole(?!_)': bannedTermsHelpString,
'\\.computedName(?!_)': bannedTermsHelpString,
'\\.innerText(?!_)': bannedTermsHelpString,
'\\.getComputedStyle(?!_)': bannedTermsHelpString,
'\\.scrollX(?!_)': bannedTermsHelpString,
'\\.scrollY(?!_)': bannedTermsHelpString,
'\\.pageXOffset(?!_)': bannedTermsHelpString,
'\\.pageYOffset(?!_)': bannedTermsHelpString,
'\\.innerWidth(?!_)': bannedTermsHelpString,
'\\.innerHeight(?!_)': bannedTermsHelpString,
'\\.getMatchedCSSRules(?!_)': bannedTermsHelpString,
'\\.scrollingElement(?!_)': bannedTermsHelpString,
'\\.computeCTM(?!_)': bannedTermsHelpString,
'\\.getBBox(?!_)': bannedTermsHelpString,
'\\.webkitConvertPointFromNodeToPage(?!_)': bannedTermsHelpString,
'\\.webkitConvertPointFromPageToNode(?!_)': bannedTermsHelpString,
'\\.changeHeight(?!_)': bannedTermsHelpString,
'\\.changeSize(?!_)': bannedTermsHelpString,
'reject\\(\\)': {
message: 'Always supply a reason in rejections. ' +
'error.cancellation() may be applicable.',
whitelist: [
'extensions/amp-access/0.1/access-expr-impl.js',
],
}
};
// Terms that must appear in a source file.
var requiredTerms = {
'Copyright 20(15|16) The AMP HTML Authors\\.':
dedicatedCopyrightNoteSources,
'Licensed under the Apache License, Version 2\\.0':
dedicatedCopyrightNoteSources,
'http\\://www\\.apache\\.org/licenses/LICENSE-2\\.0':
dedicatedCopyrightNoteSources,
};
/**
 * Check whether a file path lives under a top-level test/ directory or
 * directly inside any folder named "test".
 * @param {string} filePath
 * @return {boolean}
 */
function isInTestFolder(filePath) {
  var segments = filePath.split('/');
  var parentDir = segments[segments.length - 2];
  if (filePath.indexOf('test/') === 0) {
    return true;
  }
  return parentDir == 'test';
}
/**
 * Remove JS comments from source text so that commented-out occurrences of
 * forbidden terms do not trigger false positives.
 * @param {string} contents
 * @return {string} contents with comments removed
 */
function stripComments(contents) {
  // Block comments (only those whose closer is not on the opening line).
  var withoutBlock = contents.replace(/\/\*(?!.*\*\/)(.|\n)*?\*\//g, '');
  // Whole-line comments preceded by nothing but whitespace.
  var withoutLine = withoutBlock.replace(/\n\s*\/\/.*/g, '');
  // Trailing comments that follow a space, semicolon, or closing brace.
  return withoutLine.replace(/( |\}|;)\s*\/\/.*/g, '$1');
}
/**
 * Logs any issues found in the contents of file based on terms (regex
 * patterns), and provides any possible fix information for matched terms if
 * possible.
 *
 * @param {!File} file a vinyl file object to scan for term matches
 * @param {!Array<string, string>} terms Pairs of regex patterns and possible
 *   fix messages.
 * @return {boolean} true if any of the terms match the file content,
 *   false otherwise
 */
function matchTerms(file, terms) {
  // Comments are stripped first so commented mentions do not count as hits.
  var contents = stripComments(file.contents.toString());
  var relative = file.relative;
  return Object.keys(terms).map(function(term) {
    var fix;
    // `terms[term]` may be a plain message string, in which case whitelist is
    // undefined and no file is exempt from the check.
    var whitelist = terms[term].whitelist;
    // NOTE: we could do a glob test instead of exact check in the future
    // if needed but that might be too permissive.
    // Test-folder files are only exempt for terms that define a whitelist.
    if (Array.isArray(whitelist) && (whitelist.indexOf(relative) != -1 ||
        isInTestFolder(relative))) {
      return false;
    }
    // we can't optimize building the `RegExp` objects early unless we build
    // another mapping of term -> regexp object to be able to get back to the
    // original term to get the possible fix value. This is ok as the
    // presubmit doesn't have to be blazing fast and this is most likely
    // negligible.
    var matches = contents.match(new RegExp(term, 'gm'));
    if (matches) {
      util.log(util.colors.red('Found forbidden: "' + matches[0] +
          '" in ' + relative));
      if (typeof terms[term] == 'string') {
        fix = terms[term];
      } else {
        fix = terms[term].message;
      }
      // log the possible fix information if provided for the term.
      if (fix) {
        util.log(util.colors.blue(fix));
      }
      util.log(util.colors.blue('=========='));
      return true;
    }
    return false;
  }).some(function(hasAnyTerm) {
    return hasAnyTerm;
  });
}
/**
 * Test if a file's contents match any of the forbidden terms.
 *
 * @param {!File} file file is a vinyl file object
 * @return {boolean} true if any of the terms match the file content,
 *   false otherwise
 */
function hasAnyTerms(file) {
  var pathname = file.path;
  var basename = path.basename(pathname);

  // Globally forbidden terms apply to every file, tests included.
  var hasTerms = matchTerms(file, forbiddenTerms);

  // Source-only terms are skipped for test files.
  var isTestFile = /^test-/.test(basename) || /^_init_tests/.test(basename);
  var hasSrcInclusiveTerms = isTestFile
      ? false
      : matchTerms(file, forbiddenTermsSrcInclusive);

  // The 3p bootstrap restrictions only apply to 3p/ads sources.
  var is3pFile = /3p|ads/.test(pathname) ||
      basename == '3p.js' ||
      basename == 'style.js';
  var has3pTerms = (is3pFile && !isTestFile)
      ? matchTerms(file, forbidden3pTerms)
      : false;

  return hasTerms || hasSrcInclusiveTerms || has3pTerms;
}
/**
 * Test if a file's contents fail to match any of the required terms and log
 * any missing terms.
 *
 * @param {!File} file file is a vinyl file object
 * @return {boolean} true if any of the terms are not matched in the file
 *   content, false otherwise
 */
function isMissingTerms(file) {
  var contents = file.contents.toString();
  // `filter` (like the original map+some) visits every term so that ALL
  // missing requirements are logged, not just the first one.
  var missingTerms = Object.keys(requiredTerms).filter(function(term) {
    var filter = requiredTerms[term];
    if (!filter.test(file.path)) {
      // This requirement does not apply to the file at hand.
      return false;
    }
    if (contents.match(new RegExp(term))) {
      return false;
    }
    util.log(util.colors.red('Did not find required: "' + term +
        '" in ' + file.relative));
    util.log(util.colors.blue('=========='));
    return true;
  });
  return missingTerms.length > 0;
}
/**
 * Check a file for all the required terms and
 * any forbidden terms and log any errors found.
 * Exits the process with status 1 when any violation is detected, failing CI.
 */
function checkForbiddenAndRequiredTerms() {
  var forbiddenFound = false;
  var missingRequirements = false;
  return gulp.src(srcGlobs)
      // Buffer all matched files so both scans run over the complete set.
      .pipe(util.buffer(function(err, files) {
        forbiddenFound = files.map(hasAnyTerms).some(function(errorFound) {
          return errorFound;
        });
        missingRequirements = files.map(isMissingTerms).some(
            function(errorFound) {
              return errorFound;
            });
      }))
      // Summarize once the whole stream has been processed.
      .on('end', function() {
        if (forbiddenFound) {
          util.log(util.colors.blue(
              'Please remove these usages or consult with the AMP team.'));
        }
        if (missingRequirements) {
          util.log(util.colors.blue(
              'Adding these terms (e.g. by adding a required LICENSE ' +
              'to the file)'));
        }
        if (forbiddenFound || missingRequirements) {
          process.exit(1);
        }
      });
}
// Register the `presubmit` gulp task so CI and developers can run the checks.
gulp.task('presubmit', 'Run validation against files to check for forbidden ' +
    'and required terms', checkForbiddenAndRequiredTerms);
| nekodo/amphtml | build-system/tasks/presubmit-checks.js | JavaScript | apache-2.0 | 18,988 |
/*
* Copyright 2011-2012 Gregory P. Moyer
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.syphr.mythtv.ws.backend.impl;
import java.io.IOException;
import java.util.Calendar;
import javax.xml.ws.BindingProvider;
import org.syphr.mythtv.data.Program;
import org.syphr.mythtv.ws.ServiceVersionException;
import org.syphr.mythtv.ws.backend.GuideService;
import org.syphr.mythtv.ws.backend.impl._0_25.guide.Guide;
import org.syphr.mythtv.ws.backend.impl._0_25.guide.GuideServices;
import org.syphr.mythtv.ws.backend.impl._0_25.guide.ProgramGuide;
import org.syphr.mythtv.ws.impl.AbstractService;
public class GuideService0_25 extends AbstractService implements GuideService
{
    /** Name of the backend web service this client binds to. */
    private static final String NAME = "Guide";

    /** Service API version this client targets (MythTV 0.25). */
    private static final String VERSION = "1.0";

    /** Generated SOAP port used for all remote calls. */
    private final Guide service;

    /**
     * The constructor. Locates the HTTP binding for the Guide service and
     * verifies that the remote service matches the expected name/version.
     *
     * @param host backend host name or IP address
     * @param port backend services port
     * @throws ServiceVersionException if the remote service version is incompatible
     * @throws IOException if the backend cannot be contacted
     */
    public GuideService0_25(String host, int port) throws ServiceVersionException, IOException
    {
        GuideServices locator = new GuideServices();
        service = locator.getBasicHttpBindingGuide();
        configureAndVerify(host, port, (BindingProvider)service);
    }

    @Override
    protected String getName()
    {
        return NAME;
    }

    @Override
    protected String getVersion()
    {
        return VERSION;
    }

    @Override
    public String getChannelIcon(Integer chanId, Integer width, Integer height)
    {
        return service.getChannelIcon(chanId, width, height);
    }

    @Override
    public Program getProgramDetails(Integer chanId, Calendar startTime)
    {
        // TODO not implemented for this service version yet; always returns
        // null until the generated binding call below can be wired up.
        return null;//service.getProgramDetails(chanId, startTime);
    }

    @Override
    public ProgramGuide getProgramGuide(Calendar startTime,
                                        Calendar endTime,
                                        Integer startChanId,
                                        Integer numChannels,
                                        Boolean details)
    {
        return service.getProgramGuide(startTime, endTime, startChanId, numChannels, details);
    }
}
| syphr42/libmythtv-java | ws/src/main/java/org/syphr/mythtv/ws/backend/impl/GuideService0_25.java | Java | apache-2.0 | 2,555 |
package com.g10.ssm.service;
import java.util.List;
import com.g10.ssm.po.LearningTaskCoursewareKey;
/**
 * Service for managing the association between learning tasks and courseware.
 */
public interface LearningTaskCoursewareService {

	/**
	 * Lists all learning-task / courseware association records.
	 *
	 * @return all association keys
	 * @throws Exception on data-access failure
	 */
	public List<LearningTaskCoursewareKey> queryLearningTaskCourseware() throws Exception;

	/**
	 * Fetches the URLs of every courseware attached to the given learning task.
	 *
	 * @param learningTaskId id of the learning task
	 * @throws Exception on data-access failure
	 */
	public String[] queryAllCoursewareUrl(Integer learningTaskId) throws Exception;

	/**
	 * Fetches the ids of every courseware attached to the given learning task.
	 *
	 * @param learningTaskId id of the learning task
	 * @throws Exception on data-access failure
	 */
	public Integer[] getAllCoursewareId(Integer learningTaskId) throws Exception;

	/*
	 * public int updateLearningTaskCourseware(LearningTaskCoursewareKey
	 * testTable) throws Exception;
	 */

	/**
	 * Persists a new learning-task / courseware association.
	 *
	 * @return number of affected rows
	 * @throws Exception on data-access failure
	 */
	public int saveLearningTaskCourseware(LearningTaskCoursewareKey learningTaskCourseware) throws Exception;

	/**
	 * Deletes a single association identified by its composite key.
	 *
	 * @return number of affected rows
	 * @throws Exception on data-access failure
	 */
	public int deleteLearningTaskCoursewareByPrimaryKey(LearningTaskCoursewareKey learningTaskCourseware)
			throws Exception;

	/**
	 * Deletes every association belonging to the given learning task.
	 *
	 * @return number of affected rows
	 * @throws Exception on data-access failure
	 */
	public int deleteLearningTaskCourseware(Integer learningTaskId) throws Exception;
}
| scaug10/NETESP | src/main/java/com/g10/ssm/service/LearningTaskCoursewareService.java | Java | apache-2.0 | 835 |
import { Component, HostListener, Inject } from '@angular/core';
import { MAT_DIALOG_DATA, MatDialogRef } from '@angular/material';
@Component({
  selector: 'cs-volume-delete-dialog',
  templateUrl: './volume-delete-dialog.component.html',
  styleUrls: ['./volume-delete-dialog.component.scss'],
})
export class VolumeDeleteDialogComponent {
  // Whether the user opted to delete the volume's snapshots along with it.
  public deleteSnapshots = false;

  constructor(
    public dialogRef: MatDialogRef<VolumeDeleteDialogComponent>,
    @Inject(MAT_DIALOG_DATA) public hasSnapshots: boolean,
  ) {}

  /** Close the dialog, flagging snapshot deletion only when opted in. */
  public confirmDestroy(): void {
    const result: { deleteSnapshots?: boolean } = this.deleteSnapshots
      ? { deleteSnapshots: true }
      : {};
    this.dialogRef.close(result);
  }

  /** Dismiss the dialog without a result when Escape is pressed. */
  @HostListener('keydown.esc')
  public onEsc(): void {
    this.dialogRef.close();
  }
}
| bwsw/cloudstack-ui | src/app/shared/actions/volume-actions/volume-delete/volume-delete-dialog.component.ts | TypeScript | apache-2.0 | 817 |
using System;
using System.Xml.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Senparc.Weixin.MP.Entities;
namespace Senparc.Weixin.MP.Test.MessageHandlers
{
    public partial class MessageHandlersTest
    {
        #region 微信认证事件推送

        /// <summary>
        /// Shared driver for WeChat verification push-event tests: parses the
        /// given request XML, executes the message handler and asserts that the
        /// parsed request message has the expected concrete type and event.
        /// </summary>
        /// <typeparam name="T">expected concrete request-message type</typeparam>
        /// <param name="xml">raw request XML as pushed by the WeChat server</param>
        /// <param name="eventType">event type the XML is expected to carry</param>
        /// <returns>the executed message handler, for further assertions</returns>
        private CustomMessageHandlers VerifyEventTest<T>(string xml, Event eventType)
            where T : RequestMessageEventBase
        {
            var messageHandlers = new CustomMessageHandlers(XDocument.Parse(xml));
            Assert.IsNotNull(messageHandlers.RequestDocument);

            messageHandlers.Execute();
            Assert.IsNotNull(messageHandlers.TextResponseMessage);

            var requestMessage = messageHandlers.RequestMessage as T;
            Assert.IsNotNull(requestMessage);
            Assert.AreEqual(eventType, requestMessage.Event);
            return messageHandlers;
        }

        // qualification_verify_success: ExpiredTime must be parsed from the
        // unix timestamp and the handler must reply "success".
        [TestMethod]
        public void QualificationVerifySuccessTest()
        {
            var xml = @"<xml><ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1442401156</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[qualification_verify_success]]></Event>
<ExpiredTime>1442401156</ExpiredTime>
</xml> ";
            var messageHandler = VerifyEventTest<RequestMessageEvent_QualificationVerifySuccess>(xml,Event.qualification_verify_success);
            var requestMessage = messageHandler.RequestMessage as RequestMessageEvent_QualificationVerifySuccess;
            Assert.AreEqual("2015-09-16 18:59:16", requestMessage.ExpiredTime.ToString("yyyy-MM-dd HH:mm:ss"));
            Assert.AreEqual("success", messageHandler.TextResponseMessage);
        }

        // qualification_verify_fail: FailTime and FailReason must be exposed.
        [TestMethod]
        public void QualificationVerifyFailTest()
        {
            var xml = @"<xml><ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1442401156</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[qualification_verify_fail]]></Event>
<FailTime>1442401156</FailTime>
<FailReason><![CDATA[by time]]></FailReason>
</xml>";
            var messageHandler = VerifyEventTest<RequestMessageEvent_QualificationVerifyFail>(xml, Event.qualification_verify_fail);
            var requestMessage = messageHandler.RequestMessage as RequestMessageEvent_QualificationVerifyFail;
            Assert.AreEqual("2015-09-16 18:59:16", requestMessage.FailTime.ToString("yyyy-MM-dd HH:mm:ss"));
            Assert.AreEqual("by time", requestMessage.FailReason);
            Assert.AreEqual("success", messageHandler.TextResponseMessage);
        }

        // naming_verify_success: ExpiredTime must be parsed correctly.
        [TestMethod]
        public void NamingVerifySuccessTest()
        {
            var xml = @"<xml><ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1442401093</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[naming_verify_success]]></Event>
<ExpiredTime>1442401156</ExpiredTime>
</xml> ";
            var messageHandler = VerifyEventTest<RequestMessageEvent_NamingVerifySuccess>(xml, Event.naming_verify_success);
            var requestMessage = messageHandler.RequestMessage as RequestMessageEvent_NamingVerifySuccess;
            Assert.AreEqual("2015-09-16 18:59:16", requestMessage.ExpiredTime.ToString("yyyy-MM-dd HH:mm:ss"));
            Assert.AreEqual("success", messageHandler.TextResponseMessage);
        }

        // naming_verify_fail: FailTime and FailReason must be exposed.
        [TestMethod]
        public void NamingVerifyFailTest()
        {
            var xml = @"<xml><ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1442401061</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[naming_verify_fail]]></Event>
<FailTime>1442401156</FailTime>
<FailReason><![CDATA[by time 2]]></FailReason>
</xml>";
            var messageHandler = VerifyEventTest<RequestMessageEvent_NamingVerifyFail>(xml, Event.naming_verify_fail);
            var requestMessage = messageHandler.RequestMessage as RequestMessageEvent_NamingVerifyFail;
            Assert.AreEqual("2015-09-16 18:59:16", requestMessage.FailTime.ToString("yyyy-MM-dd HH:mm:ss"));
            Assert.AreEqual("by time 2", requestMessage.FailReason);
            Assert.AreEqual("success", messageHandler.TextResponseMessage);
        }

        // annual_renew: ExpiredTime must be parsed correctly.
        [TestMethod]
        public void AnnualRenewTest()
        {
            var xml = @"<xml><ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1442401004</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[annual_renew]]></Event>
<ExpiredTime>1442401156</ExpiredTime>
</xml>";
            var messageHandler = VerifyEventTest<RequestMessageEvent_AnnualRenew>(xml, Event.annual_renew);
            var requestMessage = messageHandler.RequestMessage as RequestMessageEvent_AnnualRenew;
            Assert.AreEqual("2015-09-16 18:59:16", requestMessage.ExpiredTime.ToString("yyyy-MM-dd HH:mm:ss"));
            Assert.AreEqual("success", messageHandler.TextResponseMessage);
        }

        // verify_expired: ExpiredTime must be parsed correctly.
        [TestMethod]
        public void VerifyExpiredTest()
        {
            var xml = @"<xml><ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1442400900</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[verify_expired]]></Event>
<ExpiredTime>1442401156</ExpiredTime>
</xml>";
            var messageHandler = VerifyEventTest<RequestMessageEvent_VerifyExpired>(xml, Event.verify_expired);
            var requestMessage = messageHandler.RequestMessage as RequestMessageEvent_VerifyExpired;
            Assert.AreEqual("2015-09-16 18:59:16", requestMessage.ExpiredTime.ToString("yyyy-MM-dd HH:mm:ss"));
            Assert.AreEqual("success", messageHandler.TextResponseMessage);
        }

        #endregion
    }
}
| down4u/WeiXinMPSDK | src/Senparc.Weixin.MP/Senparc.Weixin.MP.Test/MessageHandlers/MessageHandlersTest.Event.cs | C# | apache-2.0 | 6,144 |
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.lang.api;
import net.sf.mmm.util.lang.api.attribute.AttributeReadValue;
/**
* This enum contains the available values for the orientation.
*
* @see Alignment
*
* @author Joerg Hohwiller (hohwille at users.sourceforge.net)
* @since 3.0.0
*/
public enum Orientation implements AttributeReadValue<String> {

  /**
   * A horizontal orientation means that objects are ordered from the left to the right.
   */
  HORIZONTAL("-", "horizontal"),

  /**
   * A vertical orientation means that objects are ordered from the top to the bottom.
   */
  VERTICAL("|", "vertical");

  /** The raw value (ascii symbol). */
  private final String value;

  /** The display title used by {@link #toString()}. */
  private final String title;

  /**
   * The constructor.
   *
   * @param value is the {@link #getValue() raw value} (symbol).
   * @param title is the {@link #toString() string representation}.
   */
  private Orientation(String value, String title) {
    this.value = value;
    this.title = title;
  }

  /**
   * @return the ascii symbol.
   */
  @Override
  public String getValue() {
    return this.value;
  }

  @Override
  public String toString() {
    return this.title;
  }

  /**
   * This method gets the {@link Orientation} with the given {@link #getValue() value}.
   *
   * @param value is the {@link #getValue() value} of the requested {@link Orientation}.
   * @return the requested {@link Orientation} or {@code null} if no such value exists.
   */
  public static Orientation fromValue(String value) {
    // renamed loop variable from "alignment" (copy/paste from Alignment enum)
    for (Orientation orientation : values()) {
      if (orientation.value.equals(value)) {
        return orientation;
      }
    }
    return null;
  }

  /**
   * This method gets the inverse orientation.
   *
   * @return {@link #VERTICAL} if this orientation is {@link #HORIZONTAL} and vice versa.
   */
  public Orientation getMirrored() {
    return (this == HORIZONTAL) ? VERTICAL : HORIZONTAL;
  }
}
| m-m-m/util | lang/src/main/java/net/sf/mmm/util/lang/api/Orientation.java | Java | apache-2.0 | 1,997 |
package io.github.mapstream;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Optional;
import java.util.Spliterator;
import java.util.function.*;
import java.util.stream.*;
class PairEntryStreamImpl<K, V> implements PairEntryStream<K, V> {
private Stream<PairEntry<K, V>> delegate;
PairEntryStreamImpl(Stream<PairEntry<K, V>> delegate) {
this.delegate = delegate;
}
@Override
public MapStream<K, V> mapStream() {
return MapStream.from(delegate);
}
/* EVERYTHING DELEGATED */
@Override
public Stream<PairEntry<K, V>> filter(Predicate<? super PairEntry<K, V>> predicate) {
return delegate.filter(predicate);
}
@Override
public <R> Stream<R> map(Function<? super PairEntry<K, V>, ? extends R> mapper) {
return delegate.map(mapper);
}
@Override
public IntStream mapToInt(ToIntFunction<? super PairEntry<K, V>> mapper) {
return delegate.mapToInt(mapper);
}
@Override
public LongStream mapToLong(ToLongFunction<? super PairEntry<K, V>> mapper) {
return delegate.mapToLong(mapper);
}
@Override
public DoubleStream mapToDouble(ToDoubleFunction<? super PairEntry<K, V>> mapper) {
return delegate.mapToDouble(mapper);
}
@Override
public <R> Stream<R> flatMap(Function<? super PairEntry<K, V>, ? extends Stream<? extends R>> mapper) {
return delegate.flatMap(mapper);
}
@Override
public IntStream flatMapToInt(Function<? super PairEntry<K, V>, ? extends IntStream> mapper) {
return delegate.flatMapToInt(mapper);
}
@Override
public LongStream flatMapToLong(Function<? super PairEntry<K, V>, ? extends LongStream> mapper) {
return delegate.flatMapToLong(mapper);
}
@Override
public DoubleStream flatMapToDouble(Function<? super PairEntry<K, V>, ? extends DoubleStream> mapper) {
return delegate.flatMapToDouble(mapper);
}
@Override
public Stream<PairEntry<K, V>> distinct() {
return delegate.distinct();
}
@Override
public Stream<PairEntry<K, V>> sorted() {
return delegate.sorted();
}
@Override
public Stream<PairEntry<K, V>> sorted(Comparator<? super PairEntry<K, V>> comparator) {
return delegate.sorted(comparator);
}
@Override
public Stream<PairEntry<K, V>> peek(Consumer<? super PairEntry<K, V>> action) {
return delegate.peek(action);
}
@Override
public Stream<PairEntry<K, V>> limit(long maxSize) {
return delegate.limit(maxSize);
}
@Override
public Stream<PairEntry<K, V>> skip(long n) {
return delegate.skip(n);
}
@Override
public void forEach(Consumer<? super PairEntry<K, V>> action) {
delegate.forEach(action);
}
@Override
public void forEachOrdered(Consumer<? super PairEntry<K, V>> action) {
delegate.forEachOrdered(action);
}
@Override
public Object[] toArray() {
return delegate.toArray();
}
@Override
public <A> A[] toArray(IntFunction<A[]> generator) {
return delegate.toArray(generator);
}
@Override
public PairEntry<K, V> reduce(PairEntry<K, V> identity, BinaryOperator<PairEntry<K, V>> accumulator) {
return delegate.reduce(identity, accumulator);
}
@Override
public Optional<PairEntry<K, V>> reduce(BinaryOperator<PairEntry<K, V>> accumulator) {
return delegate.reduce(accumulator);
}
@Override
public <U> U reduce(U identity, BiFunction<U, ? super PairEntry<K, V>, U> accumulator, BinaryOperator<U> combiner) {
return delegate.reduce(identity, accumulator, combiner);
}
@Override
public <R> R collect(Supplier<R> supplier, BiConsumer<R, ? super PairEntry<K, V>> accumulator, BiConsumer<R, R> combiner) {
return delegate.collect(supplier, accumulator, combiner);
}
@Override
public <R, A> R collect(Collector<? super PairEntry<K, V>, A, R> collector) {
return delegate.collect(collector);
}
@Override
public Optional<PairEntry<K, V>> min(Comparator<? super PairEntry<K, V>> comparator) {
return delegate.min(comparator);
}
@Override
public Optional<PairEntry<K, V>> max(Comparator<? super PairEntry<K, V>> comparator) {
return delegate.max(comparator);
}
@Override
public long count() {
return delegate.count();
}
@Override
public boolean anyMatch(Predicate<? super PairEntry<K, V>> predicate) {
return delegate.anyMatch(predicate);
}
    /** Terminal operation: delegates {@code allMatch} to the wrapped stream. */
    @Override
    public boolean allMatch(Predicate<? super PairEntry<K, V>> predicate) {
        return delegate.allMatch(predicate);
    }
    /** Terminal operation: delegates {@code noneMatch} to the wrapped stream. */
    @Override
    public boolean noneMatch(Predicate<? super PairEntry<K, V>> predicate) {
        return delegate.noneMatch(predicate);
    }
    /** Terminal operation: delegates {@code findFirst} to the wrapped stream. */
    @Override
    public Optional<PairEntry<K, V>> findFirst() {
        return delegate.findFirst();
    }
    /** Terminal operation: delegates {@code findAny} to the wrapped stream. */
    @Override
    public Optional<PairEntry<K, V>> findAny() {
        return delegate.findAny();
    }
    /** Delegates {@code iterator} to the wrapped stream. */
    @Override
    public Iterator<PairEntry<K, V>> iterator() {
        return delegate.iterator();
    }
    /** Delegates {@code spliterator} to the wrapped stream. */
    @Override
    public Spliterator<PairEntry<K, V>> spliterator() {
        return delegate.spliterator();
    }
    /** Reports whether the wrapped stream is parallel. */
    @Override
    public boolean isParallel() {
        return delegate.isParallel();
    }
    /** Delegates {@code sequential} to the wrapped stream; the returned stream is the delegate's, not this wrapper. */
    @Override
    public Stream<PairEntry<K, V>> sequential() {
        return delegate.sequential();
    }
    /** Delegates {@code parallel} to the wrapped stream; the returned stream is the delegate's, not this wrapper. */
    @Override
    public Stream<PairEntry<K, V>> parallel() {
        return delegate.parallel();
    }
    /** Delegates {@code unordered} to the wrapped stream. */
    @Override
    public Stream<PairEntry<K, V>> unordered() {
        return delegate.unordered();
    }
    /** Registers a close handler on the wrapped stream. */
    @Override
    public Stream<PairEntry<K, V>> onClose(Runnable closeHandler) {
        return delegate.onClose(closeHandler);
    }
    /** Closes the wrapped stream, running any registered close handlers. */
    @Override
    public void close() {
        delegate.close();
    }
@Override
public String toString() {
return "PairEntryStream{" +
"delegate=" + delegate +
'}';
}
}
| mapstream/mapstream | src/main/java/io/github/mapstream/PairEntryStreamImpl.java | Java | apache-2.0 | 6,109 |
package com.github.bingoohuang.excel2beans.annotations;
public enum MergeType {
    /**
     * Merge cells directly, regardless of their values.
     */
    Direct,
    /**
     * Merge only adjacent cells that hold the same value.
     */
    SameValue,
}
| bingoohuang/excel2javabeans | src/main/java/com/github/bingoohuang/excel2beans/annotations/MergeType.java | Java | apache-2.0 | 194 |
/*******************************************************************************
* Copyright (C) 2014 Philipp B. Costa
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package br.ufc.mdcc.mpos.net.profile;
import java.io.IOException;
import java.util.Random;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Semaphore;
import android.util.Log;
import br.ufc.mdcc.mpos.net.Protocol;
import br.ufc.mdcc.mpos.net.core.ClientAbstract;
import br.ufc.mdcc.mpos.net.core.FactoryClient;
import br.ufc.mdcc.mpos.net.core.ReceiveDataEvent;
import br.ufc.mdcc.mpos.net.endpoint.ServerContent;
import br.ufc.mdcc.mpos.net.exceptions.MissedEventException;
import br.ufc.mdcc.mpos.net.profile.model.Network;
import br.ufc.mdcc.mpos.util.TaskResult;
import br.ufc.mdcc.mpos.util.Util;
/**
* This implement a full profile client.
*
* @author Philipp B. Costa
*/
public final class ProfileNetworkFull extends ProfileNetworkTask {
    // 32 KB payload that is repeatedly sent during the upload bandwidth test.
    private byte data[] = new byte[32 * 1024];
    // Accumulates every measurement (pings, packet loss, jitter, bandwidth) for this run.
    private Network network;
    // Flipped to true by the receive callback once the server reports the upload result ("end_session").
    private boolean bandwidthDone = false;
    public ProfileNetworkFull(TaskResult<Network> result, ServerContent server) throws MissedEventException {
        super(server, result, ProfileNetworkFull.class, "ProfileFull Started on endpoint: " + server.getIp());
        // Fill the payload with random bytes so the upload test data is not trivially compressible.
        new Random().nextBytes(data);
    }
    /**
     * Runs the full network profile against the configured endpoint.
     *
     * Feedback code:
     * 15 -> Finished Ping TCP Test
     * 30 -> Finished Ping UDP Test
     * 35 -> Finished Ping Test with packet loss
     * 50 -> Finished Jitter Calculation
     * 55 -> Start Donwload Test
     * 75 -> Start Upload Test
     * 100 -> Finished Conection Test
     *
     * @return the populated {@link Network} result, or null when cancelled, timed out, or on error
     */
    @Override
    protected Network doInBackground(Void... params) {
        network = new Network();
        try {
            Log.i(clsName, "ping tcp");
            long[] pings = pingService(Protocol.TCP_EVENT);
            network.setResultPingTcp(pings);
            publishProgress(15);
            Log.i(clsName, "ping udp");
            pings = pingService(Protocol.UDP_EVENT);
            network.setResultPingUdp(pings);
            publishProgress(30);
            Log.i(clsName, "loss packet udp");
            // Count the lost UDP packets.
            if (halted) {
                return null;
            }
            network.setLossPacket(lossPacketCalculation(network));
            publishProgress(35);
            Log.i(clsName, "jitter calculation");
            jitterCalculation();
            if (halted) {
                return null;
            }
            retrieveJitterResult();
            publishProgress(50);
            Log.i(clsName, "bandwidth calculation");
            boolean finish = bandwidthCalculation();
            publishProgress(100);
            // The task was cancelled, or it was stopped by a timer.
            if (halted || !finish) {
                return null;
            }
            Log.d(clsName, "ProfileFull Finished");
            return network;
        } catch (InterruptedException e) {
            Log.w(clsName, e);
        } catch (IOException e) {
            Log.e(clsName, e.getMessage(), e);
        } catch (MissedEventException e) {
            Log.e(clsName, e.getMessage(), e);
        }
        publishProgress(100);
        return null;
    }
    /**
     * Definition: RFC 4689 defines jitter as "the absolute value of the difference between the Forwarding
     * Delay of two consecutive received packets belonging to the same stream". Jitter matters for real-time
     * communication (e.g. VoIP), where delay variation degrades service quality.
     * Reference: http://tools.ietf.org/html/rfc4689#section-3.2.5
     * In short: packets are sent at a constant rate and the server computes, per arrival,
     * interval = arrival_time - previous_arrival_time and jitter = current_interval - previous_interval
     * (average, maximum and minimum can be derived). As a guideline for VoIP, ~15 ms jitter is fair,
     * below 5 ms is excellent and above 15 ms is poor. See: http://www.onsip.com/tools/voip-test
     *
     * @throws MissedEventException
     * @throws IOException
     * @throws InterruptedException
     */
    private void jitterCalculation() throws IOException, MissedEventException, InterruptedException {
        ClientAbstract client = FactoryClient.getInstance(Protocol.UDP_EVENT);
        client.setReceiveDataEvent(new ReceiveDataEvent() {
            @Override
            public void receive(byte[] data, int offset, int read) {
                Log.d(clsName, "Jitter Finish");
            }
        });
        client.connect(server.getIp(), server.getJitterTestPort());
        for (int i = 0; i < 21; i++) {
            client.send(("jitter").getBytes());
            // Wait 250 ms between datagrams,
            // because UDP has no flow control.
            Thread.sleep(250);
        }
        client.close();
    }
    /**
     * Fetches the jitter value computed by the server over TCP and stores it in {@link #network}.
     * Blocks until the server replies (semaphore released by the receive callback).
     */
    private void retrieveJitterResult() throws IOException, MissedEventException, InterruptedException {
        Thread.sleep(1500);
        final Semaphore mutex = new Semaphore(0);
        ClientAbstract client = FactoryClient.getInstance(Protocol.TCP_EVENT);
        client.setReceiveDataEvent(new ReceiveDataEvent() {
            @Override
            public void receive(byte[] data, int offset, int read) {
                Log.d(clsName, "Retrieve data from server for Jitter calcule");
                network.setJitter(Integer.parseInt(new String(data, offset, read)));
                // System.out.println(results.getJitter());
                mutex.release();
            }
        });
        client.connect(server.getIp(), server.getJitterRetrieveResultPort());
        client.send("get".getBytes());
        mutex.acquire();
        client.close();
    }
    /**
     * Runs the download then upload bandwidth tests, guarded by a 120 s timeout timer.
     *
     * @return true when both tests completed; false on cancellation or timeout
     */
    private boolean bandwidthCalculation() throws IOException, MissedEventException, InterruptedException {
        final Semaphore mutex = new Semaphore(0);
        //begin download
        publishProgress(55);
        ClientAbstract client = FactoryClient.getInstance(Protocol.TCP_EVENT);
        client.setReceiveDataEvent(new ReceiveDataEvent() {
            private long countBytes = 0L;
            private byte endDown[] = "end_down".getBytes();
            private byte endSession[] = "end_session".getBytes();
            @Override
            public void receive(byte[] data, int offset, int read) {
                countBytes += (long) read;
                if (Util.containsArrays(data, endDown)) {
                    // System.out.println("Bytes: "+countBytes);
                    // bytes * 8bits / 7s * 1E+6 = X Mbits
                    double bandwidth = ((double) (countBytes * 8L) / (double) (7.0 * 1E+6));
                    network.setBandwidthDownload(String.valueOf(bandwidth));
                    countBytes = 0L;
                    mutex.release();
                } else if (Util.containsArrays(data, endSession)) {
                    bandwidthDone = true;
                    String dataBlock = new String(data, offset, read);
                    String res[] = dataBlock.split(":");
                    network.setBandwidthUpload(res[1]);
                    mutex.release();
                }
            }
        });
        // timer for finish!
        Timer timeout = new Timer("Timeout Bandwidth");
        timeout.schedule(new TimerTask() {
            @Override
            public void run() {
                if (!bandwidthDone) {
                    // Release twice to guarantee no semaphore stays blocked.
                    mutex.release();
                    mutex.release();
                    Log.i(clsName, "Bandwidth Timeout...");
                }
            }
        }, 120000);// 120 s timeout
        Log.i(clsName, "bandwidth (download)");
        client.connect(server.getIp(), server.getBandwidthPort());
        client.send("down".getBytes());
        //wait finish the down...
        mutex.acquire();
        //begin upload
        publishProgress(75);
        if (halted) {
            timeout.cancel();
            return false;
        }
        Log.i(clsName, "bandwidth (upload)");
        client.send("up".getBytes());
        // Upload for 11 seconds.
        long timeExit = System.currentTimeMillis() + 11000;
        while (System.currentTimeMillis() < timeExit) {
            client.send(data);
        }
        client.send("end_up".getBytes());
        Log.i(clsName, "bandwidth (ended upload)");
        mutex.acquire();
        client.close();
        // Cancel the timeout timer.
        timeout.cancel();
        return bandwidthDone;
    }
} | ufc-great/mpos | android/MpOS API/src/br/ufc/mdcc/mpos/net/profile/ProfileNetworkFull.java | Java | apache-2.0 | 8,129 |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using Microsoft.Azure.Commands.Compute.Automation.Models;
using Microsoft.Azure.Management.Compute;
using Microsoft.Azure.Management.Compute.Models;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
namespace Microsoft.Azure.Commands.Compute.Automation
{
    /// <summary>
    /// Generated partial: declares the dynamic parameters for, and executes, the Snapshot.Delete operation.
    /// </summary>
    public partial class InvokeAzureComputeMethodCmdlet : ComputeAutomationBaseCmdlet
    {
        /// <summary>
        /// Builds the runtime parameter set (ResourceGroupName, SnapshotName, ArgumentList)
        /// used when this cmdlet is invoked for the snapshot delete method.
        /// </summary>
        protected object CreateSnapshotDeleteDynamicParameters()
        {
            dynamicParameters = new RuntimeDefinedParameterDictionary();
            var pResourceGroupName = new RuntimeDefinedParameter();
            pResourceGroupName.Name = "ResourceGroupName";
            pResourceGroupName.ParameterType = typeof(string);
            pResourceGroupName.Attributes.Add(new ParameterAttribute
            {
                ParameterSetName = "InvokeByDynamicParameters",
                Position = 1,
                Mandatory = true
            });
            pResourceGroupName.Attributes.Add(new AllowNullAttribute());
            dynamicParameters.Add("ResourceGroupName", pResourceGroupName);
            var pSnapshotName = new RuntimeDefinedParameter();
            pSnapshotName.Name = "SnapshotName";
            pSnapshotName.ParameterType = typeof(string);
            pSnapshotName.Attributes.Add(new ParameterAttribute
            {
                ParameterSetName = "InvokeByDynamicParameters",
                Position = 2,
                Mandatory = true
            });
            pSnapshotName.Attributes.Add(new AllowNullAttribute());
            dynamicParameters.Add("SnapshotName", pSnapshotName);
            var pArgumentList = new RuntimeDefinedParameter();
            pArgumentList.Name = "ArgumentList";
            pArgumentList.ParameterType = typeof(object[]);
            pArgumentList.Attributes.Add(new ParameterAttribute
            {
                ParameterSetName = "InvokeByStaticParameters",
                Position = 3,
                Mandatory = true
            });
            pArgumentList.Attributes.Add(new AllowNullAttribute());
            dynamicParameters.Add("ArgumentList", pArgumentList);
            return dynamicParameters;
        }
        /// <summary>
        /// Executes Snapshot.Delete with the two positional inputs
        /// (resource group name, snapshot name) and writes the result to the pipeline.
        /// </summary>
        protected void ExecuteSnapshotDeleteMethod(object[] invokeMethodInputParameters)
        {
            string resourceGroupName = (string)ParseParameter(invokeMethodInputParameters[0]);
            string snapshotName = (string)ParseParameter(invokeMethodInputParameters[1]);
            var result = SnapshotsClient.Delete(resourceGroupName, snapshotName);
            WriteObject(result);
        }
    }
    /// <summary>
    /// Generated partial: produces an empty argument template for the Snapshot.Delete method.
    /// </summary>
    public partial class NewAzureComputeArgumentListCmdlet : ComputeAutomationBaseCmdlet
    {
        /// <summary>
        /// Returns placeholder PSArgument entries (ResourceGroupName, SnapshotName)
        /// the caller fills in before invoking the delete method.
        /// </summary>
        protected PSArgument[] CreateSnapshotDeleteParameters()
        {
            string resourceGroupName = string.Empty;
            string snapshotName = string.Empty;
            return ConvertFromObjectsToArguments(
                new string[] { "ResourceGroupName", "SnapshotName" },
                new object[] { resourceGroupName, snapshotName });
        }
    }
    /// <summary>
    /// Remove-AzureRmSnapshot: deletes a managed disk snapshot after ShouldProcess/Force confirmation.
    /// </summary>
    [Cmdlet(VerbsCommon.Remove, "AzureRmSnapshot", DefaultParameterSetName = "DefaultParameter", SupportsShouldProcess = true)]
    [OutputType(typeof(PSOperationStatusResponse))]
    public partial class RemoveAzureRmSnapshot : ComputeAutomationBaseCmdlet
    {
        public override void ExecuteCmdlet()
        {
            ExecuteClientAction(() =>
            {
                // Confirm via ShouldProcess; -Force suppresses the extra removal prompt.
                if (ShouldProcess(this.SnapshotName, VerbsCommon.Remove)
                    && (this.Force.IsPresent ||
                        this.ShouldContinue(Properties.Resources.ResourceRemovalConfirmation,
                                            "Remove-AzureRmSnapshot operation")))
                {
                    string resourceGroupName = this.ResourceGroupName;
                    string snapshotName = this.SnapshotName;
                    var result = SnapshotsClient.Delete(resourceGroupName, snapshotName);
                    // Map the SDK response onto the PowerShell-facing status object.
                    var psObject = new PSOperationStatusResponse();
                    ComputeAutomationAutoMapperProfile.Mapper.Map<Azure.Management.Compute.Models.OperationStatusResponse, PSOperationStatusResponse>(result, psObject);
                    WriteObject(psObject);
                }
            });
        }
        /// <summary>The resource group containing the snapshot.</summary>
        [Parameter(
            ParameterSetName = "DefaultParameter",
            Position = 1,
            Mandatory = true,
            ValueFromPipelineByPropertyName = true,
            ValueFromPipeline = false)]
        [AllowNull]
        [ResourceManager.Common.ArgumentCompleters.ResourceGroupCompleter()]
        public string ResourceGroupName { get; set; }
        /// <summary>The name of the snapshot to delete.</summary>
        [Parameter(
            ParameterSetName = "DefaultParameter",
            Position = 2,
            Mandatory = true,
            ValueFromPipelineByPropertyName = true,
            ValueFromPipeline = false)]
        [Alias("Name")]
        [AllowNull]
        public string SnapshotName { get; set; }
        /// <summary>Skip the confirmation prompt.</summary>
        [Parameter(
            ParameterSetName = "DefaultParameter",
            Mandatory = false)]
        [AllowNull]
        public SwitchParameter Force { get; set; }
        [Parameter(Mandatory = false, HelpMessage = "Run cmdlet in the background")]
        public SwitchParameter AsJob { get; set; }
    }
}
| naveedaz/azure-powershell | src/ResourceManager/Compute/Commands.Compute/Generated/Snapshot/SnapshotDeleteMethod.cs | C# | apache-2.0 | 6,217 |
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import re
import json
import copy
import time
from collections import defaultdict, OrderedDict
import requests
import _jsonnet # pylint: disable=import-error
# Endpoints scraped for legacy/previous-generation Linux on-demand prices.
LINUX_PRICING_URLS = [
    # Deprecated instances (JSON format)
    'https://aws.amazon.com/ec2/pricing/json/linux-od.json',
    # Previous generation instances (JavaScript file)
    'https://a0.awsstatic.com/pricing/1/ec2/previous-generation/linux-od.min.js',
    # New generation instances (JavaScript file)
    # Using other endpoint atm
    # 'https://a0.awsstatic.com/pricing/1/ec2/linux-od.min.js'
]
# Regions queried against the calculator.aws pricing endpoint.
EC2_REGIONS = [
    'us-east-1',
    'us-east-2',
    'us-west-1',
    'us-west-2',
    'us-gov-west-1',
    'eu-west-1',
    'eu-west-2',
    'eu-west-3',
    'eu-north-1',
    'eu-central-1',
    'ca-central-1',
    'ap-southeast-1',
    'ap-southeast-2',
    'ap-northeast-1',
    'ap-northeast-2',
    'ap-south-1',
    'sa-east-1',
    'cn-north-1',
]
# Known EC2 instance-type identifiers. Fixed 'i3.16large' -> 'i3.16xlarge'
# (every other size in the family uses the 'xlarge' suffix).
EC2_INSTANCE_TYPES = [
    't1.micro',
    'm1.small',
    'm1.medium',
    'm1.large',
    'm1.xlarge',
    'm2.xlarge',
    'm2.2xlarge',
    'm2.4xlarge',
    'm3.medium',
    'm3.large',
    'm3.xlarge',
    'm3.2xlarge',
    'c1.medium',
    'c1.xlarge',
    'cc1.4xlarge',
    'cc2.8xlarge',
    'c3.large',
    'c3.xlarge',
    'c3.2xlarge',
    'c3.4xlarge',
    'c3.8xlarge',
    'd2.xlarge',
    'd2.2xlarge',
    'd2.4xlarge',
    'd2.8xlarge',
    'cg1.4xlarge',
    'g2.2xlarge',
    'g2.8xlarge',
    'cr1.8xlarge',
    'hs1.4xlarge',
    'hs1.8xlarge',
    'i2.xlarge',
    'i2.2xlarge',
    'i2.4xlarge',
    'i2.8xlarge',
    'i3.large',
    'i3.xlarge',
    'i3.2xlarge',
    'i3.4xlarge',
    'i3.8xlarge',
    'i3.16xlarge',
    'r3.large',
    'r3.xlarge',
    'r3.2xlarge',
    'r3.4xlarge',
    'r3.8xlarge',
    'r4.large',
    'r4.xlarge',
    'r4.2xlarge',
    'r4.4xlarge',
    'r4.8xlarge',
    'r4.16xlarge',
    't2.micro',
    't2.small',
    't2.medium',
    't2.large',
    'x1.32xlarge'
]
# Maps EC2 region name to region name used in the pricing file
REGION_NAME_MAP = {
    'us-east': 'ec2_us_east',
    'us-east-1': 'ec2_us_east',
    'us-east-2': 'ec2_us_east_ohio',
    'us-west': 'ec2_us_west',
    'us-west-1': 'ec2_us_west',
    'us-west-2': 'ec2_us_west_oregon',
    'eu-west-1': 'ec2_eu_west',
    'eu-west-2': 'ec2_eu_west_london',
    'eu-west-3': 'ec2_eu_west_3',
    'eu-ireland': 'ec2_eu_west',
    'eu-central-1': 'ec2_eu_central',
    'ca-central-1': 'ec2_ca_central_1',
    'apac-sin': 'ec2_ap_southeast',
    'ap-southeast-1': 'ec2_ap_southeast',
    'apac-syd': 'ec2_ap_southeast_2',
    'ap-southeast-2': 'ec2_ap_southeast_2',
    'apac-tokyo': 'ec2_ap_northeast',
    'ap-northeast-1': 'ec2_ap_northeast',
    'ap-northeast-2': 'ec2_ap_northeast',
    'ap-south-1': 'ec2_ap_south_1',
    'sa-east-1': 'ec2_sa_east',
    'us-gov-west-1': 'ec2_us_govwest',
    'cn-north-1': 'ec2_cn_north',
}
# Size suffixes in ascending order; used to rank alphabetic sort-key parts.
INSTANCE_SIZES = [
    'micro',
    'small',
    'medium',
    'large',
    'xlarge',
    'x-large',
    'extra-large'
]
# Splits a key into runs of digits, letters, and everything else for natural sorting.
RE_NUMERIC_OTHER = re.compile(r'(?:([0-9]+)|([-A-Z_a-z]+)|([^-0-9A-Z_a-z]+))')
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
PRICING_FILE_PATH = os.path.join(BASE_PATH, '../libcloud/data/pricing.json')
PRICING_FILE_PATH = os.path.abspath(PRICING_FILE_PATH)
def scrape_ec2_pricing():
    """
    Scrape EC2 on-demand prices from the legacy pricing endpoints and the
    calculator.aws API.

    :return: mapping of pricing-driver name ('ec2_linux', 'ec2_windows') to
             {instance_type: {region: price}}.
    """
    result = defaultdict(OrderedDict)
    os_map = {'linux': 'ec2_linux', 'windows-std': 'ec2_windows'}
    for item in os_map.values():
        result[item] = {}
    # Pass 1: legacy/previous-generation Linux prices (JSON or JSONP payloads).
    for url in LINUX_PRICING_URLS:
        response = requests.get(url)
        if re.match(r'.*?\.json$', url):
            data = response.json()
            print("Sample response: %s..." % (str(data)[:100]))
        elif re.match(r'.*?\.js$', url):
            # Strip the JSONP callback wrapper before parsing.
            data = response.content.decode('utf-8')
            print("Sample response: %s..." % (data[:100]))
            match = re.match(r'^.*callback\((.*?)\);?$', data,
                             re.MULTILINE | re.DOTALL)
            data = match.group(1)
            # NOTE: We used to use demjson, but it's not working under Python 3 and new version of
            # setuptools anymore so we use jsonnet
            # demjson supports non-strict mode and can parse unquoted objects
            data = json.loads(_jsonnet.evaluate_snippet('snippet', data))
        regions = data['config']['regions']
        for region_data in regions:
            region_name = region_data['region']
            instance_types = region_data['instanceTypes']
            for instance_type in instance_types:
                sizes = instance_type['sizes']
                for size in sizes:
                    if not result['ec2_linux'].get(size['size'], False):
                        result['ec2_linux'][size['size']] = {}
                    price = size['valueColumns'][0]['prices']['USD']
                    if str(price).lower() == 'n/a':
                        # Price not available
                        continue
                    result['ec2_linux'][size['size']][
                        region_name] = float(price)
    # Pass 2: current-generation prices per OS/region from calculator.aws.
    res = defaultdict(OrderedDict)
    url = ('https://calculator.aws/pricing/1.0/'
           'ec2/region/{}/ondemand/{}/index.json')
    instances = set()
    for OS in ['linux', 'windows-std']:
        res[os_map[OS]] = {}
        for region in EC2_REGIONS:
            res[os_map[OS]][region] = {}
            full_url = url.format(region, OS)
            response = requests.get(full_url)
            if response.status_code != 200:
                # Some region/OS combinations are not published; skip them.
                print("Skipping URL %s which returned non 200-status code (%s)" %
                      (full_url, response.status_code))
                continue
            data = response.json()
            for entry in data['prices']:
                instance_type = entry['attributes'].get(
                    'aws:ec2:instanceType', "")
                instances.add(instance_type)
                price = entry['price'].get('USD', 0)
                res[os_map[OS]][region][instance_type] = price
    # Merge pass 2 into the result, keyed as instance -> region -> price.
    for item in os_map.values():
        for instance in instances:
            if not result[item].get(instance, False):
                result[item][instance] = {}
            for region in EC2_REGIONS:
                if res[item][region].get(instance, False):
                    result[item][instance][region] = float(res[
                        item][region][instance])
    return result
def update_pricing_file(pricing_file_path, pricing_data):
    """
    Merge ``pricing_data`` into the 'compute' section of the pricing JSON file
    at ``pricing_file_path``, bumping the 'updated' timestamp only when the
    data actually changed.
    """
    with open(pricing_file_path, 'r') as fp:
        content = fp.read()
    data = json.loads(content)
    original_data = copy.deepcopy(data)
    data['compute'].update(pricing_data)
    if data == original_data:
        # Nothing has changed, bail out early and don't update "updated" attribute
        print("Nothing has changed, skipping update.")
        return
    data['updated'] = int(time.time())
    # Always sort the pricing info
    data = sort_nested_dict(data)
    content = json.dumps(data, indent=4)
    # Strip trailing whitespace so the generated file stays diff-friendly.
    lines = content.splitlines()
    lines = [line.rstrip() for line in lines]
    content = '\n'.join(lines)
    with open(pricing_file_path, 'w') as fp:
        fp.write(content)
def sort_nested_dict(value):
    """
    Recursively sort a nested dict.

    Returns an OrderedDict whose keys (at every nesting level) are ordered
    by :func:`sort_key_by_numeric_other`.
    """
    sorted_copy = OrderedDict()
    for key, child in sorted(value.items(), key=sort_key_by_numeric_other):
        if isinstance(child, (dict, OrderedDict)):
            sorted_copy[key] = sort_nested_dict(child)
        else:
            sorted_copy[key] = child
    return sorted_copy
def sort_key_by_numeric_other(key_value):
    """
    Split key into numeric, alpha and other part and sort accordingly.

    Numeric runs compare as integers (missing -> -1); alphabetic runs that
    name a known instance size compare by their rank in INSTANCE_SIZES.
    """
    parts = []
    for numeric, alpha, other in RE_NUMERIC_OTHER.findall(key_value[0]):
        num = int(numeric) if numeric else -1
        if alpha in INSTANCE_SIZES:
            alpha = INSTANCE_SIZES.index(alpha)
        parts.append((num, str(alpha), other))
    return tuple(parts)
def main():
    # Scrape all prices, then write them into the on-disk pricing file.
    print('Scraping EC2 pricing data (this may take up to 2 minutes)....')
    pricing_data = scrape_ec2_pricing()
    update_pricing_file(pricing_file_path=PRICING_FILE_PATH,
                        pricing_data=pricing_data)
    print('Pricing data updated')
if __name__ == '__main__':
main()
| Kami/libcloud | contrib/scrape-ec2-prices.py | Python | apache-2.0 | 9,128 |
package com.sap.mlt.xliff12.impl.attribute;
import com.sap.mlt.xliff12.api.attribute.PropType;
import com.sap.mlt.xliff12.impl.base.XliffAttributeImpl;
/**
* @deprecated
*/
public class PropTypeImpl extends XliffAttributeImpl implements PropType {
    /**
     * Creates the attribute with the given value, using the NAME constant
     * inherited from the {@link PropType} interface as the attribute name.
     *
     * @param propType the attribute value
     */
    public PropTypeImpl(String propType) {
        super(NAME, propType);
    }
}
| SAP/xliff-1-2 | com.sap.mlt.xliff12.impl/src/main/java/com/sap/mlt/xliff12/impl/attribute/PropTypeImpl.java | Java | apache-2.0 | 340 |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="MultipleSegmentUploaderTests.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Unit tests for the MultipleSegmentUploader class.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Azure.Management.DataLake.StoreUploader.Tests
{
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using Microsoft.Azure.Management.DataLake.StoreUploader;
using Xunit;
    [SuppressMessage("StyleCop.CSharp.NamingRules", "*")]
    [SuppressMessage("StyleCop.CSharp.LayoutRules", "*")]
    [SuppressMessage("StyleCop.CSharp.MaintainabilityRules", "*")]
    [SuppressMessage("StyleCop.CSharp.OrderingRules", "*")]
    [SuppressMessage("StyleCop.CSharp.ReadabilityRules", "*")]
    [SuppressMessage("StyleCop.CSharp.SpacingRules", "*")]
    [SuppressMessage("StyleCop.CSharp.DocumentationRules", "*")]
    public class MultipleSegmentUploaderTests : IDisposable
    {
        // Fixed-size payload backing the temp file each test uploads.
        private readonly byte[] _smallFileContents = new byte[10 * 1024]; //10KB file
        private string _smallFilePath;
        #region Test Setup & Teardown
        public MultipleSegmentUploaderTests()
        {
            GenerateFileData(_smallFileContents, ref _smallFilePath);
        }
        // Fills 'contents' with deterministic pseudo-random bytes (seed 0) and writes them to a fresh temp file.
        private void GenerateFileData(byte[] contents, ref string filePath)
        {
            filePath = Path.GetTempFileName();
            var rnd = new Random(0);
            rnd.NextBytes(contents);
            if (File.Exists(filePath))
            {
                File.Delete(filePath);
            }
            File.WriteAllBytes(filePath, contents);
        }
        public void Dispose()
        {
            if (File.Exists(_smallFilePath))
            {
                File.Delete(_smallFilePath);
            }
        }
        #endregion
        #region Tests
        /// <summary>
        /// Tests an uneventful upload from scratch made of 1 segment.
        /// </summary>
        [Fact]
        public void MultipleSegmentUploader_OneSegment()
        {
            var fe = new InMemoryFrontEnd();
            var metadata = CreateMetadata(1);
            try
            {
                var msu = new MultipleSegmentUploader(metadata, 1, fe);
                msu.UseSegmentBlockBackOffRetryStrategy = false;
                msu.Upload();
                VerifyTargetStreamsAreComplete(metadata, fe);
            }
            finally
            {
                metadata.DeleteFile();
            }
        }
        /// <summary>
        /// Tests an uneventful upload from scratch made of several segments
        /// </summary>
        [Fact]
        public void MultipleSegmentUploader_MultipleSegments()
        {
            var fe = new InMemoryFrontEnd();
            var metadata = CreateMetadata(10);
            try
            {
                var msu = new MultipleSegmentUploader(metadata, 1, fe);
                msu.UseSegmentBlockBackOffRetryStrategy = false;
                msu.Upload();
                VerifyTargetStreamsAreComplete(metadata, fe);
            }
            finally
            {
                metadata.DeleteFile();
            }
        }
        /// <summary>
        /// Tests an uneventful upload from scratch made of several segments
        /// </summary>
        [Fact]
        public void MultipleSegmentUploader_MultipleSegmentsAndMultipleThreads()
        {
            var fe = new InMemoryFrontEnd();
            var metadata = CreateMetadata(10);
            int threadCount = metadata.SegmentCount * 10; //intentionally setting this higher than the # of segments
            try
            {
                var msu = new MultipleSegmentUploader(metadata, threadCount, fe);
                msu.UseSegmentBlockBackOffRetryStrategy = false;
                msu.Upload();
                VerifyTargetStreamsAreComplete(metadata, fe);
            }
            finally
            {
                metadata.DeleteFile();
            }
        }
        /// <summary>
        /// Tests an uneventful upload from resume made of several segments
        /// </summary>
        [Fact]
        public void MultipleSegmentUploader_ResumedUploadWithMultipleSegments()
        {
            //the strategy here is to upload everything, then delete a set of the segments, and verify that a resume will pick up the slack
            var fe = new InMemoryFrontEnd();
            var metadata = CreateMetadata(10);
            try
            {
                var msu = new MultipleSegmentUploader(metadata, 1, fe);
                msu.UseSegmentBlockBackOffRetryStrategy = false;
                msu.Upload();
                VerifyTargetStreamsAreComplete(metadata, fe);
                //delete about 50% of segments
                for (int i = 0; i < metadata.SegmentCount; i++)
                {
                    var currentSegment = metadata.Segments[i];
                    if (i % 2 == 0)
                    {
                        currentSegment.Status = SegmentUploadStatus.Pending;
                        fe.DeleteStream(currentSegment.Path);
                    }
                }
                //re-upload everything
                msu = new MultipleSegmentUploader(metadata, 1, fe);
                msu.Upload();
                VerifyTargetStreamsAreComplete(metadata, fe);
            }
            finally
            {
                metadata.DeleteFile();
            }
        }
        /// <summary>
        /// Tests an upload made of several segments, where
        /// * some fail a couple of times => upload can finish.
        /// * some fail too many times => upload will not finish
        /// </summary>
        [Fact]
        public void MultipleSegmentUploader_SegmentInstability()
        {
            TestRetry(0);
            TestRetry(1);
            TestRetry(2);
            TestRetry(3);
            TestRetry(4);
            TestRetry(5);
        }
        // Simulates 'segmentFailCount' whole-segment failures; the upload must succeed
        // exactly when that count is below MultipleSegmentUploader.MaxUploadAttemptCount.
        private void TestRetry(int segmentFailCount)
        {
            //we only have access to the underlying FrontEnd, so we need to simulate many exceptions in order to force a segment to fail the upload (multiply by SingleSegmentUploader.MaxBufferUploadAttemptAccount)
            //this only works because we have a small file, which we know will fit in only one buffer (for a larger file, more complex operations are necessary)
            int actualfailCount = segmentFailCount * SingleSegmentUploader.MaxBufferUploadAttemptCount;
            bool expectSuccess = segmentFailCount < MultipleSegmentUploader.MaxUploadAttemptCount;
            int callCount = 0;
            //create a mock front end sitting on top of a working front end that simulates some erros for some time
            var workingFrontEnd = new InMemoryFrontEnd();
            var fe = new MockableFrontEnd(workingFrontEnd);
            fe.CreateStreamImplementation =
                (streamPath, overwrite, data, byteCount) =>
                {
                    callCount++;
                    if (callCount <= actualfailCount)
                    {
                        throw new IntentionalException();
                    }
                    workingFrontEnd.CreateStream(streamPath, overwrite, data, byteCount);
                };
            fe.AppendToStreamImplementation =
                (streamPath, data, offset, byteCount) =>
                {
                    callCount++;
                    if (callCount <= actualfailCount)
                    {
                        throw new IntentionalException();
                    }
                    workingFrontEnd.AppendToStream(streamPath, data, offset, byteCount);
                };
            var metadata = CreateMetadata(1);
            try
            {
                var msu = new MultipleSegmentUploader(metadata, 1, fe);
                msu.UseSegmentBlockBackOffRetryStrategy = false;
                if (expectSuccess)
                {
                    //the Upload method should not throw any exceptions in this case
                    msu.Upload();
                    //if we are expecting success, verify that both the metadata and the target streams are complete
                    VerifyTargetStreamsAreComplete(metadata, workingFrontEnd);
                }
                else
                {
                    //the Upload method should throw an aggregate exception in this case
                    Assert.Throws<AggregateException>(() => { msu.Upload(); });
                    //if we do not expect success, verify that at least 1 segment was marked as Failed
                    Assert.True(metadata.Segments.Any(s => s.Status == SegmentUploadStatus.Failed), "Could not find any failed segments");
                    //for every other segment, verify it was completed OK
                    foreach (var segment in metadata.Segments.Where(s => s.Status != SegmentUploadStatus.Failed))
                    {
                        VerifyTargetStreamIsComplete(segment, metadata, workingFrontEnd);
                    }
                }
            }
            finally
            {
                metadata.DeleteFile();
            }
        }
        #endregion
        #region Test helpers
        // Asserts every segment in 'metadata' was fully and correctly uploaded to 'fe'.
        private void VerifyTargetStreamsAreComplete(UploadMetadata metadata, InMemoryFrontEnd fe)
        {
            foreach (var segment in metadata.Segments)
            {
                VerifyTargetStreamIsComplete(segment, metadata, fe);
            }
        }
        // Asserts a single segment's status, length, and byte-for-byte contents in the front end.
        private void VerifyTargetStreamIsComplete(UploadSegmentMetadata segmentMetadata, UploadMetadata metadata, InMemoryFrontEnd frontEnd)
        {
            Assert.Equal(SegmentUploadStatus.Complete, segmentMetadata.Status);
            Assert.True(frontEnd.StreamExists(segmentMetadata.Path), string.Format("Segment {0} was not uploaded", segmentMetadata.SegmentNumber));
            Assert.Equal(segmentMetadata.Length, frontEnd.GetStreamLength(segmentMetadata.Path));
            var actualContents = frontEnd.GetStreamContents(segmentMetadata.Path);
            var expectedContents = GetExpectedContents(segmentMetadata, metadata);
            AssertExtensions.AreEqual(expectedContents, actualContents, "Segment {0} has unexpected contents", segmentMetadata.SegmentNumber);
        }
        // Slices the source file contents down to the byte range covered by 'segment'.
        private byte[] GetExpectedContents(UploadSegmentMetadata segment, UploadMetadata metadata)
        {
            byte[] result = new byte[segment.Length];
            Array.Copy(_smallFileContents, segment.SegmentNumber * metadata.SegmentLength, result, 0, segment.Length);
            return result;
        }
        // Builds upload metadata for the temp file split into 'segmentCount' contiguous segments.
        private UploadMetadata CreateMetadata(int segmentCount)
        {
            var path = Path.GetTempFileName();
            var metadata = new UploadMetadata()
            {
                MetadataFilePath = path,
                InputFilePath = _smallFilePath,
                FileLength = _smallFileContents.Length,
                SegmentCount = segmentCount,
                SegmentLength = UploadSegmentMetadata.CalculateSegmentLength(_smallFileContents.Length, segmentCount),
                Segments = new UploadSegmentMetadata[segmentCount],
                TargetStreamPath = "abc",
                UploadId = "123",
                IsBinary = true
            };
            long offset = 0;
            for (int i = 0; i < segmentCount; i++)
            {
                long length = UploadSegmentMetadata.CalculateSegmentLength(i, metadata);
                metadata.Segments[i] = new UploadSegmentMetadata()
                {
                    SegmentNumber = i,
                    Offset = offset,
                    Status = SegmentUploadStatus.Pending,
                    Length = length,
                    Path = string.Format("{0}.{1}.segment{2}", metadata.TargetStreamPath, metadata.UploadId, i)
                };
                offset += length;
            }
            return metadata;
        }
        // Marker exception thrown by the mock front end to simulate transient failures.
        private class IntentionalException : Exception { }
        #endregion
    }
}
| yoavrubin/azure-sdk-for-net | src/ResourceManagement/DataLake.StoreUploader/DataLakeStoreUploader.Tests/UnitTests/MultipleSegmentUploaderTests.cs | C# | apache-2.0 | 12,372 |
package com.inktomi.cirrus.forecast;
import org.simpleframework.xml.Element;
@Element
public enum TimeCoordinate {
    UTC("UTC"),
    LOCAL("local");
    // Serialized string form of the constant (as it appears in the XML feed).
    private final String value;
    TimeCoordinate(String v) {
        value = v;
    }
    /** Returns the serialized string form of this constant. */
    public String value() {
        return value;
    }
    /**
     * Resolves the constant whose serialized form equals {@code v}.
     *
     * @throws IllegalArgumentException if no constant matches
     */
    public static TimeCoordinate fromValue(String v) {
        for (TimeCoordinate c: TimeCoordinate.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        throw new IllegalArgumentException(v);
    }
}
| inktomi/cirrus | cirrus-library/src/main/java/com/inktomi/cirrus/forecast/TimeCoordinate.java | Java | apache-2.0 | 558 |
/*
* Copyright (C) 2009 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aqnote.app.barcode.history;
import android.database.sqlite.SQLiteOpenHelper;
import android.database.sqlite.SQLiteDatabase;
import android.content.Context;
/**
 * Manages the SQLite database that stores the barcode scan history.
 *
 * @author Sean Owen
 */
final class DBHelper extends SQLiteOpenHelper {

    private static final int DB_VERSION = 5;
    private static final String DB_NAME = "barcode_scanner_history.db";

    static final String TABLE_NAME = "history";
    static final String ID_COL = "id";
    static final String TEXT_COL = "text";
    static final String FORMAT_COL = "format";
    static final String DISPLAY_COL = "display";
    static final String TIMESTAMP_COL = "timestamp";
    static final String DETAILS_COL = "details";

    DBHelper(Context context) {
        super(context, DB_NAME, null, DB_VERSION);
    }

    /** Creates the history table on first use of the database. */
    @Override
    public void onCreate(SQLiteDatabase db) {
        String createTableSql =
            "CREATE TABLE " + TABLE_NAME + " (" +
            ID_COL + " INTEGER PRIMARY KEY, " +
            TEXT_COL + " TEXT, " +
            FORMAT_COL + " TEXT, " +
            DISPLAY_COL + " TEXT, " +
            TIMESTAMP_COL + " INTEGER, " +
            DETAILS_COL + " TEXT);";
        db.execSQL(createTableSql);
    }

    /** Drops and recreates the table on upgrade; stored history is discarded. */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_NAME);
        onCreate(db);
    }
}
| aqnote/AndroidTest | app-barcode/src/main/java/com/aqnote/app/barcode/history/DBHelper.java | Java | apache-2.0 | 1,955 |
/*
* Copyright © 2013-2019 camunda services GmbH and various authors (info@camunda.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.spring.test.transaction.modification;
import org.apache.commons.lang3.time.DateUtils;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import java.util.Date;
/**
 * Computes the due date for a BPMN timer: one calendar day after "now".
 * As a side effect, records the creation timestamp in the process variable
 * {@code createDate} on the given execution.
 */
public class CalculateTimerDate {

    /**
     * Stores the current time in the {@code createDate} process variable and
     * returns the timer due date one day later.
     *
     * @param execution the current process execution to write the variable on
     * @return a date exactly one calendar day after the stored creation date
     */
    public Date execute(DelegateExecution execution) {
        // Capture a single instant so the stored createDate and the returned
        // timer date are derived from the same moment (the previous code
        // called new Date() twice, letting the two values drift apart).
        Date now = new Date();
        execution.setVariable("createDate", now);
        return DateUtils.addDays(now, 1);
    }
}
| xasx/camunda-bpm-platform | engine-spring/src/test/java/org/camunda/bpm/engine/spring/test/transaction/modification/CalculateTimerDate.java | Java | apache-2.0 | 1,058 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.glaf.mail.mapper;
import java.util.*;
import org.springframework.stereotype.Component;
import com.glaf.mail.domain.*;
import com.glaf.mail.query.*;
/**
 * MyBatis mapper exposing CRUD operations for {@link MailStorage} records.
 * SQL statements are bound elsewhere by the MyBatis configuration.
 */
@Component
public interface MailStorageMapper {

    /** Deletes every mail storage matching the given query. */
    void deleteMailStorages(MailStorageQuery query);

    /** Deletes the mail storage with the given id, if present. */
    void deleteMailStorageById(String id);

    /** Returns the mail storage with the given id, or null if absent. */
    MailStorage getMailStorageById(String id);

    /** Returns the mail storage backed by the given data table, or null. */
    MailStorage getMailStorageByDataTable(String dataTable);

    /** Counts the mail storages matching the given query. */
    int getMailStorageCount(MailStorageQuery query);

    /** Returns the mail storages matching the given query. */
    List<MailStorage> getMailStorages(MailStorageQuery query);

    /** Inserts a new mail storage record. */
    void insertMailStorage(MailStorage model);

    /** Updates an existing mail storage record. */
    void updateMailStorage(MailStorage model);
} | jior/glaf | workspace/glaf-mail/src/main/java/com/glaf/mail/mapper/MailStorageMapper.java | Java | apache-2.0 | 1,402 |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package holophonor.org.objectweb.asm.commons;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import holophonor.org.objectweb.asm.Handle;
import holophonor.org.objectweb.asm.Label;
import holophonor.org.objectweb.asm.MethodVisitor;
import holophonor.org.objectweb.asm.Opcodes;
import holophonor.org.objectweb.asm.Type;
/**
* A {@link holophonor.org.objectweb.asm.MethodVisitor} to insert before, after and around
* advices in methods and constructors.
* <p>
* The behavior for constructors is like this:
* <ol>
*
* <li>as long as the INVOKESPECIAL for the object initialization has not been
* reached, every bytecode instruction is dispatched in the ctor code visitor</li>
*
* <li>when this one is reached, it is only added in the ctor code visitor and a
* JP invoke is added</li>
*
* <li>after that, only the other code visitor receives the instructions</li>
*
* </ol>
*
* @author Eugene Kuleshov
* @author Eric Bruneton
*/
public abstract class AdviceAdapter extends GeneratorAdapter implements Opcodes {

    // Sentinel marking a simulated stack slot known to hold the uninitialized
    // "this" reference inside a constructor.
    private static final Object THIS = new Object();

    // Sentinel marking any other simulated stack slot.
    private static final Object OTHER = new Object();

    // Access flags of the method being visited.
    protected int methodAccess;

    // Descriptor of the method being visited.
    protected String methodDesc;

    // True while visiting a constructor whose super()/this() call has not yet
    // been reached; only then is the stack simulation below active.
    private boolean constructor;

    // True once the super class constructor has been invoked (or immediately
    // for non-constructor methods).
    private boolean superInitialized;

    // Simulated operand stack: one THIS/OTHER entry per stack slot. Only used
    // while 'constructor' is true, to detect the super()/this() call.
    private List<Object> stackFrame;

    // Saved simulated stack states for branch targets that have not yet been
    // visited, keyed by their label.
    private Map<Label, List<Object>> branches;

    /**
     * Creates a new {@link AdviceAdapter}.
     *
     * @param api
     *            the ASM API version implemented by this visitor. Must be one
     *            of {@link Opcodes#ASM4}.
     * @param mv
     *            the method visitor to which this adapter delegates calls.
     * @param access
     *            the method's access flags (see {@link Opcodes}).
     * @param name
     *            the method's name.
     * @param desc
     *            the method's descriptor (see {@link Type Type}).
     */
    protected AdviceAdapter(final int api, final MethodVisitor mv,
            final int access, final String name, final String desc) {
        super(api, mv, access, name, desc);
        methodAccess = access;
        methodDesc = desc;
        constructor = "<init>".equals(name);
    }

    // For plain methods the entry advice can be emitted immediately; for
    // constructors the stack simulation state is initialized instead and
    // onMethodEnter() is deferred until the super()/this() call is seen.
    @Override
    public void visitCode() {
        mv.visitCode();
        if (constructor) {
            stackFrame = new ArrayList<Object>();
            branches = new HashMap<Label, List<Object>>();
        } else {
            superInitialized = true;
            onMethodEnter();
        }
    }

    // Restores the simulated stack saved for this label, if it was recorded
    // as a branch target.
    @Override
    public void visitLabel(final Label label) {
        mv.visitLabel(label);
        if (constructor && branches != null) {
            List<Object> frame = branches.get(label);
            if (frame != null) {
                stackFrame = frame;
                branches.remove(label);
            }
        }
    }

    // Models the stack effect of every zero-operand instruction while inside
    // an uninitialized constructor, and emits exit advice before returns and
    // throws. The case groupings mirror each opcode's net push/pop count.
    @Override
    public void visitInsn(final int opcode) {
        if (constructor) {
            int s;
            switch (opcode) {
            case RETURN: // empty stack
                onMethodExit(opcode);
                break;
            case IRETURN: // 1 before n/a after
            case FRETURN: // 1 before n/a after
            case ARETURN: // 1 before n/a after
            case ATHROW: // 1 before n/a after
                popValue();
                onMethodExit(opcode);
                break;
            case LRETURN: // 2 before n/a after
            case DRETURN: // 2 before n/a after
                popValue();
                popValue();
                onMethodExit(opcode);
                break;
            case NOP:
            case LALOAD: // remove 2 add 2
            case DALOAD: // remove 2 add 2
            case LNEG:
            case DNEG:
            case FNEG:
            case INEG:
            case L2D:
            case D2L:
            case F2I:
            case I2B:
            case I2C:
            case I2S:
            case I2F:
            case ARRAYLENGTH:
                break;
            case ACONST_NULL:
            case ICONST_M1:
            case ICONST_0:
            case ICONST_1:
            case ICONST_2:
            case ICONST_3:
            case ICONST_4:
            case ICONST_5:
            case FCONST_0:
            case FCONST_1:
            case FCONST_2:
            case F2L: // 1 before 2 after
            case F2D:
            case I2L:
            case I2D:
                pushValue(OTHER);
                break;
            case LCONST_0:
            case LCONST_1:
            case DCONST_0:
            case DCONST_1:
                pushValue(OTHER);
                pushValue(OTHER);
                break;
            case IALOAD: // remove 2 add 1
            case FALOAD: // remove 2 add 1
            case AALOAD: // remove 2 add 1
            case BALOAD: // remove 2 add 1
            case CALOAD: // remove 2 add 1
            case SALOAD: // remove 2 add 1
            case POP:
            case IADD:
            case FADD:
            case ISUB:
            case LSHL: // 3 before 2 after
            case LSHR: // 3 before 2 after
            case LUSHR: // 3 before 2 after
            case L2I: // 2 before 1 after
            case L2F: // 2 before 1 after
            case D2I: // 2 before 1 after
            case D2F: // 2 before 1 after
            case FSUB:
            case FMUL:
            case FDIV:
            case FREM:
            case FCMPL: // 2 before 1 after
            case FCMPG: // 2 before 1 after
            case IMUL:
            case IDIV:
            case IREM:
            case ISHL:
            case ISHR:
            case IUSHR:
            case IAND:
            case IOR:
            case IXOR:
            case MONITORENTER:
            case MONITOREXIT:
                popValue();
                break;
            case POP2:
            case LSUB:
            case LMUL:
            case LDIV:
            case LREM:
            case LADD:
            case LAND:
            case LOR:
            case LXOR:
            case DADD:
            case DMUL:
            case DSUB:
            case DDIV:
            case DREM:
                popValue();
                popValue();
                break;
            case IASTORE:
            case FASTORE:
            case AASTORE:
            case BASTORE:
            case CASTORE:
            case SASTORE:
            case LCMP: // 4 before 1 after
            case DCMPL:
            case DCMPG:
                popValue();
                popValue();
                popValue();
                break;
            case LASTORE:
            case DASTORE:
                popValue();
                popValue();
                popValue();
                popValue();
                break;
            case DUP:
                pushValue(peekValue());
                break;
            case DUP_X1:
                // Duplicate the top slot two positions down the stack.
                s = stackFrame.size();
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                break;
            case DUP_X2:
                s = stackFrame.size();
                stackFrame.add(s - 3, stackFrame.get(s - 1));
                break;
            case DUP2:
                s = stackFrame.size();
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                break;
            case DUP2_X1:
                s = stackFrame.size();
                stackFrame.add(s - 3, stackFrame.get(s - 1));
                stackFrame.add(s - 3, stackFrame.get(s - 1));
                break;
            case DUP2_X2:
                s = stackFrame.size();
                stackFrame.add(s - 4, stackFrame.get(s - 1));
                stackFrame.add(s - 4, stackFrame.get(s - 1));
                break;
            case SWAP:
                // Insert a copy of the top below the second slot, then drop
                // the original top; net effect swaps the two top slots.
                s = stackFrame.size();
                stackFrame.add(s - 2, stackFrame.get(s - 1));
                stackFrame.remove(s);
                break;
            }
        } else {
            // Outside the uninitialized-constructor window only the exit
            // advice needs to be emitted.
            switch (opcode) {
            case RETURN:
            case IRETURN:
            case FRETURN:
            case ARETURN:
            case LRETURN:
            case DRETURN:
            case ATHROW:
                onMethodExit(opcode);
                break;
            }
        }
        mv.visitInsn(opcode);
    }

    // Models loads/stores of local variables; ALOAD 0 pushes the THIS
    // sentinel, which is how the super()/this() receiver is later recognized.
    @Override
    public void visitVarInsn(final int opcode, final int var) {
        super.visitVarInsn(opcode, var);
        if (constructor) {
            switch (opcode) {
            case ILOAD:
            case FLOAD:
                pushValue(OTHER);
                break;
            case LLOAD:
            case DLOAD:
                pushValue(OTHER);
                pushValue(OTHER);
                break;
            case ALOAD:
                pushValue(var == 0 ? THIS : OTHER);
                break;
            case ASTORE:
            case ISTORE:
            case FSTORE:
                popValue();
                break;
            case LSTORE:
            case DSTORE:
                popValue();
                popValue();
                break;
            }
        }
    }

    // Models field accesses; long/double fields occupy two stack slots.
    @Override
    public void visitFieldInsn(final int opcode, final String owner,
            final String name, final String desc) {
        mv.visitFieldInsn(opcode, owner, name, desc);
        if (constructor) {
            char c = desc.charAt(0);
            boolean longOrDouble = c == 'J' || c == 'D';
            switch (opcode) {
            case GETSTATIC:
                pushValue(OTHER);
                if (longOrDouble) {
                    pushValue(OTHER);
                }
                break;
            case PUTSTATIC:
                popValue();
                if (longOrDouble) {
                    popValue();
                }
                break;
            case PUTFIELD:
                popValue();
                if (longOrDouble) {
                    popValue();
                    popValue();
                }
                break;
            // case GETFIELD:
            default:
                if (longOrDouble) {
                    pushValue(OTHER);
                }
            }
        }
    }

    // BIPUSH/SIPUSH push one slot; NEWARRAY pops and pushes one, so the
    // simulated stack is unchanged for it.
    @Override
    public void visitIntInsn(final int opcode, final int operand) {
        mv.visitIntInsn(opcode, operand);
        if (constructor && opcode != NEWARRAY) {
            pushValue(OTHER);
        }
    }

    // LDC pushes one slot, or two for long/double constants.
    @Override
    public void visitLdcInsn(final Object cst) {
        mv.visitLdcInsn(cst);
        if (constructor) {
            pushValue(OTHER);
            if (cst instanceof Double || cst instanceof Long) {
                pushValue(OTHER);
            }
        }
    }

    // MULTIANEWARRAY pops one dimension count per dimension and pushes the
    // array reference.
    @Override
    public void visitMultiANewArrayInsn(final String desc, final int dims) {
        mv.visitMultiANewArrayInsn(desc, dims);
        if (constructor) {
            for (int i = 0; i < dims; i++) {
                popValue();
            }
            pushValue(OTHER);
        }
    }

    @Override
    public void visitTypeInsn(final int opcode, final String type) {
        mv.visitTypeInsn(opcode, type);
        // ANEWARRAY, CHECKCAST or INSTANCEOF don't change stack
        if (constructor && opcode == NEW) {
            pushValue(OTHER);
        }
    }

    // Models method calls; an INVOKESPECIAL whose receiver is the THIS
    // sentinel is the super()/this() call, at which point the entry advice
    // is emitted and the simulation is switched off.
    @Override
    public void visitMethodInsn(final int opcode, final String owner,
            final String name, final String desc) {
        mv.visitMethodInsn(opcode, owner, name, desc);
        if (constructor) {
            Type[] types = Type.getArgumentTypes(desc);
            for (int i = 0; i < types.length; i++) {
                popValue();
                if (types[i].getSize() == 2) {
                    popValue();
                }
            }
            switch (opcode) {
            // case INVOKESTATIC:
            // break;
            case INVOKEINTERFACE:
            case INVOKEVIRTUAL:
                popValue(); // objectref
                break;
            case INVOKESPECIAL:
                Object type = popValue(); // objectref
                if (type == THIS && !superInitialized) {
                    onMethodEnter();
                    superInitialized = true;
                    // once super has been initialized it is no longer
                    // necessary to keep track of stack state
                    constructor = false;
                }
                break;
            }
            Type returnType = Type.getReturnType(desc);
            if (returnType != Type.VOID_TYPE) {
                pushValue(OTHER);
                if (returnType.getSize() == 2) {
                    pushValue(OTHER);
                }
            }
        }
    }

    // Models invokedynamic calls: pops all arguments, pushes the result.
    @Override
    public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
            Object... bsmArgs) {
        mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
        if (constructor) {
            Type[] types = Type.getArgumentTypes(desc);
            for (int i = 0; i < types.length; i++) {
                popValue();
                if (types[i].getSize() == 2) {
                    popValue();
                }
            }
            Type returnType = Type.getReturnType(desc);
            if (returnType != Type.VOID_TYPE) {
                pushValue(OTHER);
                if (returnType.getSize() == 2) {
                    pushValue(OTHER);
                }
            }
        }
    }

    // Models conditional/unconditional jumps and records the simulated stack
    // at the branch target.
    @Override
    public void visitJumpInsn(final int opcode, final Label label) {
        mv.visitJumpInsn(opcode, label);
        if (constructor) {
            switch (opcode) {
            case IFEQ:
            case IFNE:
            case IFLT:
            case IFGE:
            case IFGT:
            case IFLE:
            case IFNULL:
            case IFNONNULL:
                popValue();
                break;
            case IF_ICMPEQ:
            case IF_ICMPNE:
            case IF_ICMPLT:
            case IF_ICMPGE:
            case IF_ICMPGT:
            case IF_ICMPLE:
            case IF_ACMPEQ:
            case IF_ACMPNE:
                popValue();
                popValue();
                break;
            case JSR:
                pushValue(OTHER);
                break;
            }
            addBranch(label);
        }
    }

    @Override
    public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
            final Label[] labels) {
        mv.visitLookupSwitchInsn(dflt, keys, labels);
        if (constructor) {
            popValue();
            addBranches(dflt, labels);
        }
    }

    @Override
    public void visitTableSwitchInsn(final int min, final int max,
            final Label dflt, final Label... labels) {
        mv.visitTableSwitchInsn(min, max, dflt, labels);
        if (constructor) {
            popValue();
            addBranches(dflt, labels);
        }
    }

    // At an exception handler the stack holds exactly one slot (the thrown
    // exception), so that state is pre-recorded for the handler label.
    @Override
    public void visitTryCatchBlock(Label start, Label end, Label handler,
            String type) {
        super.visitTryCatchBlock(start, end, handler, type);
        if (constructor && !branches.containsKey(handler)) {
            List<Object> stackFrame = new ArrayList<Object>();
            stackFrame.add(OTHER);
            branches.put(handler, stackFrame);
        }
    }

    // Records the current simulated stack for a switch default and all its
    // case targets.
    private void addBranches(final Label dflt, final Label[] labels) {
        addBranch(dflt);
        for (int i = 0; i < labels.length; i++) {
            addBranch(labels[i]);
        }
    }

    // Records a copy of the current simulated stack for the given target,
    // unless one was already recorded.
    private void addBranch(final Label label) {
        if (branches.containsKey(label)) {
            return;
        }
        branches.put(label, new ArrayList<Object>(stackFrame));
    }

    // Removes and returns the top simulated stack slot.
    private Object popValue() {
        return stackFrame.remove(stackFrame.size() - 1);
    }

    // Returns the top simulated stack slot without removing it.
    private Object peekValue() {
        return stackFrame.get(stackFrame.size() - 1);
    }

    // Pushes one simulated stack slot.
    private void pushValue(final Object o) {
        stackFrame.add(o);
    }

    /**
     * Called at the beginning of the method or after super class class call in
     * the constructor. <br>
     * <br>
     *
     * <i>Custom code can use or change all the local variables, but should not
     * change state of the stack.</i>
     */
    protected void onMethodEnter() {
    }

    /**
     * Called before explicit exit from the method using either return or throw.
     * Top element on the stack contains the return value or exception instance.
     * For example:
     *
     * <pre>
     *   public void onMethodExit(int opcode) {
     *     if(opcode==RETURN) {
     *         visitInsn(ACONST_NULL);
     *     } else if(opcode==ARETURN || opcode==ATHROW) {
     *         dup();
     *     } else {
     *         if(opcode==LRETURN || opcode==DRETURN) {
     *             dup2();
     *         } else {
     *             dup();
     *         }
     *         box(Type.getReturnType(this.methodDesc));
     *     }
     *     visitIntInsn(SIPUSH, opcode);
     *     visitMethodInsn(INVOKESTATIC, owner, "onExit", "(Ljava/lang/Object;I)V");
     *   }
     *
     *   // an actual call back method
     *   public static void onExit(Object param, int opcode) {
     *     ...
     * </pre>
     *
     * <br>
     * <br>
     *
     * <i>Custom code can use or change all the local variables, but should not
     * change state of the stack.</i>
     *
     * @param opcode
     *            one of the RETURN, IRETURN, FRETURN, ARETURN, LRETURN, DRETURN
     *            or ATHROW
     *
     */
    protected void onMethodExit(int opcode) {
    }

    // TODO onException, onMethodCall
}
| rikf/Holophonor | src/main/java/holophonor/org/objectweb/asm/commons/AdviceAdapter.java | Java | apache-2.0 | 19,221 |
package gnode
import (
"fmt"
"io"
"sync"
"time"
)
// ReceiverManager routes incoming AntMes messages to a pool of
// MessageReceiver goroutines through a bounded buffer, and keeps track of
// answer messages that callers wait on by origin id.
type ReceiverManager struct {
	usage        int                    // count of messages accepted so far (stats only)
	gnode        *GNode                 // owning node
	buffer       MessageBuffer          // bounded queue of pending messages
	receiverList []*MessageReceiver     // pool of receiver goroutines
	ioChan       chan *AntMes           // channel the receivers pull work from
	nbReceiver   int                    // pool size; <= 0 selects synchronous mode
	receiver     MessageReceiver        // inline receiver used in synchronous mode
	answerMap    map[string]*AntMes     // received answers keyed by origin message id
	getChan      chan string            // wakes waitForAnswer (answer arrived or timeout)
	lockClient   sync.RWMutex           // guards client registration in startClientReader
	functionMap  map[string]interface{} // message-function dispatch table by name
}
// loadFunctions builds the dispatch table mapping message function names to
// the node methods that handle them; incoming messages are executed by
// looking their Function field up in this map.
func (m *ReceiverManager) loadFunctions() {
	m.functionMap = make(map[string]interface{})
	//node Functions
	m.functionMap["ping"] = m.gnode.nodeFunctions.ping
	m.functionMap["pingFromTo"] = m.gnode.nodeFunctions.pingFromTo
	m.functionMap["setLogLevel"] = m.gnode.nodeFunctions.setLogLevel
	m.functionMap["killNode"] = m.gnode.nodeFunctions.killNode
	m.functionMap["updateGrid"] = m.gnode.nodeFunctions.updateGrid
	m.functionMap["writeStatsInLog"] = m.gnode.nodeFunctions.writeStatsInLog
	m.functionMap["clear"] = m.gnode.nodeFunctions.clear
	m.functionMap["forceGC"] = m.gnode.nodeFunctions.forceGCMes
	m.functionMap["getConnections"] = m.gnode.nodeFunctions.getConnections
	m.functionMap["getNodeInfo"] = m.gnode.nodeFunctions.getNodeInfo
	m.functionMap["createUser"] = m.gnode.nodeFunctions.createUser
	m.functionMap["createNodeUser"] = m.gnode.nodeFunctions.createNodeUser
	m.functionMap["removeUser"] = m.gnode.nodeFunctions.removeUser
	m.functionMap["removeNodeUser"] = m.gnode.nodeFunctions.removeNodeUser
	m.functionMap["setNodePublicKey"] = m.gnode.nodeFunctions.setNodePublicKey
	m.functionMap["isReady"] = m.gnode.nodeFunctions.isReady
	//gnode Function
	m.functionMap["sendBackEvent"] = m.gnode.sendBackEvent
	//EntryManager function
	m.functionMap["addEntry"] = m.gnode.entryManager.addEntry
	m.functionMap["addBranch"] = m.gnode.entryManager.addBranch
	m.functionMap["getTree"] = m.gnode.treeManager.getTree
}
// start initializes the manager's state and, unless maxGoRoutine <= 0,
// launches a pool of maxGoRoutine MessageReceiver goroutines plus a pump
// goroutine that moves messages from the bounded buffer onto ioChan.
// With maxGoRoutine <= 0 the manager runs synchronously: receiveMessage
// executes messages inline on the single embedded receiver.
func (m *ReceiverManager) start(gnode *GNode, bufferSize int, maxGoRoutine int) {
	m.gnode = gnode
	m.loadFunctions()
	m.lockClient = sync.RWMutex{}
	m.nbReceiver = maxGoRoutine
	m.buffer.init(bufferSize)
	m.ioChan = make(chan *AntMes)
	m.getChan = make(chan string)
	m.answerMap = make(map[string]*AntMes)
	m.receiverList = []*MessageReceiver{}
	// Synchronous mode: no goroutines, messages executed inline.
	if maxGoRoutine <= 0 {
		m.receiver.gnode = gnode
		return
	}
	for i := 0; i < maxGoRoutine; i++ {
		routine := &MessageReceiver{
			id:              i,
			gnode:           m.gnode,
			receiverManager: m,
		}
		m.receiverList = append(m.receiverList, routine)
		routine.start()
	}
	// Pump goroutine: blocks on the buffer and forwards each message to the
	// receiver pool via ioChan.
	go func() {
		for {
			mes, ok := m.buffer.get(true)
			//logf.info("Receive message ok=%t %v\n", ok, mes.toString())
			if ok && mes != nil {
				m.ioChan <- mes
			}
		}
	}()
}
// waitForAnswer blocks until an answer message with the given origin id has
// been stored in answerMap, or until timeoutSecond seconds have elapsed, in
// which case a timeout error is returned.
//
// NOTE(review): answerMap is read here while it is (presumably) written by
// the receiver goroutines without synchronization — confirm whether a lock
// is needed around these map accesses.
func (m *ReceiverManager) waitForAnswer(id string, timeoutSecond int) (*AntMes, error) {
	// Fast path: the answer may already have arrived.
	if mes, ok := m.answerMap[id]; ok {
		return mes, nil
	}
	// Push a sentinel on getChan when the timeout fires so the loop below
	// wakes up even if no answer ever arrives.
	timer := time.AfterFunc(time.Second*time.Duration(timeoutSecond), func() {
		m.getChan <- "timeout"
	})
	logf.info("Waiting for answer originId=%s\n", id)
	for {
		retId := <-m.getChan
		if retId == "timeout" {
			// Fixed typo in the error message ("wiating" -> "waiting").
			return nil, fmt.Errorf("Timeout waiting for message answer id=%s", id)
		}
		if mes, ok := m.answerMap[id]; ok {
			logf.info("Found answer originId=%s\n", id)
			timer.Stop()
			return mes, nil
		}
	}
}
// receiveMessage hands an incoming message to the receiver machinery. In
// synchronous mode (nbReceiver <= 0) it is executed inline; otherwise it is
// queued in the bounded buffer. Returns false when the buffer is full so the
// caller can retry later.
func (m *ReceiverManager) receiveMessage(mes *AntMes) bool {
	m.usage++
	logf.debugMes(mes, "recceive message: %s\n", mes.toString())
	if m.nbReceiver <= 0 {
		m.receiver.executeMessage(mes)
		return true
	}
	if m.buffer.put(mes) {
		//logf.info("receive message function=%s duplicate=%d order=%d ok\n", mes.Function, mes.Duplicate, mes.Order)
		return true
	}
	return false
}
// stats prints usage counters to stdout: total messages accepted, the peak
// buffer occupancy, and the per-receiver execution counts.
func (m *ReceiverManager) stats() {
	fmt.Printf("Receiver: nb=%d maxbuf=%d\n", m.usage, m.buffer.max)
	execVal := ""
	for _, exec := range m.receiverList {
		execVal = fmt.Sprintf("%s %d", execVal, exec.usage)
	}
	fmt.Printf("Receivers: %s\n", execVal)
}
// startClientReader registers a new gRPC client on its bidirectional stream,
// acknowledges it with a generated client name, then reads messages from the
// stream until EOF or error, dispatching each one into the receiver pipeline.
// On stream termination the client and its event listeners are cleaned up.
func (m *ReceiverManager) startClientReader(stream GNodeService_GetClientStreamServer) {
	m.lockClient.Lock()
	// The client name embeds a nanosecond timestamp plus the map size to be
	// unique; the lock is held across generation and registration.
	clientName := fmt.Sprintf("client-%d-%d", time.Now().UnixNano(), m.gnode.clientMap.len()+1)
	m.gnode.clientMap.set(clientName, &gnodeClient{
		name:   clientName,
		stream: stream,
	})
	// Tell the client its assigned name so it can tag subsequent messages.
	stream.Send(&AntMes{
		Function:   "ClientAck",
		FromClient: clientName,
	})
	logf.info("Client stream open: %s\n", clientName)
	m.lockClient.Unlock() //unlock far to be sure to have several nano
	for {
		mes, err := stream.Recv()
		if err == io.EOF {
			// Normal client disconnect: unregister and release resources.
			logf.error("Client reader %s: EOF\n", clientName)
			m.gnode.clientMap.del(clientName)
			m.gnode.removeEventListener(clientName)
			m.gnode.nodeFunctions.forceGC()
			return
		}
		if err != nil {
			// Abnormal disconnect: same cleanup, different log message.
			logf.error("Client reader %s: Failed to receive message: %v\n", clientName, err)
			m.gnode.clientMap.del(clientName)
			m.gnode.removeEventListener(clientName)
			m.gnode.nodeFunctions.forceGC()
			return
		}
		if mes.Function == "setEventListener" {
			// Listener registration is handled directly, not queued.
			m.gnode.setEventListener(mes.Args[0], mes.Args[1], mes.UserName, clientName)
		} else {
			// Stamp the message with a fresh id and its origin before
			// handing it to the receiver pipeline.
			mes.Id = m.gnode.getNewId(false)
			mes.Origin = m.gnode.name
			mes.FromClient = clientName
			m.gnode.idMap.Add(mes.Id)
			if mes.Debug {
				logf.debugMes(mes, "-------------------------------------------------------------------------------------------------------------\n")
				logf.debugMes(mes, "Receive mes from client %s : %v\n", clientName, mes)
			}
			// Retry once per second while the receive buffer is full.
			for {
				if m.gnode.receiverManager.receiveMessage(mes) {
					break
				}
				time.Sleep(1 * time.Second)
			}
		}
	}
}
| freignat91/blockchain | server/gnode/receiverManager.go | GO | apache-2.0 | 5,407 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.openshift;
import com.openshift.client.IApplication;
import org.apache.camel.Component;
import org.apache.camel.Consumer;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.impl.DefaultExchange;
import org.apache.camel.impl.ScheduledPollEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.util.ObjectHelper;
/**
 * Camel endpoint for interacting with OpenShift. Producers perform
 * application-management operations; consumers poll the account's
 * applications on a schedule.
 */
@UriEndpoint(scheme = "openshift", syntax = "openshift:clientId", consumerClass = OpenShiftConsumer.class, label = "cloud")
public class OpenShiftEndpoint extends ScheduledPollEndpoint {

    // OpenShift client id used to identify this integration.
    @UriPath @Metadata(required = "true")
    private String clientId;
    // Account credentials for the OpenShift REST API.
    @UriPath @Metadata(required = "true")
    private String username;
    @UriPath @Metadata(required = "true")
    private String password;
    // Optional domain to scope the applications to.
    @UriParam
    private String domain;
    // Optional OpenShift server URL; a default is used when unset.
    @UriParam
    private String server;
    // Producer operation to perform on an application.
    @UriParam(enums = "list,start,stop,restart,state")
    private String operation;
    // Target application name for application-scoped operations.
    @UriParam
    private String application;
    // Output mode of the list operation.
    @UriParam
    private String mode;

    public OpenShiftEndpoint(String endpointUri, Component component) {
        super(endpointUri, component);
    }

    /**
     * Creates a producer after validating that the mandatory credentials
     * (clientId, username, password) are configured.
     */
    @Override
    public Producer createProducer() throws Exception {
        ObjectHelper.notEmpty(clientId, "clientId", this);
        ObjectHelper.notEmpty(username, "username", this);
        ObjectHelper.notEmpty(password, "password", this);
        return new OpenShiftProducer(this);
    }

    /**
     * Creates a polling consumer after validating the mandatory credentials.
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        ObjectHelper.notEmpty(clientId, "clientId", this);
        ObjectHelper.notEmpty(username, "username", this);
        ObjectHelper.notEmpty(password, "password", this);
        Consumer consumer = new OpenShiftConsumer(this, processor);
        configureConsumer(consumer);
        return consumer;
    }

    /**
     * Wraps an OpenShift application in a new exchange, with the application
     * as the IN message body.
     */
    public Exchange createExchange(IApplication application) {
        Exchange exchange = new DefaultExchange(this);
        exchange.getIn().setBody(application);
        return exchange;
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getDomain() {
        return domain;
    }

    public void setDomain(String domain) {
        this.domain = domain;
    }

    public String getServer() {
        return server;
    }

    public void setServer(String server) {
        this.server = server;
    }

    public String getOperation() {
        return operation;
    }

    public void setOperation(String operation) {
        this.operation = operation;
    }

    // Convenience overload accepting the typed enum; stores its name.
    public void setOperation(OpenShiftOperation operation) {
        this.operation = operation.name();
    }

    public String getApplication() {
        return application;
    }

    public void setApplication(String application) {
        this.application = application;
    }

    public String getMode() {
        return mode;
    }

    public void setMode(String mode) {
        this.mode = mode;
    }
}
| ramonmaruko/camel | components/camel-openshift/src/main/java/org/apache/camel/component/openshift/OpenShiftEndpoint.java | Java | apache-2.0 | 4,430 |
/*
* Copyright 2016-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.designpattern.manage.base.creationalpatterns.prototype;
/**
 * <p>Prototype class: demonstrates the Prototype creational pattern by
 * producing copies of itself via {@link Object#clone()}.</p>
 *
 * @author kay
 * @version 1.0
 */
public class Prototype implements Cloneable {

    /**
     * Returns a shallow copy of this object.
     *
     * <p>Because this class implements {@link Cloneable},
     * {@code super.clone()} cannot throw {@link CloneNotSupportedException};
     * if it ever did, that would be a programming error, so it is surfaced as
     * an {@link AssertionError} rather than being swallowed and returning
     * {@code null} as the previous implementation did.</p>
     *
     * @return a new {@code Prototype} with the same field values as this one
     */
    @Override
    public Prototype clone() {
        try {
            return (Prototype) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new AssertionError("Cloneable class failed to clone", e);
        }
    }
}
| Minato262/Design-Pattern | src/java/main/org/designpattern/manage/base/creationalpatterns/prototype/Prototype.java | Java | apache-2.0 | 1,057 |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package validation
import (
"context"
"reflect"
"regexp"
"sync"
"time"
"github.com/golang/protobuf/ptypes"
"google.golang.org/genproto/googleapis/devtools/cloudtrace/v2"
genprotoStatus "google.golang.org/genproto/googleapis/rpc/status"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
const (
	// These restrictions can be found at
	// https://cloud.google.com/trace/docs/reference/v2/rpc/google.devtools.cloudtrace.v2
	maxAnnotationAttributes = 4
	maxAnnotationBytes      = 256
	maxAttributes           = 32
	maxAttributeKeyBytes    = 128
	maxAttributeValueBytes  = 256
	maxDisplayNameBytes     = 128
	maxLinks                = 128
	maxTimeEvents           = 32
	// Attribute keys under which the exporting agent identifies itself.
	agent          = "g.co/agent"
	shortenedAgent = "agent"
)

var (
	// The exporter is responsible for mapping these special attributes to the correct
	// canonical Cloud Trace attributes (/http/method, /http/route, etc.)
	specialAttributes = map[string]struct{}{
		"http.method":      {},
		"http.route":       {},
		"http.status_code": {},
	}
	// requiredFields lists the Span struct fields every incoming span must populate.
	requiredFields = []string{"Name", "SpanId", "DisplayName", "StartTime", "EndTime"}
	// spanNameRegex matches the canonical span resource name
	// (projects/<id>/traces/<32 hex>/spans/<16 hex>).
	spanNameRegex = regexp.MustCompile("^projects/[^/]+/traces/[a-fA-F0-9]{32}/spans/[a-fA-F0-9]{16}$")
	// agentRegex matches the expected value of the g.co/agent attribute.
	agentRegex = regexp.MustCompile(`^opentelemetry-[a-zA-Z]+ \d+(?:\.\d+)+; google-cloud-trace-exporter \d+(?:\.\d+)+$`)
)
// SpanData wraps all the span data on the server into a struct.
type SpanData struct {
	// If a batch has a bad span, we don't write batch to memory, but still want
	// info on them for summary, so need SpansSummary
	SpansSummary []*cloudtrace.Span
	// Names of spans already written, used for duplicate-name detection.
	UploadedSpanNames map[string]struct{}
	// Spans that passed validation and were written to memory.
	UploadedSpans []*cloudtrace.Span
	// Mutex guards concurrent access to the fields above.
	Mutex sync.RWMutex
}
// ValidateSpans checks that the spans conform to the API requirements.
// That is, required fields are present, and optional fields are of the correct form.
// If any violations are detected, the errors will be added to the result table.
// ValidateSpans checks that the spans conform to the API requirements.
// That is, required fields are present, and optional fields are of the correct form.
// If any violations are detected, the errors will be added to the result table.
// The returned error is the last validation error seen, or nil if all spans passed.
//
// NOTE(review): a span that fails several checks is appended to
// spanData.SpansSummary once per failing check (addSpanToSummary runs on
// every error) — confirm this duplication is intended.
func ValidateSpans(requestName string, spanData *SpanData, spans ...*cloudtrace.Span) error {
	var overallError error
	currentRequestSpanNames := make(map[string]struct{})
	for _, span := range spans {
		var currentError error
		// Validate required fields are present and semantically make sense.
		if err := CheckForRequiredFields(requiredFields, reflect.ValueOf(span), requestName); err != nil {
			addSpanToSummary(&spanData.SpansSummary, span, err)
			currentError = err
		}
		if err := validateName(span.Name, spanData.UploadedSpanNames, currentRequestSpanNames); err != nil {
			addSpanToSummary(&spanData.SpansSummary, span, err)
			currentError = err
		}
		if err := validateTimeStamps(span); err != nil {
			addSpanToSummary(&spanData.SpansSummary, span, err)
			currentError = err
		}
		if err := validateDisplayName(span.DisplayName); err != nil {
			addSpanToSummary(&spanData.SpansSummary, span, err)
			currentError = err
		}
		// Validate that if optional fields are present, they conform to the API.
		if err := validateAttributes(span.Attributes, maxAttributes); err != nil {
			addSpanToSummary(&spanData.SpansSummary, span, err)
			currentError = err
		}
		if err := validateTimeEvents(span.TimeEvents); err != nil {
			addSpanToSummary(&spanData.SpansSummary, span, err)
			currentError = err
		}
		if err := validateLinks(span.Links); err != nil {
			addSpanToSummary(&spanData.SpansSummary, span, err)
			currentError = err
		}
		if currentError == nil {
			// Record a fully valid span in the summary with OK status.
			addSpanToSummary(&spanData.SpansSummary, span, nil)
		} else {
			overallError = currentError
		}
	}
	// Simplified from the previous redundant nil check: overallError is
	// already nil when every span validated cleanly.
	return overallError
}
// addSpanToSummary sets the span's status (OK when err is nil, otherwise the
// gRPC code/message derived from err) and appends it to the summary slice.
// The slice is passed by pointer because append may reallocate it.
func addSpanToSummary(spanSummary *[]*cloudtrace.Span, span *cloudtrace.Span, err error) {
	setSpanStatus(span, err)
	*spanSummary = append(*spanSummary, span)
}
// setSpanStatus stamps span.Status with OK when err is nil, or with the gRPC
// code and message carried by err otherwise.
func setSpanStatus(span *cloudtrace.Span, err error) {
	if err == nil {
		span.Status = &genprotoStatus.Status{
			Code:    int32(codes.OK),
			Message: "OK",
		}
		return
	}
	// Convert once instead of twice for code and message.
	converted := status.Convert(err)
	span.Status = &genprotoStatus.Status{
		Code:    int32(converted.Code()),
		Message: converted.Message(),
	}
}
// AddSpans adds the given spans to the list of uploaded spans and records
// each span's name so later requests can detect duplicates.
func AddSpans(spanData *SpanData, spans ...*cloudtrace.Span) {
	for i := range spans {
		s := spans[i]
		spanData.UploadedSpans = append(spanData.UploadedSpans, s)
		spanData.UploadedSpanNames[s.Name] = struct{}{}
	}
}
// Delay will block for the specified amount of time.
// Used to delay writing spans to memory.
func Delay(ctx context.Context, delay time.Duration) error {
select {
case <-ctx.Done():
return ctx.Err()
case <-time.After(delay):
return nil
}
}
// AccessSpan returns the span at the given index if it is in range.
// If it is not in range, nil is returned.
func AccessSpan(index int, uploadedSpans []*cloudtrace.Span) *cloudtrace.Span {
	// Bounds check written negative-first for readability.
	if index < 0 || index >= len(uploadedSpans) {
		return nil
	}
	return uploadedSpans[index]
}
// validateDisplayName verifies that the display name has at most 128 bytes
// (maxDisplayNameBytes). A nil display name is treated as valid here: the
// required-fields check in ValidateSpans is responsible for flagging its
// absence, but ValidateSpans still calls this function afterwards, so without
// the guard a missing DisplayName would panic on the nil dereference.
func validateDisplayName(displayName *cloudtrace.TruncatableString) error {
	if displayName == nil {
		return nil
	}
	if len(displayName.Value) > maxDisplayNameBytes {
		return statusInvalidDisplayName
	}
	return nil
}
// validateName verifies that the span name is not a duplicate, and is of the form:
// projects/{project_id}/traces/{trace_id}/spans/{span_id}
// where trace_id is a 32-char hex encoding, and span_id is a 16-char hex encoding.
// On success the name is recorded in currentRequestSpanNames so later spans in
// the same request cannot reuse it.
func validateName(name string, spanNames map[string]struct{}, currentRequestSpanNames map[string]struct{}) error {
	// A name is a duplicate if it was uploaded previously or already appeared
	// earlier in this request.
	_, uploadedBefore := spanNames[name]
	_, seenThisRequest := currentRequestSpanNames[name]
	if uploadedBefore || seenThisRequest {
		return statusDuplicateSpanName
	}
	if !spanNameRegex.MatchString(name) {
		return statusInvalidSpanName
	}
	currentRequestSpanNames[name] = struct{}{}
	return nil
}
// validateTimeStamps verifies that the start time of a span is strictly
// before its end time, and that both timestamps parse cleanly.
func validateTimeStamps(span *cloudtrace.Span) error {
	start, startErr := ptypes.Timestamp(span.StartTime)
	end, endErr := ptypes.Timestamp(span.EndTime)
	// Either timestamp failing to convert is reported the same way.
	if startErr != nil || endErr != nil {
		return statusMalformedTimestamp
	}
	if !start.Before(end) {
		return statusInvalidInterval
	}
	return nil
}
// validateAttributes verifies that a span has at most 32 attributes, where each attribute is a dictionary.
// The key is a string with max length of 128 bytes, and the value can be a string, int64 or bool.
// If the value is a string, it has a max length of 256 bytes.
// NOTE(review): this helper is also invoked for link attributes (validateLinks)
// and annotation attributes (validateTimeEvents), so the agent-attribute
// requirement below applies to those too whenever any attributes are present —
// confirm that is intended.
func validateAttributes(attributes *cloudtrace.Span_Attributes, maxAttributes int) error {
	// No attribute map at all is acceptable.
	if attributes == nil {
		return nil
	}
	if len(attributes.AttributeMap) > maxAttributes {
		return statusTooManyAttributes
	}
	containsAgent := false
	for k, v := range attributes.AttributeMap {
		if len(k) > maxAttributeKeyBytes {
			return statusInvalidAttributeKey
		}
		// Ensure that the special attributes have been translated properly.
		// Seeing a raw key such as "http.method" means the exporter failed to
		// map it to the canonical Cloud Trace form.
		if _, ok := specialAttributes[k]; ok {
			return statusUnmappedSpecialAttribute
		}
		// Only string-valued attributes have a length limit; int64/bool values
		// pass through unchecked.
		if val, ok := v.Value.(*cloudtrace.AttributeValue_StringValue); ok {
			if len(val.StringValue.Value) > maxAttributeValueBytes {
				return statusInvalidAttributeValue
			}
			// The span must contain the attribute "g.co/agent" or "agent".
			if k == agent || k == shortenedAgent {
				containsAgent = true
				if err := validateAgent(val.StringValue.Value); err != nil {
					return err
				}
			}
		}
	}
	if !containsAgent {
		return statusMissingAgentAttribute
	}
	return nil
}
// validateAgent checks that the g.co/agent or agent attribute is of the form
// opentelemetry-<language_code> <ot_version>; google-cloud-trace-exporter <exporter_version>
func validateAgent(agent string) error {
	if agentRegex.MatchString(agent) {
		return nil
	}
	return statusInvalidAgentAttribute
}
// validateTimeEvents verifies that a span has at most 32 TimeEvents.
// A TimeEvent consists of a TimeStamp, and either an Annotation or a MessageEvent.
// An Annotation is a dictionary that maps a string description to a list of attributes.
// A MessageEvent describes messages sent between spans and must contain an ID and size.
func validateTimeEvents(events *cloudtrace.Span_TimeEvents) error {
	// A span with no time events is valid.
	if events == nil {
		return nil
	}
	if len(events.TimeEvent) > maxTimeEvents {
		return statusTooManyTimeEvents
	}
	for _, event := range events.TimeEvent {
		// Every event, whichever variant it is, must carry a timestamp.
		if event.Time == nil {
			return statusTimeEventMissingTime
		}
		switch e := event.Value.(type) {
		case *cloudtrace.Span_TimeEvent_Annotation_:
			// NOTE(review): assumes e.Annotation and its Description are
			// non-nil — a malformed request could panic here; confirm.
			if len(e.Annotation.Description.Value) > maxAnnotationBytes {
				return statusInvalidAnnotation
			}
			// Annotation attributes get the same checks as span attributes,
			// with a lower cap.
			if err := validateAttributes(e.Annotation.Attributes, maxAnnotationAttributes); err != nil {
				return err
			}
		case *cloudtrace.Span_TimeEvent_MessageEvent_:
			// Both the message ID and the uncompressed size must be positive.
			if e.MessageEvent.Id <= 0 || e.MessageEvent.UncompressedSizeBytes <= 0 {
				return statusInvalidMessageEvent
			}
		}
	}
	return nil
}
// validateLinks verifies that a span has at most 128 links, which are used to link the span to another span.
// A link contains a traceId, spanId, the type of the span, and at most 32 attributes.
func validateLinks(links *cloudtrace.Span_Links) error {
	// Having no links at all is valid.
	if links == nil {
		return nil
	}
	if len(links.Link) > maxLinks {
		return statusTooManyLinks
	}
	for _, l := range links.Link {
		// Both identifiers are mandatory on every link.
		if l.TraceId == "" || l.SpanId == "" {
			return statusInvalidLink
		}
		// Link attributes are held to the same rules as span attributes.
		if err := validateAttributes(l.Attributes, maxAttributes); err != nil {
			return err
		}
	}
	return nil
}
| googleinterns/cloud-operations-api-mock | internal/validation/mock_trace_validation.go | GO | apache-2.0 | 10,158 |
package org.cnodejs.android.md.ui.jsbridge;
import android.webkit.JavascriptInterface;
import org.cnodejs.android.md.util.FormatUtils;
import org.joda.time.DateTime;
/**
 * JavaScript bridge exposing time-formatting helpers to WebView content.
 * Registered under {@link #NAME} via {@code WebView#addJavascriptInterface}.
 */
public final class FormatJavascriptInterface {

    /** Name under which this bridge is registered with the WebView. */
    public static final String NAME = "formatBridge";

    /**
     * Returns a human-readable relative time span for the given timestamp.
     *
     * @param time an ISO-8601 date-time string parseable by {@link DateTime}
     * @return the relative time span produced by {@link FormatUtils}
     */
    @JavascriptInterface
    public String getRelativeTimeSpanString(String time) {
        return FormatUtils.getRelativeTimeSpanString(new DateTime(time));
    }
}
| TakWolf/CNode-Material-Design | app/src/main/java/org/cnodejs/android/md/ui/jsbridge/FormatJavascriptInterface.java | Java | apache-2.0 | 439 |
package com.lingju.assistant.view;
import android.app.Dialog;
import android.content.Context;
import android.os.Bundle;
import android.view.View;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import com.lingju.assistant.R;
import java.util.HashMap;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
* Created by Ken on 2017/2/18.
*/
/**
 * Dialog that lets the user pick one of three predefined alarm items via a
 * radio group. The chosen label is reported through
 * {@link OnItemSelectedListener} and the dialog dismisses itself.
 */
public class AlarmItemDialog extends Dialog {
    @BindView(R.id.alarm_item_btns)
    RadioGroup mAlarmItemBtns;
    @BindView(R.id.alarm_item_rb1)
    RadioButton mAlarmItemRb1;
    @BindView(R.id.alarm_item_rb2)
    RadioButton mAlarmItemRb2;
    @BindView(R.id.alarm_item_rb3)
    RadioButton mAlarmItemRb3;
    // Currently selected item label; initialized from the constructor argument.
    private String mAlarmItem;
    private OnItemSelectedListener mSelectedListener;
    // Maps each radio button's label text to its view id, filled in fillMap().
    private Map<String, Integer> itemMaps = new HashMap<>();

    /**
     * @param context  host context
     * @param item     label of the initially selected item
     * @param listener callback invoked when the user picks an item
     */
    public AlarmItemDialog(Context context, String item, OnItemSelectedListener listener) {
        super(context, R.style.lingju_commond_dialog);
        mAlarmItem = item;
        mSelectedListener = listener;
    }

    protected AlarmItemDialog(Context context, int themeResId) {
        super(context, themeResId);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.dialog_alarm_item);
        ButterKnife.bind(this);
        fillMap();
        // Guard the lookup: auto-unboxing a null Integer would throw an NPE
        // when the stored item text matches none of the radio button labels.
        Integer checkedId = itemMaps.get(mAlarmItem);
        if (checkedId != null) {
            mAlarmItemBtns.check(checkedId);
        }
        mAlarmItemBtns.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(RadioGroup group, int checkedId) {
                RadioButton selectedBtn = (RadioButton) findViewById(checkedId);
                mAlarmItem = selectedBtn.getText().toString();
                if (mSelectedListener != null)
                    mSelectedListener.onSelected(mAlarmItem);
                dismiss();
            }
        });
    }

    // Builds the label -> view-id map from the inflated radio buttons.
    private void fillMap() {
        itemMaps.put(mAlarmItemRb1.getText().toString(), R.id.alarm_item_rb1);
        itemMaps.put(mAlarmItemRb2.getText().toString(), R.id.alarm_item_rb2);
        itemMaps.put(mAlarmItemRb3.getText().toString(), R.id.alarm_item_rb3);
    }

    @OnClick({R.id.tv_cancel})
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.tv_cancel:
                dismiss();
                break;
        }
    }

    /** Callback for the item the user finally selected. */
    public interface OnItemSelectedListener {
        void onSelected(String item);
    }
}
| LingjuAI/AssistantBySDK | app/src/main/java/com/lingju/assistant/view/AlarmItemDialog.java | Java | apache-2.0 | 2,902 |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app.data;
import android.annotation.TargetApi;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
public class WeatherProvider extends ContentProvider {
// The URI Matcher used by this content provider.
private static final UriMatcher sUriMatcher = buildUriMatcher();
private WeatherDbHelper mOpenHelper;
static final int WEATHER = 100;
static final int WEATHER_WITH_LOCATION = 101;
static final int WEATHER_WITH_LOCATION_AND_DATE = 102;
static final int LOCATION = 300;
private static final SQLiteQueryBuilder sWeatherByLocationSettingQueryBuilder;
static{
sWeatherByLocationSettingQueryBuilder = new SQLiteQueryBuilder();
//This is an inner join which looks like
//weather INNER JOIN location ON weather.location_id = location._id
sWeatherByLocationSettingQueryBuilder.setTables(
WeatherContract.WeatherEntry.TABLE_NAME + " INNER JOIN " +
WeatherContract.LocationEntry.TABLE_NAME +
" ON " + WeatherContract.WeatherEntry.TABLE_NAME +
"." + WeatherContract.WeatherEntry.COLUMN_LOC_KEY +
" = " + WeatherContract.LocationEntry.TABLE_NAME +
"." + WeatherContract.LocationEntry._ID);
}
//location.location_setting = ?
private static final String sLocationSettingSelection =
WeatherContract.LocationEntry.TABLE_NAME+
"." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? ";
//location.location_setting = ? AND date >= ?
private static final String sLocationSettingWithStartDateSelection =
WeatherContract.LocationEntry.TABLE_NAME+
"." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
WeatherContract.WeatherEntry.COLUMN_DATE + " >= ? ";
//location.location_setting = ? AND date = ?
private static final String sLocationSettingAndDaySelection =
WeatherContract.LocationEntry.TABLE_NAME +
"." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
WeatherContract.WeatherEntry.COLUMN_DATE + " = ? ";
private Cursor getWeatherByLocationSetting(Uri uri, String[] projection, String sortOrder) {
String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
long startDate = WeatherContract.WeatherEntry.getStartDateFromUri(uri);
String[] selectionArgs;
String selection;
if (startDate == 0) {
selection = sLocationSettingSelection;
selectionArgs = new String[]{locationSetting};
} else {
selectionArgs = new String[]{locationSetting, Long.toString(startDate)};
selection = sLocationSettingWithStartDateSelection;
}
return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
}
private Cursor getWeatherByLocationSettingAndDate(
Uri uri, String[] projection, String sortOrder) {
String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
long date = WeatherContract.WeatherEntry.getDateFromUri(uri);
return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
projection,
sLocationSettingAndDaySelection,
new String[]{locationSetting, Long.toString(date)},
null,
null,
sortOrder
);
}
/*
Students: Here is where you need to create the UriMatcher. This UriMatcher will
match each URI to the WEATHER, WEATHER_WITH_LOCATION, WEATHER_WITH_LOCATION_AND_DATE,
and LOCATION integer constants defined above. You can test this by uncommenting the
testUriMatcher test within TestUriMatcher.
*/
static UriMatcher buildUriMatcher() {
// 1) The code passed into the constructor represents the code to return for the root
// URI. It's common to use NO_MATCH as the code for this case. Add the constructor below.
UriMatcher aURIMatcher = new UriMatcher(UriMatcher.NO_MATCH);
// 2) Use the addURI function to match each of the types. Use the constants from
// WeatherContract to help define the types to the UriMatcher.
aURIMatcher.addURI(WeatherContract.CONTENT_AUTHORITY, WeatherContract.PATH_WEATHER, WEATHER);
aURIMatcher.addURI(WeatherContract.CONTENT_AUTHORITY, WeatherContract.PATH_WEATHER+"/*", WEATHER_WITH_LOCATION);
aURIMatcher.addURI(WeatherContract.CONTENT_AUTHORITY, WeatherContract.PATH_WEATHER+"/*/#", WEATHER_WITH_LOCATION_AND_DATE);
aURIMatcher.addURI(WeatherContract.CONTENT_AUTHORITY, WeatherContract.PATH_LOCATION, LOCATION);
// 3) Return the new matcher!
return aURIMatcher;
}
/*
Students: We've coded this for you. We just create a new WeatherDbHelper for later use
here.
*/
@Override
public boolean onCreate() {
mOpenHelper = new WeatherDbHelper(getContext());
return true;
}
/*
Students: Here's where you'll code the getType function that uses the UriMatcher. You can
test this by uncommenting testGetType in TestProvider.
*/
@Override
public String getType(Uri uri) {
// Use the Uri Matcher to determine what kind of URI this is.
final int match = sUriMatcher.match(uri);
switch (match) {
// Student: Uncomment and fill out these two cases
case WEATHER_WITH_LOCATION_AND_DATE:
return WeatherContract.WeatherEntry.CONTENT_ITEM_TYPE;
case WEATHER_WITH_LOCATION:
return WeatherContract.WeatherEntry.CONTENT_TYPE;
case WEATHER:
return WeatherContract.WeatherEntry.CONTENT_TYPE;
case LOCATION:
return WeatherContract.LocationEntry.CONTENT_TYPE;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
}
@Override
public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
String sortOrder) {
// Here's the switch statement that, given a URI, will determine what kind of request it is,
// and query the database accordingly.
Cursor retCursor;
switch (sUriMatcher.match(uri)) {
// "weather/*/*"
case WEATHER_WITH_LOCATION_AND_DATE:
{
retCursor = getWeatherByLocationSettingAndDate(uri, projection, sortOrder);
break;
}
// "weather/*"
case WEATHER_WITH_LOCATION: {
retCursor = getWeatherByLocationSetting(uri, projection, sortOrder);
break;
}
// "weather"
case WEATHER: {
retCursor = mOpenHelper.getReadableDatabase().query(
WeatherContract.WeatherEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
null);
break;
}
// "location"
case LOCATION: {
retCursor = mOpenHelper.getReadableDatabase().query(
WeatherContract.LocationEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
null);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
retCursor.setNotificationUri(getContext().getContentResolver(), uri);
return retCursor;
}
/*
Student: Add the ability to insert Locations to the implementation of this function.
*/
@Override
public Uri insert(Uri uri, ContentValues values) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
Uri returnUri;
switch (match) {
case WEATHER: {
normalizeDate(values);
long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, values);
if ( _id > 0 )
returnUri = WeatherContract.WeatherEntry.buildWeatherUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
break;
}
case LOCATION: {
long _id = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, values);
if ( _id > 0 )
returnUri = WeatherContract.LocationEntry.buildLocationUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
getContext().getContentResolver().notifyChange(uri, null);
db.close();
return returnUri;
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
// Student: Start by getting a writable database
// Student: Use the uriMatcher to match the WEATHER and LOCATION URI's we are going to
// handle. If it doesn't match these, throw an UnsupportedOperationException.
// Student: A null value deletes all rows. In my implementation of this, I only notified
// the uri listeners (using the content resolver) if the rowsDeleted != 0 or the selection
// is null.
// Oh, and you should notify the listeners here.
// Student: return the actual rows deleted
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
Uri returnUri;
int nbDeletedRows=0;
if (null == selection) selection = "1";
switch (match) {
case WEATHER: {
nbDeletedRows = db.delete(WeatherContract.WeatherEntry.TABLE_NAME, selection, selectionArgs);
break;
}
case LOCATION: {
nbDeletedRows = db.delete(WeatherContract.LocationEntry.TABLE_NAME, selection, selectionArgs);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (nbDeletedRows != 0)
{
getContext().getContentResolver().notifyChange(uri, null);
}
db.close();
return nbDeletedRows;
}
private void normalizeDate(ContentValues values) {
// normalize the date value
if (values.containsKey(WeatherContract.WeatherEntry.COLUMN_DATE)) {
long dateValue = values.getAsLong(WeatherContract.WeatherEntry.COLUMN_DATE);
values.put(WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.normalizeDate(dateValue));
}
}
@Override
public int update(
Uri uri, ContentValues values, String selection, String[] selectionArgs) {
// Student: This is a lot like the delete function. We return the number of rows impacted
// by the update.
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
Uri returnUri;
int nbUpdatedRows=0;
switch (match) {
case WEATHER: {
nbUpdatedRows = db.update(WeatherContract.WeatherEntry.TABLE_NAME, values, selection, selectionArgs);
break;
}
case LOCATION: {
nbUpdatedRows = db.update(WeatherContract.LocationEntry.TABLE_NAME, values, selection, selectionArgs);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (nbUpdatedRows != 0) getContext().getContentResolver().notifyChange(uri, null);
db.close();
return nbUpdatedRows;
}
@Override
public int bulkInsert(Uri uri, ContentValues[] values) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
switch (match) {
case WEATHER:
db.beginTransaction();
int returnCount = 0;
try {
for (ContentValues value : values) {
normalizeDate(value);
long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, value);
if (_id != -1) {
returnCount++;
}
}
db.setTransactionSuccessful();
} finally {
db.endTransaction();
}
getContext().getContentResolver().notifyChange(uri, null);
return returnCount;
default:
return super.bulkInsert(uri, values);
}
}
// You do not need to call this method. This is a method specifically to assist the testing
// framework in running smoothly. You can read more at:
// http://developer.android.com/reference/android/content/ContentProvider.html#shutdown()
@Override
@TargetApi(11)
public void shutdown() {
mOpenHelper.close();
super.shutdown();
}
} | aboukaram/sunshine-version-chadi | app/src/main/java/com/example/android/sunshine/app/data/WeatherProvider.java | Java | apache-2.0 | 14,859 |
/*
* Copyright 2010-2011 Nabeel Mukhtar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.code.linkedinapi.schema;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{}connect-type"/>
* <element ref="{}authorization"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
/**
 * An invitation request pairing the connection type with the authorization
 * data required to send it. Generated-schema style accessors.
 */
public interface InvitationRequest
    extends SchemaEntity
{


    /**
     * Gets the value of the connectType property.
     * 
     * @return
     *     possible object is
     *     {@link InviteConnectType }
     *     
     */
    InviteConnectType getConnectType();

    /**
     * Sets the value of the connectType property.
     * 
     * @param value
     *     allowed object is
     *     {@link InviteConnectType }
     *     
     */
    void setConnectType(InviteConnectType value);

    /**
     * Gets the value of the authorization property.
     * 
     * @return
     *     possible object is
     *     {@link Authorization }
     *     
     */
    Authorization getAuthorization();

    /**
     * Sets the value of the authorization property.
     * 
     * @param value
     *     allowed object is
     *     {@link Authorization }
     *     
     */
    void setAuthorization(Authorization value);

}
| shisoft/LinkedIn-J | core/src/main/java/com/google/code/linkedinapi/schema/InvitationRequest.java | Java | apache-2.0 | 2,192 |
/**
 * One documentation tree node: a leaf (has `url`) or a group (has `children`).
 */
export interface DocItem {
  // Display title shown in the docs navigation.
  title: string
  // Path to the rendered HTML asset; present on leaf entries only.
  url?: string
  // Nested entries; present on group entries only.
  children?: DocItem[]
}
/**
 * Static documentation tree rendered in the help/docs view.
 * Top-level entries are section groups; children are leaf articles.
 * Fix: corrected the user-visible typo "encironments" -> "environments"
 * (asset paths are unchanged).
 */
export const DOCS: DocItem[] = [
  {
    title: 'Projects',
    children: [
      {
        title: 'How to create a project?',
        url: 'assets/docs/projects/how_to_create_project.html'
      },
      {
        title: 'How to open, save or import a project?',
        url: 'assets/docs/projects/How_to_open_save_or_import_a_project.html'
      }
    ]
  },
  {
    title: 'Plugins',
    children: [
      {
        title: 'What is a plugin and how does it work?',
        url: 'assets/docs/plugins/How_does_a_plugin_work.html'
      }
    ]
  },
  {
    title: 'Execution environments',
    children: [
      {
        title: 'What is an entrypoint?',
        url: 'assets/docs/exec_env/What_is_an_entrypoint.html'
      },
      {
        title: 'How to change run commands and/or entrypoint?',
        url: 'assets/docs/exec_env/How_to_change_run_commands_andor_entrypoint.html'
      },
      {
        title: 'How to change editor themes?',
        url: 'assets/docs/exec_env/How_to_change_editor_themes.html'
      },
      {
        title: 'How to manage files and folders in my project?',
        url: 'assets/docs/exec_env/How_to_manage_files_and_folders_in_my_project.html'
      }
    ]
  },
  {
    title: 'Marketplace',
    children: [
      {
        title: 'How to install a plugin?',
        url: 'assets/docs/marketplace/How_to_install_a_plugin.html'
      },
      {
        title: 'Can I install more than one plugin?',
        url: 'assets/docs/marketplace/Can_I_install_more_than_one_plugin.html'
      },
      {
        title: 'How to create a plugin?',
        url: 'assets/docs/marketplace/How_to_create_a_plugin.html'
      },
      {
        title: 'How to become a developer?',
        url: 'assets/docs/marketplace/How_to_become_a_developer.html'
      }
    ]
  },
  {
    title: 'Miscellaneous',
    children: [
      {
        title: 'What are Identicons?',
        url: 'assets/docs/miscellaneous/What_are_Identicons.html'
      },
      {
        title: 'What are different user permissions?',
        url: 'assets/docs/miscellaneous/What_are_different_user_permissions.html'
      },
      {
        title: 'How to share code snippets from my project?',
        url: 'assets/docs/miscellaneous/How_to_share_code_snippets_from_my_project.html'
      }
    ]
  }
]
| yashdsaraf/reimagined-eureka | frontend/src/app/components/docs/docs-content.ts | TypeScript | apache-2.0 | 2,385 |
package com.wuyin.supermarket.manager;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
* Created by wuyin on 2016/5/2.
* 管理线程池
*/
/**
 * Singleton that manages a shared thread pool behind a small proxy API.
 * Obtain the singleton via {@link #getInstance()} and a pool via
 * {@link #createLongPool()} or {@link #createShortPool(int, int, long)}.
 */
public class ThreadManager {
    ThreadPoolProxy poolProxy;
    private static ThreadManager instance = new ThreadManager();

    private ThreadManager() {
    }

    /** Returns the process-wide singleton instance. */
    public static ThreadManager getInstance() {
        return instance;
    }

    /**
     * Thin wrapper around a lazily created {@link ThreadPoolExecutor}.
     */
    public class ThreadPoolProxy {
        ThreadPoolExecutor mPoolExecutor;
        // Pool configuration, captured at construction time.
        private int corePoolSize;
        private int maximunPoolSize;
        private long aliveTime;

        /**
         * @param corePoolSize    number of core threads in the pool
         * @param maximunPoolSize extra threads opened when the queue is full
         * @param time            idle thread keep-alive time in milliseconds
         */
        public ThreadPoolProxy(int corePoolSize, int maximunPoolSize, long time) {
            this.corePoolSize = corePoolSize;
            this.maximunPoolSize = maximunPoolSize;
            this.aliveTime = time;
        }

        /**
         * Submits a task, lazily creating the executor on first use.
         *
         * @param runnable the task to run asynchronously
         */
        public void execute(Runnable runnable) {
            // Synchronize lazy initialization: without it, two threads racing
            // through the null check could create two executors and leak one.
            synchronized (this) {
                if (mPoolExecutor == null) {
                    // Tasks beyond the pool's capacity wait in a bounded queue
                    // of 10 before extra (non-core) threads are spawned.
                    mPoolExecutor = new ThreadPoolExecutor(
                            corePoolSize, maximunPoolSize, aliveTime, TimeUnit.MILLISECONDS
                            , new LinkedBlockingQueue<Runnable>(10)
                    );
                }
            }
            mPoolExecutor.execute(runnable);
        }

        /**
         * Removes a not-yet-started task from the pool's queue.
         *
         * @param runnable the task to cancel
         */
        public void cancel(Runnable runnable) {
            // Fix: the original tested isTerminated() (true only AFTER a
            // shutdown completes), so remove() could never run on a live pool.
            // A live, non-shutdown pool is exactly when removal makes sense.
            if (mPoolExecutor != null && !mPoolExecutor.isShutdown() && !mPoolExecutor.isTerminated()) {
                mPoolExecutor.remove(runnable);
            }
        }
    }

    /**
     * Returns the shared pool with the default configuration
     * (5 core threads, 5 max, 5s keep-alive).
     */
    public synchronized ThreadPoolProxy createLongPool() {
        return createShortPool(5, 5, 5000);
    }

    /**
     * Returns the shared pool, creating it on first call.
     * NOTE: the proxy is cached — parameters are only honored on the first
     * invocation; subsequent calls return the existing pool unchanged.
     *
     * @param size    number of core threads in the pool
     * @param aliSize extra threads opened when the queue is full
     * @param time    idle thread keep-alive time in milliseconds
     */
    public synchronized ThreadPoolProxy createShortPool(int size, int aliSize, long time) {
        if (poolProxy == null) {
            poolProxy = new ThreadPoolProxy(size, aliSize, time);
        }
        return poolProxy;
    }
}
| wuyinlei/SuperMarket | src/main/java/com/wuyin/supermarket/manager/ThreadManager.java | Java | apache-2.0 | 3,126 |
#
# Author:: Joshua Timberman (<joshua@getchef.com>)
# Copyright (c) 2014, Chef Software, Inc. <legal@getchef.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# Unit spec for Chef::Resource::HomebrewPackage construction: verifies the
# resource's class, its registered resource_name, and the provider wiring.
describe Chef::Resource::HomebrewPackage, 'initialize' do

  # A fresh resource instance for the 'emacs' package, built per example.
  let(:resource) { Chef::Resource::HomebrewPackage.new('emacs') }

  it 'returns a Chef::Resource::HomebrewPackage' do
    expect(resource).to be_a_kind_of(Chef::Resource::HomebrewPackage)
  end

  it 'sets the resource_name to :homebrew_package' do
    expect(resource.resource_name).to eql(:homebrew_package)
  end

  it 'sets the provider to Chef::Provider::Package::Homebrew' do
    expect(resource.provider).to eql(Chef::Provider::Package::Homebrew)
  end
end
| jordane/chef | spec/unit/resource/homebrew_package_spec.rb | Ruby | apache-2.0 | 1,212 |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.client.android.backend.model;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import com.google.gson.annotations.SerializedName;
/**
 * Parcelable model for a Hawkular environment, identified by its id.
 */
public final class Environment implements Parcelable {
    @SerializedName("id")
    private String id;

    public Environment(@NonNull String id) {
        this.id = id;
    }

    /** Returns the environment identifier. */
    public String getId() {
        return id;
    }

    // Fix: the Parcelable contract requires a public static FINAL field named
    // CREATOR; the original field was mutable.
    public static final Creator<Environment> CREATOR = new Creator<Environment>() {
        @Override
        public Environment createFromParcel(Parcel parcel) {
            return new Environment(parcel);
        }

        @Override
        public Environment[] newArray(int size) {
            return new Environment[size];
        }
    };

    private Environment(Parcel parcel) {
        this.id = parcel.readString();
    }

    @Override
    public void writeToParcel(Parcel parcel, int flags) {
        parcel.writeString(id);
    }

    @Override
    public int describeContents() {
        // No file descriptors or special contents.
        return 0;
    }
}
| pilhuhn/hawkular-android-client | src/main/java/org/hawkular/client/android/backend/model/Environment.java | Java | apache-2.0 | 1,729 |
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.photoreccarver;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.CarvedFileContainer;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskFileRange;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
/**
* This class parses the xml output from PhotoRec, and creates a list of entries to add back in to be processed.
*/
class PhotoRecCarverOutputParser {

    /** Base directory of the PhotoRec output; used to strip path prefixes from carved file names. */
    private final Path basePath;
    private static final Logger logger = Logger.getLogger(PhotoRecCarverFileIngestModule.class.getName());

    PhotoRecCarverOutputParser(Path base) {
        basePath = base;
    }

    /**
     * Parses the given report.xml file, creating a List<LayoutFile> to return. Uses FileManager to add all carved files
     * that it finds to the TSK database as $CarvedFiles under the passed-in parent id.
     *
     * @param xmlInputFile The XML file we are trying to read and parse
     * @param id The parent id of the unallocated space we are parsing.
     * @param af The AbstractFile representing the unallocated space we are parsing.
     * @return A List<LayoutFile> containing all the files added into the database, {@code null} if the
     *         report could not be loaded or contained no file entries, or an empty list on parse/database errors.
     * @throws FileNotFoundException
     * @throws IOException
     */
    List<LayoutFile> parse(File xmlInputFile, long id, AbstractFile af) throws FileNotFoundException, IOException {
        try {
            final Document doc = XMLUtil.loadDoc(PhotoRecCarverOutputParser.class, xmlInputFile.toString());
            if (doc == null) {
                return null;
            }
            Element root = doc.getDocumentElement();
            if (root == null) {
                logger.log(Level.SEVERE, "Error loading config file: invalid file format (bad root)."); //NON-NLS
                return null;
            }
            NodeList fileObjects = root.getElementsByTagName("fileobject"); //NON-NLS
            final int numberOfFiles = fileObjects.getLength();
            if (numberOfFiles == 0) {
                return null;
            }
            String fileName;
            Long fileSize;
            NodeList fileNames;
            NodeList fileSizes;
            NodeList fileRanges;
            Element entry;
            Path filePath;
            FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
            // create and initialize the list to put into the database
            List<CarvedFileContainer> carvedFileContainer = new ArrayList<>();
            for (int fileIndex = 0; fileIndex < numberOfFiles; ++fileIndex) {
                entry = (Element) fileObjects.item(fileIndex);
                fileNames = entry.getElementsByTagName("filename"); //NON-NLS
                fileSizes = entry.getElementsByTagName("filesize"); //NON-NLS
                fileRanges = entry.getElementsByTagName("byte_run"); //NON-NLS
                fileSize = Long.parseLong(fileSizes.item(0).getTextContent());
                fileName = fileNames.item(0).getTextContent();
                filePath = Paths.get(fileName);
                // PhotoRec reports absolute paths; keep only the file name when it lives under basePath.
                if (filePath.startsWith(basePath)) {
                    fileName = filePath.getFileName().toString();
                }
                List<TskFileRange> tskRanges = new ArrayList<>();
                for (int rangeIndex = 0; rangeIndex < fileRanges.getLength(); ++rangeIndex) {
                    Long img_offset = Long.parseLong(((Element) fileRanges.item(rangeIndex)).getAttribute("img_offset")); //NON-NLS
                    Long len = Long.parseLong(((Element) fileRanges.item(rangeIndex)).getAttribute("len")); //NON-NLS
                    // Verify PhotoRec's output
                    long fileByteStart = af.convertToImgOffset(img_offset);
                    if (fileByteStart == -1) {
                        // This better never happen... Data for this file is corrupted. Skip it.
                        logger.log(Level.INFO, "Error while parsing PhotoRec output for file {0}", fileName); //NON-NLS
                        continue;
                    }
                    // check that carved file is within unalloc block
                    long fileByteEnd = img_offset + len;
                    if (fileByteEnd > af.getSize()) {
                        long overshoot = fileByteEnd - af.getSize();
                        if (fileSize > overshoot) {
                            // Truncate the reported size so it fits inside the unallocated block.
                            fileSize = fileSize - overshoot;
                        } else {
                            // This better never happen... Data for this file is corrupted. Skip it.
                            continue;
                        }
                    }
                    tskRanges.add(new TskFileRange(fileByteStart, len, rangeIndex));
                }
                if (!tskRanges.isEmpty()) {
                    carvedFileContainer.add(new CarvedFileContainer(fileName, fileSize, id, tskRanges));
                }
            }
            return fileManager.addCarvedFiles(carvedFileContainer);
        }
        catch (NumberFormatException | TskCoreException ex) {
            // Pass the exception as the Throwable argument so the full stack trace is
            // recorded. The original passed it as a '{0}' message parameter, which
            // logged only ex.toString() and dropped the trace.
            logger.log(Level.SEVERE, "Error parsing PhotoRec output and inserting it into the database", ex); //NON-NLS
        }
        return Collections.emptyList();
    }
}
| sidheshenator/autopsy | Core/src/org/sleuthkit/autopsy/modules/photoreccarver/PhotoRecCarverOutputParser.java | Java | apache-2.0 | 6,641 |
// Doxygen-generated navigation index for detect-flowint_8c.
// Each entry is [display name, target HTML anchor, children (unused)].
// Do not edit by hand; regenerated by the documentation build.
var detect_flowint_8c =
[
    [ "MAX_SUBSTRINGS", "detect-flowint_8c.html#a7d9ab03945d9f1a2af62c4bb49206536", null ],
    [ "PARSE_REGEX", "detect-flowint_8c.html#adcc3158aa6bb4d1bd0ddf953a33f55ec", null ],
    [ "DetectFlowintFree", "detect-flowint_8c.html#a7fbc34befd7d405cffd01896b8fddf6f", null ],
    [ "DetectFlowintMatch", "detect-flowint_8c.html#a40c8b26b75abf03617041f16184e36e3", null ],
    [ "DetectFlowintParse", "detect-flowint_8c.html#a5de91a84b01bbd9025e80cb39d1afed6", null ],
    [ "DetectFlowintPrintData", "detect-flowint_8c.html#aeaa2897d6d4cf82a976ab60a421073c2", null ],
    [ "DetectFlowintRegister", "detect-flowint_8c.html#ac30dcb9bacbf06d17420281f2b697864", null ],
    [ "DetectFlowintRegisterTests", "detect-flowint_8c.html#acf3e2f66ce897b5d596da1391084f7f4", null ]
];
/*
* gucefVFS: GUCEF module implementing a Virtual File System
* Copyright (C) 2002 - 2007. Dinand Vanvelzen
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*-------------------------------------------------------------------------//
// //
// INCLUDES //
// //
//-------------------------------------------------------------------------*/
#include "gucefVFS_CVFSHandle.h"
/*-------------------------------------------------------------------------//
// //
// NAMESPACE //
// //
//-------------------------------------------------------------------------*/
namespace GUCEF {
namespace VFS {
/*-------------------------------------------------------------------------//
// //
// UTILITIES //
// //
//-------------------------------------------------------------------------*/
/**
 *  Constructs a handle for a VFS file that is reached directly through the
 *  given I/O access object; no in-memory buffer is attached (m_bufferPtr
 *  stays NULL, so IsLoadedInMemory() will report false).
 *
 *  NOTE(review): ownership of 'fileAccess' appears to remain with the
 *  caller - the destructor does not release it; confirm at call sites.
 */
CVFSHandle::CVFSHandle( CORE::CIOAccess* fileAccess ,
                        const CORE::CString& filename ,
                        const CORE::CString& filePath )
    : m_fileAccess( fileAccess ) ,
      m_filename( filename ) ,
      m_bufferPtr() ,
      m_filePath( filePath )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
/**
 *  Constructs a handle for a VFS file whose content is also held in the
 *  given shared memory buffer (IsLoadedInMemory() will report true).
 */
CVFSHandle::CVFSHandle( CORE::CIOAccess* fileAccess ,
                        const CORE::CString& filename ,
                        const CORE::CString& filePath ,
                        TDynamicBufferPtr& bufferPtr )
    : m_fileAccess( fileAccess ) ,
      m_filename( filename ) ,
      m_bufferPtr( bufferPtr ) ,
      m_filePath( filePath )
{GUCEF_TRACE;
}
/*-------------------------------------------------------------------------*/
/**
 *  Destructor. Intentionally releases nothing: the handle only stores the
 *  access pointer and buffer reference, it does not own the I/O access object.
 */
CVFSHandle::~CVFSHandle()
{GUCEF_TRACE;
    /* dont do anything here, this is just a storage / encapsulation class */
}
/*-------------------------------------------------------------------------*/
/**
 *  Returns the file name this handle was created with.
 */
const CORE::CString&
CVFSHandle::GetFilename( void ) const
{GUCEF_TRACE;
    return m_filename;
}
/*-------------------------------------------------------------------------*/
/**
 *  Returns the (VFS) path of the file this handle refers to.
 */
const CORE::CString&
CVFSHandle::GetFilePath( void ) const
{GUCEF_TRACE;
    return m_filePath;
}
/*-------------------------------------------------------------------------*/
/**
 *  Returns the I/O access object for the file.
 *  May be used for reading; the pointer is the one passed at construction.
 */
CORE::CIOAccess*
CVFSHandle::GetAccess( void )
{GUCEF_TRACE;
    return m_fileAccess;
}
/*-------------------------------------------------------------------------*/
/**
 *  Whether the file content is held in an in-memory buffer (true) or is only
 *  reachable through the I/O access object (false). Determined by whether a
 *  buffer was supplied at construction time.
 */
bool
CVFSHandle::IsLoadedInMemory( void ) const
{GUCEF_TRACE;
    return !m_bufferPtr.IsNULL();
}
/*-------------------------------------------------------------------------//
// //
// NAMESPACE //
// //
//-------------------------------------------------------------------------*/
}; /* namespace VFS */
}; /* namespace GUCEF */
/*-------------------------------------------------------------------------*/
| LiberatorUSA/GUCEF | platform/gucefVFS/src/gucefVFS_CVFSHandle.cpp | C++ | apache-2.0 | 4,430 |
// ============================================================================
//
// Copyright (C) 2006-2018 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// https://github.com/Talend/data-prep/blob/master/LICENSE
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.dataprep.api.service.command.preparation;
import static org.springframework.beans.factory.config.BeanDefinition.SCOPE_PROTOTYPE;
import static org.talend.dataprep.command.Defaults.asNull;
import java.net.URISyntaxException;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder;
import org.springframework.context.annotation.Scope;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import org.talend.dataprep.command.GenericCommand;
import org.talend.dataprep.exception.TDPException;
import org.talend.dataprep.exception.error.CommonErrorCodes;
/**
* Command used to move a preparation.
*/
@Component
@Scope(SCOPE_PROTOTYPE)
public class PreparationMove extends GenericCommand<Void> {

    /**
     * Default constructor.
     *
     * @param id the id of the preparation to move.
     * @param folder the folder where the preparation currently lives.
     * @param destination the folder to move the preparation to.
     * @param newName optional new name for the preparation.
     */
    // private constructor to ensure the IoC
    private PreparationMove(String id, String folder, String destination, String newName) {
        super(GenericCommand.DATASET_GROUP);
        execute(() -> onExecute(id, folder, destination, newName));
        on(HttpStatus.OK).then(asNull());
    }

    /**
     * Builds the HTTP PUT request that asks the preparation service to move the preparation.
     *
     * @param id the preparation id.
     * @param folder source folder (skipped when blank).
     * @param destination target folder (skipped when blank).
     * @param newName new preparation name (skipped when blank).
     * @return the request to send to the preparation service.
     */
    private HttpRequestBase onExecute(String id, String folder, String destination, String newName) {
        try {
            final URIBuilder builder = new URIBuilder(preparationServiceUrl + "/preparations/" + id + "/move");
            addParameterIfNotBlank(builder, "folder", folder);
            addParameterIfNotBlank(builder, "destination", destination);
            addParameterIfNotBlank(builder, "newName", newName);
            return new HttpPut(builder.build());
        } catch (URISyntaxException e) {
            throw new TDPException(CommonErrorCodes.UNEXPECTED_EXCEPTION, e);
        }
    }

    /** Adds the query parameter only when its value carries actual text. */
    private static void addParameterIfNotBlank(URIBuilder builder, String name, String value) {
        if (StringUtils.isNotBlank(value)) {
            builder.addParameter(name, value);
        }
    }
}
| Talend/data-prep | dataprep-api/src/main/java/org/talend/dataprep/api/service/command/preparation/PreparationMove.java | Java | apache-2.0 | 2,751 |
/**
* Copyright 2014 Eediom Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.araqne.logdb.query.expr;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
public class ArrayTest {

    @Test
    public void testManual() {
        // There is no way to express null without using field yet.
        // assertArrayEquals(new Object[] {null}, (Object[])FunctionUtil.parseExpr("array(null)").eval(null));
        assertListEquals(Arrays.asList(new Object[] { 1 }), toList(FunctionUtil.parseExpr("array(1)").eval(null)));
        assertListEquals(Arrays.asList(new Object[] { "hello", "world" }),
                toList(FunctionUtil.parseExpr("array(\"hello\", \"world\")").eval(null)));
        assertListEquals(Arrays.asList(new Object[] { 42L, "the answer to life, the universe, and everything" }),
                toList(FunctionUtil.parseExpr("array(21 * 2, \"the answer to life, the universe, and everything\")").eval(null)));
    }

    /** Unchecked view of an expression result known to be a List. */
    @SuppressWarnings("unchecked")
    static List<Object> toList(Object t) {
        return (List<Object>) t;
    }

    /**
     * Asserts that both lists are either null together or equal element by element.
     */
    static void assertListEquals(List<Object> expected, List<Object> actual) {
        if (expected == null) {
            assertNull(actual);
            // Fix: return here. The original fell through and threw a
            // NullPointerException on expected.size() below when both were null.
            return;
        }
        assertNotNull(actual);
        assertEquals(expected.size(), actual.size());
        for (int i = 0, n = expected.size(); i < n; ++i) {
            assertEquals(expected.get(i), actual.get(i));
        }
    }
}
| araqne/logdb | araqne-logdb/src/test/java/org/araqne/logdb/query/expr/ArrayTest.java | Java | apache-2.0 | 1,892 |
using dBosque.Stub.Interfaces;
using dBosque.Stub.Services;
using dBosque.Stub.Services.Extensions;
using dBosque.Stub.Services.Types;
using dBosque.Stub.Server.Soap.Interface;
using Microsoft.AspNetCore.Http;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Runtime.Serialization;
using System.ServiceModel.Channels;
using System.Text;
using System.Xml;
using System.Xml.Linq;
namespace dBosque.Stub.Server.Soap.Types
{
public class SoapStubMessage : StubMessage<Message>
{
private MessageVersion Version { get; set; }
private Message Message { get; set; }
private MessageBuffer Buffer { get; set; }
/// <summary>
/// Create a new StubMessage class based on a WCF message
/// </summary>
/// <param name="msg"></param>
public SoapStubMessage(Message msg, HttpContext context, string tenant )
: base(tenant, 200, ContentTypes.ApplicationXml)
{
var connectionInfo = context?.Connection;
Sender = $"{connectionInfo?.RemoteIpAddress}:{connectionInfo?.RemotePort}";
Buffer = msg.CreateBufferedCopy(8192);
Message = Buffer.CreateMessage();
ParseMessage();
}
private void ParseMessage()
{
var xrdr = Message.GetReaderAtBodyContents();
Action = Message.Headers.Action;
RootNameSpace = xrdr.NamespaceURI;
RootNode = xrdr.LocalName;
var body = Request = RawRequest = xrdr.ReadOuterXml();
Request = Message.ToString();
Request = Request.Replace("... stream ...", body);
Version = Message.Version;
}
///<summary>
///Return a valid UnAuthorized message
///</summary>
///<returns></returns>
public override Message AsUnauthorized()
{
throw new StubErrorException(System.Net.HttpStatusCode.Unauthorized);
}
///<summary>
///Return a valid result message
///</summary>
///<returns></returns>
public override Message AsResult()
{
string messageStream = string.IsNullOrEmpty(Response)?"<empty/>":Response;
if (!HasMatch && !IsPassTrough)
messageStream = SerializeToString(AsFault());
var memoryStream = new MemoryStream(Encoding.UTF8.GetBytes(messageStream));
var reader = XmlReader.Create(memoryStream, new XmlReaderSettings() { XmlResolver = null, DtdProcessing = DtdProcessing.Prohibit });
if (HttpStatusCode >= 300)
throw new StubErrorException(Convert(), Message.CreateMessage(Version, Action, reader));
return Message.CreateMessage(Version, Action, reader);
}
/// <summary>
/// Generate a MatchList based on all matches
/// </summary>
/// <returns></returns>
private List<Match> ToMatchList()
{
var matches = new List<Match>();
foreach (var m in Matches)
{
var match = new Match(m.Description);
m.Items.ForEach(x => match.XPath.Add(new XPath(x.Expression, x.Value)));
matches.Add(match);
}
return matches;
}
private static XmlElement GetElement(string xml)
{
XmlDocument doc = new XmlDocument();
doc.LoadXml(xml);
return doc.DocumentElement;
}
/// <summary>
/// Create a faultcontract
/// </summary>
/// <returns></returns>
private StubFaultContract AsFault()
{
StubFaultContract result = null;
if (!HasMatch && Matches.Count == 0)
result = new StubFaultContract()
{
Request = Request == null ? null : GetElement(Request),
Message = Matches.Error
};
else if (!HasMatch && Matches.Count > 0)
result = new StubFaultContract()
{
Message = "Multiple matches found.",
Request = GetElement(Request),
Matches = ToMatchList()
};
else if (HasMatch)
result = new StubFaultContract()
{
Message = "One match found.",
Request = GetElement(Request),
Matches = ToMatchList()
};
return result;
}
///<summary>
///Localize a xpath
///</summary>
///<param name="xpath"></param>
///<returns></returns>
public override string LocalizeXpath(string xpath)
{
return xpath.AppendSoapEnvelope();
}
private string SerializeToString<T>(T obj, string request = null)
{
DataContractSerializer serializer = new DataContractSerializer(typeof(T));
MemoryStream memoryStream = new MemoryStream();
serializer.WriteObject(memoryStream, obj);
memoryStream.Position = 0;
StreamReader r = new StreamReader(memoryStream);
string reader = r.ReadToEnd();
if (request != null)
reader = reader.Replace("<Request i:nil=\"true\"/>", $"<Request>{request}</Request");
return reader;
}
///<summary>
///Relay the message to the specific uri.
///</summary>
///<param name="uri"></param>
public override void Relay(string uri)
{
Message = Buffer.CreateMessage();
// Remove all headers.
Message.Headers.ToList().ForEach(a => Message.Headers.RemoveAll(a.Name, a.Namespace));
Message.Headers.RemoveAll("To", "http://schemas.microsoft.com/ws/2005/05/addressing/none");
ParseMessage();
HttpWebRequest webRequest = (HttpWebRequest)WebRequest.Create(uri);
UpdateHeaderProperties(webRequest);
webRequest.Method = "POST";
webRequest.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
XmlDocument soapEnvelopeXml = new XmlDocument();
soapEnvelopeXml.LoadXml(Request);
using (Stream stream = webRequest.GetRequestStream())
{
soapEnvelopeXml.Save(stream);
}
try
{
using (WebResponse response = webRequest.GetResponse())
{
Response = ParseResponse(response);
}
}
catch (WebException ex)
{
Response = ParseResponse(ex.Response);
}
Matches = new StubMatchList();
}
private string ParseResponse(WebResponse response)
{
using (StreamReader rd = new StreamReader(response.GetResponseStream()))
{
string soapResult = rd.ReadToEnd();
XDocument xDoc = XDocument.Load(new StringReader(soapResult));
var unwrappedResponse = xDoc.Descendants((XNamespace)"http://schemas.xmlsoap.org/soap/envelope/" + "Body")
.First()
.FirstNode;
return unwrappedResponse.ToString();
}
}
private string GetHeaderValue(WebHeaderCollection headers, string key, string def = null)
{
return headers.AllKeys.Contains(key) ? headers[key] : def;
}
private void UpdateHeaderProperties(HttpWebRequest request)
{
WebHeaderCollection headers = new WebHeaderCollection();
if (Message.Properties.ContainsKey(HttpRequestMessageProperty.Name))
{
headers = (Message.Properties[HttpRequestMessageProperty.Name] as HttpRequestMessageProperty).Headers;
foreach (var header in headers.AllKeys)
{
try
{
request.Headers.Add(header, headers[header]);
}
catch
{
// catch all
}
}
}
request.ContentType = GetHeaderValue(headers, "ContentType", $"{ContentTypes.TextXml};charset=\"utf-8\"");
request.Accept = GetHeaderValue(headers, "Accept", ContentTypes.TextXml);
}
}
}
| dbosque/Stub | dBosque.Stub.Server.Soap/Types/SoapStubMessage.cs | C# | apache-2.0 | 8,596 |
package com.hantsylabs.example.spring.jpa.spec;
import java.util.Date;
import com.hantsylabs.example.spring.model.QConference;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Predicate;
public class QueryDslPredicates {
public static Predicate inProgressConferences() {
QConference conf = QConference.conference;
final Date now = new Date();
BooleanBuilder builder = new BooleanBuilder();
return builder.and(conf.startedDate.before(now))
.and(conf.endedDate.after(now)).getValue();
}
public static Predicate pastConferences(Date _past) {
QConference conf = QConference.conference;
final Date now = new Date();
BooleanBuilder builder = new BooleanBuilder();
builder.and(conf.endedDate.before(now));
if (_past != null) {
builder.and(conf.startedDate.after(_past));
}
return builder.getValue();
}
public static Predicate upcomingConferences() {
QConference conf = QConference.conference;
final Date now = new Date();
return conf.startedDate.after(now);
}
}
| hantsy/spring4-sandbox | data-jpa/src/main/java/com/hantsylabs/example/spring/jpa/spec/QueryDslPredicates.java | Java | apache-2.0 | 1,031 |
// Copyright (c) 2016 Tigera, Inc. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package encap
// Mode is the IP-in-IP encapsulation mode for a pool.
type Mode string

const (
	// Fix: type every constant explicitly. In the original only Undefined was
	// typed; "Always" and "CrossSubnet" were untyped string constants, so they
	// did not carry the Mode type the declaration block implies.
	Undefined   Mode = ""
	Always      Mode = "always"
	CrossSubnet Mode = "cross-subnet"
)

// DefaultMode is the encapsulation mode assumed when none is specified.
const DefaultMode = Always
| projectcalico/calico | libcalico-go/lib/backend/encap/ipip.go | GO | apache-2.0 | 771 |
package com.mjrichardson.teamCity.buildTriggers.Fakes;
import com.mjrichardson.teamCity.buildTriggers.CacheManager;
import java.net.URI;
import java.util.HashMap;
import java.util.UUID;
/**
 * In-memory CacheManager test double. Entries are keyed by cache name plus URI;
 * the correlation id is accepted but ignored.
 */
public class FakeCacheManager implements CacheManager {

    private final HashMap<String, String> cache = new HashMap<>();

    /** Builds the composite cache key; kept in one place so get/add cannot drift apart. */
    private static String keyFor(CacheNames cacheName, URI uri) {
        return cacheName.name() + "|" + uri.toString();
    }

    @Override
    public String getFromCache(CacheNames cacheName, URI uri, UUID correlationId) {
        // HashMap.get already returns null for a missing key, so the original
        // containsKey + get double lookup is unnecessary.
        return cache.get(keyFor(cacheName, uri));
    }

    @Override
    public void addToCache(CacheNames cacheName, URI uri, String body, UUID correlationId) {
        cache.put(keyFor(cacheName, uri), body);
    }
}
| matt-richardson/teamcity-octopus-build-trigger-plugin | octopus-build-trigger-server/src/test/java/com/mjrichardson/teamCity/buildTriggers/Fakes/FakeCacheManager.java | Java | apache-2.0 | 773 |
package javaselast.examples.csv;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.Field;
/**
 * Reader wrapper intended to map CSV rows onto typed records.
 * Raw character reads are delegated to a buffered view of the wrapped reader.
 */
public class CsvReader extends Reader {

    /** Buffered view of the wrapped reader; all character I/O goes through it. */
    private final BufferedReader delegate;

    public CsvReader(Reader reader) {
        delegate = new BufferedReader(reader);
    }

    /**
     * Instantiates the given record type via its no-arg constructor.
     * Column-to-field mapping is not implemented yet, so this always
     * returns {@code null}; reflection failures are printed and swallowed.
     */
    public <T> T readNext(Class<T> type) {
        try {
            type.newInstance();
        } catch (InstantiationException | IllegalAccessException e) {
            e.printStackTrace();
        }
        return null;
    }

    @Override
    public int read(char[] cbuf, int off, int len) throws IOException {
        return delegate.read(cbuf, off, len);
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }
}
| nesterione/JavaTrainings | src/javaselast/examples/csv/CsvReader.java | Java | apache-2.0 | 945 |
import logging
import pika
import sys
# Select the stdlib queue module under both Python 2 and Python 3.
# sys.version_info is the documented way to test the major version;
# indexing the sys.version *string* works only by accident.
if sys.version_info[0] == 2:
    import Queue as queue
else:
    import queue as queue
from .. import rabbitutils
import esgfpid.defaults as defaults
from esgfpid.utils import loginfo, logdebug, logtrace, logerror, logwarn, log_every_x_times
# Module-level logger. The NullHandler keeps this library silent unless the
# embedding application configures logging handlers itself.
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.NullHandler())
'''
The RabbitFeeder is responsible for publishing messages to RabbitMQ.
It is very simple. Basically the only method it exposes
(except for some simple getter/setter which is rarely ever used)
is publish_message(), which is called from the main thread.
'''
class RabbitFeeder(object):
def __init__(self, thread, statemachine, nodemanager):
self.thread = thread
'''
Read-only.
Before publishing a message, we check the state, and we log
the state. '''
self.statemachine = statemachine
self.nodemanager = nodemanager
'''
The deliver_number is important. It defines the number of the message
that is used to identify it between this client and the RabbitMQ
server (e.g. so the correct messages are deleted upon confirmation).
It makes sure that rabbit server and this client talk about the
same message.
NEVER EVER INCREMENT OR OTHERWISE MODIFY THIS!
From the RabbitMQ docs:
"The delivery tag is valid only within the channel from which
the message was received. I.e. a client MUST NOT receive a
message on one channel and then acknowledge it on another."
Source: https://www.rabbitmq.com/amqp-0-9-1-reference.html '''
self.__delivery_number = 1
# Logging
self.__first_publication_trigger = True
self.__logcounter_success = 0 # counts successful publishes!
self.__logcounter_trigger = 0 # counts triggers!
self.__LOGFREQUENCY = 10
self.__have_not_warned_about_connection_fail_yet = True
self.__have_not_warned_about_force_close_yet = True
'''
Triggers the publication of one message to RabbitMQ, if the
state machine currently allows this.
The message is fetched from the Queue of unpublished messages.
So far, whenever the library wants to publish messages, it
fires as many of these "publish_message" events as messages
were published (and some extra, to be sure).
If some of these triggers cannot be acted upon, as the module
is not in a state where it is allowed to publish, the triggers
should be fired as soon as the module is in available state
again.
# TODO: Find better way to make sure enough publish events are fired.
Are we sure there is not ever a way to have some messages
in the unpublished Queue that could be sent, but aren't, because
no event was fired for them? For example, if an exception occurs
during publish, and the message was put back - will there ever
be an event to trigger its publication? I don't think so.
Interim solution (hack):
(a) At the moment, for every message that the publisher hands
over, I fire two events (rabbitthread).
(b) During the close-down algorithm, if there is unpublished
messages, I fire publish events, to make sure they are
published (thread_shutter).
'''
def publish_message(self):
try:
return self.__publish_message()
except Exception as e:
logwarn(LOGGER, 'Error in feeder.publish_message(): %s: %s', e.__class__.__name__, repr(e))
raise e
def __publish_message(self):
self.__logcounter_trigger += 1
if self.statemachine.is_NOT_STARTED_YET() or self.statemachine.is_WAITING_TO_BE_AVAILABLE():
log_every_x_times(LOGGER, self.__logcounter_trigger, self.__LOGFREQUENCY, 'Received early trigger for feeding the rabbit (trigger %i).', self.__logcounter_trigger)
self.__log_why_cannot_feed_the_rabbit_now()
elif self.statemachine.is_AVAILABLE() or self.statemachine.is_AVAILABLE_BUT_WANTS_TO_STOP():
log_every_x_times(LOGGER, self.__logcounter_trigger, self.__LOGFREQUENCY, 'Received trigger for publishing message to RabbitMQ (trigger %i).', self.__logcounter_trigger)
self.__log_publication_trigger()
self.__publish_message_to_channel()
elif self.statemachine.is_PERMANENTLY_UNAVAILABLE() or self.statemachine.is_FORCE_FINISHED():
log_every_x_times(LOGGER, self.__logcounter_trigger, self.__LOGFREQUENCY, 'Received late trigger for feeding the rabbit (trigger %i).', self.__logcounter_trigger)
self.__log_why_cannot_feed_the_rabbit_now()
''' This method only logs. '''
def __log_publication_trigger(self):
if self.__first_publication_trigger:
logdebug(LOGGER, 'Received first trigger for publishing message to RabbitMQ.')
self.__first_publication_trigger = False
logtrace(LOGGER, 'Received trigger for publishing message to RabbitMQ, and module is ready to accept it.')
''' This method only logs, depending on the state machine's state.'''
def __log_why_cannot_feed_the_rabbit_now(self):
log_every_x_times(LOGGER, self.__logcounter_trigger, self.__LOGFREQUENCY, 'Cannot publish message to RabbitMQ (trigger no. %i).', self.__logcounter_trigger)
if self.statemachine.is_WAITING_TO_BE_AVAILABLE():
logdebug(LOGGER, 'Cannot publish message to RabbitMQ yet, as the connection is not ready.')
elif self.statemachine.is_NOT_STARTED_YET():
logerror(LOGGER, 'Cannot publish message to RabbitMQ, as the thread is not running yet.')
elif self.statemachine.is_PERMANENTLY_UNAVAILABLE() or self.statemachine.is_FORCE_FINISHED():
if self.statemachine.detail_could_not_connect:
logtrace(LOGGER, 'Could not publish message to RabbitMQ, as the connection failed.')
if self.__have_not_warned_about_connection_fail_yet:
logwarn(LOGGER, 'Could not publish message(s) to RabbitMQ. The connection failed definitively.')
self.__have_not_warned_about_connection_fail_yet = False
elif self.statemachine.get_detail_closed_by_publisher():
logtrace(LOGGER, 'Cannot publish message to RabbitMQ, as the connection was closed by the user.')
if self.__have_not_warned_about_force_close_yet:
logwarn(LOGGER, 'Could not publish message(s) to RabbitMQ. The sender was closed by the user.')
self.__have_not_warned_about_force_close_yet = False
else:
if self.thread._channel is None:
logerror(LOGGER, 'Very unexpected. Could not publish message(s) to RabbitMQ. There is no channel.')
'''
Retrieves a message from stack and tries to publish it
to RabbitMQ.
In case of failure, it is put back. In case of success,
it is handed on to the confirm module that is responsible
for waiting for RabbitMQ's confirmation.
Note: The publish may cause an error if the Channel was closed.
A closed Channel should be handled in the on_channel_close()
callback, but we catch it here in case the clean up was not quick enough.
'''
def __publish_message_to_channel(self):
# Find a message to publish.
# If no messages left, well, nothing to publish!
try:
message = self.__get_message_from_stack()
except queue.Empty as e:
logtrace(LOGGER, 'Queue empty. No more messages to be published.')
return
# Now try to publish it.
# If anything goes wrong, you need to put it back to
# the stack of unpublished messages!
try:
success = self.__try_publishing_otherwise_put_back_to_stack(message)
if success:
self.__postparations_after_successful_feeding(message)
# Treat various errors that may occur during publishing:
except pika.exceptions.ChannelClosed as e:
logwarn(LOGGER, 'Cannot publish message %i to RabbitMQ because the Channel is closed (%s)', self.__delivery_number+1, repr(e))
except AttributeError as e:
if self.thread._channel is None:
logwarn(LOGGER, 'Cannot publish message %i to RabbitMQ because there is no channel.', self.__delivery_number+1)
else:
logwarn(LOGGER, 'Cannot publish message %i to RabbitMQ (unexpected error %s:%s)', self.__delivery_number+1, e.__class__.__name__, repr(e))
except AssertionError as e:
logwarn(LOGGER, 'Cannot publish message to RabbitMQ %i because of AssertionError: "%s"', self.__delivery_number+1,e)
if 'A non-string value was supplied for self.exchange' in repr(e):
exch = self.thread.get_exchange_name()
logwarn(LOGGER, 'Exchange was "%s" (type %s)', exch, type(exch))
'''
Retrieve an unpublished message from stack.
Note: May block for up to 2 seconds.
:return: A message from the stack of unpublished messages.
:raises: queue.Empty.
'''
def __get_message_from_stack(self, seconds=0):
message = self.thread.get_message_from_unpublished_stack(seconds)
logtrace(LOGGER, 'Found message to be published. Now left in queue to be published: %i messages.', self.thread.get_num_unpublished())
return message
'''
This tries to publish the message and puts it back into the
Queue if it failed.
:param message: Message to be sent.
:raises: pika.exceptions.ChannelClosed, if the Channel is closed.
'''
def __try_publishing_otherwise_put_back_to_stack(self, message):
try:
# Getting message info:
properties = self.nodemanager.get_properties_for_message_publications()
routing_key, msg_string = rabbitutils.get_routing_key_and_string_message_from_message_if_possible(message)
routing_key = self.nodemanager.adapt_routing_key_for_untrusted(routing_key)
# Logging
logtrace(LOGGER, 'Publishing message %i (key %s) (body %s)...', self.__delivery_number+1, routing_key, msg_string) # +1 because it will be incremented after the publish.
log_every_x_times(LOGGER, self.__logcounter_trigger, self.__LOGFREQUENCY, 'Trying actual publish... (trigger no. %i).', self.__logcounter_trigger)
logtrace(LOGGER, '(Publish to channel no. %i).', self.thread._channel.channel_number)
# Actual publish to exchange
self.thread._channel.basic_publish(
exchange=self.thread.get_exchange_name(),
routing_key=routing_key,
body=msg_string,
properties=properties,
mandatory=defaults.RABBIT_MANDATORY_DELIVERY
)
return True
# If anything went wrong, put it back into the stack of
# unpublished messages before re-raising the exception
# for further handling:
except Exception as e:
success = False
logwarn(LOGGER, 'Message was not published. Putting back to queue. Reason: %s: "%s"',e.__class__.__name__, repr(e))
self.thread.put_one_message_into_queue_of_unsent_messages(message)
logtrace(LOGGER, 'Now (after putting back) left in queue to be published: %i messages.', self.thread.get_num_unpublished())
raise e
'''
If a publish was successful, pass it to the confirmer module
and in increment delivery_number for the next message.
'''
def __postparations_after_successful_feeding(self, msg):
# Pass the successfully published message and its delivery_number
# to the confirmer module, to wait for its confirmation.
# Increase the delivery number for the next message.
self.thread.put_to_unconfirmed_delivery_tags(self.__delivery_number)
self.thread.put_to_unconfirmed_messages_dict(self.__delivery_number, msg)
self.__delivery_number += 1
# Logging
self.__logcounter_success += 1
log_every_x_times(LOGGER, self.__logcounter_success, self.__LOGFREQUENCY, 'Actual publish to channel done (trigger no. %i, publish no. %i).', self.__logcounter_trigger, self.__logcounter_success)
logtrace(LOGGER, 'Publishing messages %i to RabbitMQ... done.', self.__delivery_number-1)
if (self.__delivery_number-1 == 1):
loginfo(LOGGER, 'First message published to RabbitMQ.')
logdebug(LOGGER, 'Message published (no. %i)', self.__delivery_number-1)
'''
Reset the delivery_number for the messages.
This must be called on a reconnection / channel reopen!
And may not be called during any other situation!
The number is not sent along to the RabbitMQ server, but
the server keeps track of the delivery number
separately on its side.
That's why it is important to make sure it is incremented
and reset exactly the same way (incremented at each successfully
published message, and reset to one at channel reopen).
(called by the builder during reconnection / channel reopen).
'''
    def reset_delivery_number(self):
        # Restart the delivery-number sequence at 1. Must be called on
        # reconnection / channel reopen, and ONLY then: RabbitMQ tracks
        # delivery tags per channel on the server side, so our counter
        # has to be incremented/reset exactly in sync with the channel.
        self.__delivery_number = 1
| IS-ENES-Data/esgf-pid | esgfpid/rabbit/asynchronous/thread_feeder.py | Python | apache-2.0 | 13,289 |
/*
* Copyright (C) 2017 exzogeni.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package alchemy;
import alchemy.query.AlchemyDelete;
import alchemy.query.AlchemyInsert;
import alchemy.query.AlchemyUpdate;
import alchemy.query.AlchemyWhere;
import java.util.Collection;
import java.util.Collections;
/**
 * Entry-point facade for building typed queries (where / insert / update /
 * delete) against a {@link DataSource}.
 */
public class Alchemy {

    /** Backing data source that all operations are delegated to. */
    private final DataSource mSource;

    /**
     * Creates a facade bound to the given data source.
     *
     * @param source backing data source used to build every query
     */
    public Alchemy(DataSource source) {
        mSource = source;
    }

    /** Starts a typed WHERE query for the given entity class. */
    public <T> AlchemyWhere<T> where(Class<T> clazz) {
        final DataSource source = mSource;
        return new OpWhere<>(source, source.where(clazz));
    }

    /** Inserts a single object; delegates to the collection overload. */
    public <T> AlchemyInsert<T> insert(T object) {
        return insert(Collections.singletonList(object));
    }

    /** Inserts every object in the given collection. */
    public <T> AlchemyInsert<T> insert(Collection<T> objects) {
        return new OpInsert<>(mSource.insert(objects));
    }

    /** Updates a single object; delegates to the collection overload. */
    public <T> AlchemyUpdate<T> update(T object) {
        return update(Collections.singletonList(object));
    }

    /** Updates every object in the given collection. */
    public <T> AlchemyUpdate<T> update(Collection<T> objects) {
        return new OpUpdate<>(mSource.update(objects));
    }

    /** Deletes a single object; delegates to the collection overload. */
    public <T> AlchemyDelete delete(T object) {
        return delete(Collections.singletonList(object));
    }

    /** Deletes every object in the given collection. */
    public <T> AlchemyDelete delete(Collection<T> objects) {
        return new OpDelete(mSource.delete(objects));
    }
}
| DanielSerdyukov/rxsqlite | alchemy/src/main/java/alchemy/Alchemy.java | Java | apache-2.0 | 1,801 |
package net.community.chest.jmx;
import java.io.IOException;
import java.util.Collection;
import net.community.chest.dom.DOMUtils;
import net.community.chest.io.EOLStyle;
import net.community.chest.jmx.dom.MBeanEntryDescriptor;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* <P>Definitions of the proprietary JMX protocol over HTTP</P>
* @author Lyor G.
* @since Jan 7, 2008 12:23:28 PM
*/
public final class JMXProtocol {
    private JMXProtocol ()
    {
        // utility class - no instances
    }

    /** Root element name of the XML document listing MBean descriptors. */
    public static final String DEFAULT_MBEANS_LIST_ROOT_ELEM_NAME="mbeans";

    // various request parameters
    public static final String ATTRIBUTES_PARAM="attributes", // true=list MBean attributes (default=false)
                               VALUES_PARAM="values", // true=fetch MBean attributes values (default=false)
                               OPERATIONS_PARAM="operations", // true=fetch MBean operations (default=false)
                               PARAMS_PARAM="params", // true=fetch MBean operation parameters (default=false)
                               NULLS_PARAM="null", // true=include null values (default=false)
                               NAME_PARAM="name",
                               DOMAIN_PARAM="domain",
                               OPNAME_PARAM="opname", // if non-null then overrides the operation name in the XML descriptor
                               UNIQUE_PARAM="unique", // true=invoked operation name is unique (default=true)
                               PARAMLESS_PARAM="paramless"; // true=no need to read operation XML descriptor since operation has no parameters

    // format: http://somehost:port/servlet?req=list[&attributes=true/false][&values=true/false][&name='...'][&null=true/false][operations=true/false][params=true/false]
    // format: http://somehost:port/servlet?req=get[&null=true/false]
    public static final String REQ_PARAM="req",
                               ALL_REQ="all", // equivalent to req=list&attributes=true&values=true&null=true&operations=true&params=true
                               LIST_REQ="list",
                               AGENTS_REQ="agents",
                               GET_REQ="get", // only the 'null' option is valid
                               INVOKE_REQ="invoke",
                               WHEREAMI_REQ="whereami",
                               SYSPROPS_REQ="sysprops",
                               ENV_REQ="env",
                               CONTEXT_REQ="context", // servlet context
                               CONFIG_REQ="config", // servlet configuration
                               VERINFO_REQ="verinfo"; // community chest version information

    /**
     * Appends each MBean descriptor's string form to the given target,
     * one per line (CRLF-terminated), skipping null/empty entries.
     *
     * @param sb target to append to - may not be null
     * @param mbl descriptors to render - may be null/empty, in which case
     * nothing is appended
     * @return the same <code>sb</code> instance, for call chaining
     * @throws IOException if appending fails or <code>sb</code> is null
     */
    public static final <A extends Appendable> A appendDescriptors (A sb, Collection<? extends MBeanEntryDescriptor> mbl) throws IOException
    {
        final int numMBeans=(null == mbl) ? 0 : mbl.size();
        if (null == sb)
            throw new IOException("appendDescriptors(" + numMBeans + ") no " + Appendable.class.getSimpleName() + " instance");
        if (numMBeans <= 0)
            return sb;

        for (final MBeanEntryDescriptor mbe : mbl)
        {
            final String mbString=(null == mbe) ? null : mbe.toString();
            if ((null == mbString) || mbString.isEmpty())
                continue;

            sb.append(mbString);
            EOLStyle.CRLF.appendEOL(sb);
        }
        return sb;
    }

    /**
     * Builds a complete XML document whose root element wraps the string
     * forms of the supplied MBean descriptors.
     *
     * @param mbl descriptors to render - may be null/empty
     * @return the document text
     * @throws IOException if appending the descriptors fails
     */
    public static final String buildDescriptorsDocument (Collection<? extends MBeanEntryDescriptor> mbl) throws IOException
    {
        final int numMBeans=(null == mbl) ? 0 : mbl.size();
        final StringBuilder sb=new StringBuilder(Math.max(numMBeans, 1) * 128 + 64);
        // XML declaration followed by the opening root element
        sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
        sb.append(EOLStyle.CRLF.getStyleChars());
        sb.append(DOMUtils.XML_ELEM_START_DELIM);
        sb.append(DEFAULT_MBEANS_LIST_ROOT_ELEM_NAME);
        sb.append(DOMUtils.XML_ELEM_END_DELIM);
        sb.append(EOLStyle.CRLF.getStyleChars());

        appendDescriptors(sb, mbl);

        // closing root element
        sb.append(DOMUtils.XML_ELEM_START_DELIM);
        sb.append(DOMUtils.XML_ELEM_CLOSURE_DELIM);
        sb.append(DEFAULT_MBEANS_LIST_ROOT_ELEM_NAME);
        sb.append(DOMUtils.XML_ELEM_END_DELIM);
        return sb.toString();
    }
}
| lgoldstein/communitychest | chest/base-utils/jmx/src/main/java/net/community/chest/jmx/JMXProtocol.java | Java | apache-2.0 | 4,647 |
from typing import ClassVar, FrozenSet
from ..config import Config
from .dependency import IngressClassesDependency, SecretDependency, ServiceDependency
from .k8sobject import KubernetesGVK, KubernetesObject
from .k8sprocessor import ManagedKubernetesProcessor
from .resource import NormalizedResource, ResourceManager
class IngressClassProcessor (ManagedKubernetesProcessor):
    '''
    Processes IngressClass resources, recording the names of those whose
    controller is Ambassador so that IngressProcessor can later match
    `spec.ingressClassName` references against them.
    '''

    CONTROLLER: ClassVar[str] = 'getambassador.io/ingress-controller'

    ingress_classes_dep: IngressClassesDependency

    def __init__(self, manager: ResourceManager) -> None:
        super().__init__(manager)
        self.ingress_classes_dep = self.deps.provide(IngressClassesDependency)

    def kinds(self) -> FrozenSet[KubernetesGVK]:
        gvks = (
            KubernetesGVK('networking.k8s.io/v1beta1', 'IngressClass'),
            KubernetesGVK('networking.k8s.io/v1', 'IngressClass'),
        )
        return frozenset(gvks)

    def _process(self, obj: KubernetesObject) -> None:
        # Only IngressClasses declaring "spec.controller: getambassador.io/ingress-controller"
        # are of interest to us.
        controller = obj.spec.get('controller', '')
        if controller.lower() != self.CONTROLLER:
            self.logger.debug(f'ignoring IngressClass {obj.name} without controller - getambassador.io/ingress-controller')
            return

        if obj.ambassador_id != Config.ambassador_id:
            self.logger.debug(f'IngressClass {obj.name} does not have Ambassador ID {Config.ambassador_id}, ignoring...')
            return

        # TODO: Do we intend to use this parameter in any way?
        # `parameters` is a TypedLocalObjectReference linking to another
        # (non-namespaced, since IngressClass itself is non-namespaced)
        # resource; its use is optional and currently unprescribed.
        # https://godoc.org/k8s.io/api/core/v1#TypedLocalObjectReference
        ingress_parameters = obj.spec.get('parameters', {})

        self.logger.debug(f'Handling IngressClass {obj.name} with parameters {ingress_parameters}...')
        self.aconf.incr_count('k8s_ingress_class')

        # Not emitted directly: IngressProcessor consults this set when
        # deciding whether an Ingress belongs to Ambassador. If we ever use
        # `parameters`, they should be added to this dependency type.
        self.ingress_classes_dep.ingress_classes.add(obj.name)
class IngressProcessor (ManagedKubernetesProcessor):
    '''
    Translates Kubernetes Ingress resources (owned by Ambassador) into
    Ambassador Host and Mapping resources, and arranges for the Ingress
    status to be synced with the Ambassador service's load-balancer status.
    '''
    service_dep: ServiceDependency
    ingress_classes_dep: IngressClassesDependency
    def __init__(self, manager: ResourceManager) -> None:
        '''Registers the Secret, Service and IngressClasses dependencies.'''
        super().__init__(manager)
        self.deps.want(SecretDependency)
        self.service_dep = self.deps.want(ServiceDependency)
        self.ingress_classes_dep = self.deps.want(IngressClassesDependency)
    def kinds(self) -> FrozenSet[KubernetesGVK]:
        '''All API versions of Ingress this processor handles.'''
        return frozenset([
            KubernetesGVK('extensions/v1beta1', 'Ingress'),
            KubernetesGVK('networking.k8s.io/v1beta1', 'Ingress'),
            KubernetesGVK('networking.k8s.io/v1', 'Ingress'),
        ])
    def _update_status(self, obj: KubernetesObject) -> None:
        '''
        Queue a status update for this Ingress so its status mirrors the
        Ambassador service's status; no-op when already in sync.
        '''
        service_status = None
        if not self.service_dep.ambassador_service or not self.service_dep.ambassador_service.name:
            self.logger.error(f"Unable to set Ingress {obj.name}'s load balancer, could not find Ambassador service")
        else:
            service_status = self.service_dep.ambassador_service.status
        if obj.status != service_status:
            if service_status:
                status_update = (obj.gvk.kind, obj.namespace, service_status)
                self.logger.debug(f"Updating Ingress {obj.name} status to {status_update}")
                self.aconf.k8s_status_updates[f'{obj.name}.{obj.namespace}'] = status_update
        else:
            self.logger.debug(f"Not reconciling Ingress {obj.name}: observed and current statuses are in sync")
    def _process(self, obj: KubernetesObject) -> None:
        '''
        Emit Hosts (from spec.tls), a default-backend Mapping, and one
        Mapping per rule/path for an Ambassador-owned Ingress, then queue
        its status update and process any Ambassador annotations on it.
        '''
        ingress_class_name = obj.spec.get('ingressClassName', '')
        has_ingress_class = ingress_class_name in self.ingress_classes_dep.ingress_classes
        has_ambassador_ingress_class_annotation = obj.annotations.get('kubernetes.io/ingress.class', '').lower() == 'ambassador'
        # check the Ingress resource has either:
        # - a `kubernetes.io/ingress.class: "ambassador"` annotation
        # - a `spec.ingressClassName` that references an IngressClass with
        #   `spec.controller: getambassador.io/ingress-controller`
        #
        # also worth noting, the kube-apiserver might assign the `spec.ingressClassName` if unspecified
        # and only 1 IngressClass has the following annotation:
        #   annotations:
        #     ingressclass.kubernetes.io/is-default-class: "true"
        if not (has_ingress_class or has_ambassador_ingress_class_annotation):
            self.logger.debug(f'ignoring Ingress {obj.name} without annotation (kubernetes.io/ingress.class: "ambassador") or IngressClass controller (getambassador.io/ingress-controller)')
            return
        # We don't want to deal with non-matching Ambassador IDs
        if obj.ambassador_id != Config.ambassador_id:
            self.logger.debug(f"Ingress {obj.name} does not have Ambassador ID {Config.ambassador_id}, ignoring...")
            return
        self.logger.debug(f"Handling Ingress {obj.name}...")
        self.aconf.incr_count('k8s_ingress')
        # Each (tls entry, host) pair becomes an Ambassador Host that routes
        # insecure traffic and uses the entry's secret for TLS.
        ingress_tls = obj.spec.get('tls', [])
        for tls_count, tls in enumerate(ingress_tls):
            tls_secret = tls.get('secretName', None)
            if tls_secret is not None:
                for host_count, host in enumerate(tls.get('hosts', ['*'])):
                    tls_unique_identifier = f"{obj.name}-{tls_count}-{host_count}"
                    spec = {
                        'ambassador_id': [obj.ambassador_id],
                        'hostname': host,
                        'acmeProvider': {
                            'authority': 'none'
                        },
                        'tlsSecret': {
                            'name': tls_secret
                        },
                        'requestPolicy': {
                            'insecure': {
                                'action': 'Route'
                            }
                        }
                    }
                    ingress_host = NormalizedResource.from_data(
                        'Host',
                        tls_unique_identifier,
                        namespace=obj.namespace,
                        labels=obj.labels,
                        spec=spec,
                    )
                    self.logger.debug(f"Generated Host from ingress {obj.name}: {ingress_host}")
                    self.manager.emit(ingress_host)
        # parse ingress.spec.defaultBackend
        # using ingress.spec.backend as a fallback, for older versions of the Ingress resource.
        default_backend = obj.spec.get('defaultBackend', obj.spec.get('backend', {}))
        db_service_name = default_backend.get('serviceName', None)
        db_service_port = default_backend.get('servicePort', None)
        if db_service_name is not None and db_service_port is not None:
            # The default backend becomes a catch-all '/' Mapping.
            db_mapping_identifier = f"{obj.name}-default-backend"
            default_backend_mapping = NormalizedResource.from_data(
                'Mapping',
                db_mapping_identifier,
                namespace=obj.namespace,
                labels=obj.labels,
                spec={
                    'ambassador_id': obj.ambassador_id,
                    'prefix': '/',
                    'service': f'{db_service_name}.{obj.namespace}:{db_service_port}'
                },
            )
            self.logger.debug(f"Generated mapping from Ingress {obj.name}: {default_backend_mapping}")
            self.manager.emit(default_backend_mapping)
        # parse ingress.spec.rules
        ingress_rules = obj.spec.get('rules', [])
        for rule_count, rule in enumerate(ingress_rules):
            rule_http = rule.get('http', {})
            rule_host = rule.get('host', None)
            http_paths = rule_http.get('paths', [])
            for path_count, path in enumerate(http_paths):
                path_backend = path.get('backend', {})
                path_type = path.get('pathType', 'ImplementationSpecific')
                service_name = path_backend.get('serviceName', None)
                service_port = path_backend.get('servicePort', None)
                path_location = path.get('path', '/')
                # Skip paths with no usable backend.
                if not service_name or not service_port or not path_location:
                    continue
                unique_suffix = f"{rule_count}-{path_count}"
                mapping_identifier = f"{obj.name}-{unique_suffix}"
                # For cases where `pathType: Exact`,
                # otherwise `Prefix` and `ImplementationSpecific` are handled as regular Mapping prefixes
                is_exact_prefix = True if path_type == 'Exact' else False
                spec = {
                    'ambassador_id': obj.ambassador_id,
                    'prefix': path_location,
                    'prefix_exact': is_exact_prefix,
                    'precedence': 1 if is_exact_prefix else 0,  # Make sure exact paths are evaluated before prefix
                    'service': f'{service_name}.{obj.namespace}:{service_port}'
                }
                if rule_host is not None:
                    if rule_host.startswith('*.'):
                        # Ingress allow specifying hosts with a single wildcard as the first label in the hostname.
                        # Transform the rule_host into a host_regex:
                        # *.star.com becomes ^[a-z0-9]([-a-z0-9]*[a-z0-9])?\.star\.com$
                        spec['host'] = rule_host\
                            .replace('.', '\\.')\
                            .replace('*', '^[a-z0-9]([-a-z0-9]*[a-z0-9])?', 1) + '$'
                        spec['host_regex'] = True
                    else:
                        spec['host'] = rule_host
                path_mapping = NormalizedResource.from_data(
                    'Mapping',
                    mapping_identifier,
                    namespace=obj.namespace,
                    labels=obj.labels,
                    spec=spec,
                )
                self.logger.debug(f"Generated mapping from Ingress {obj.name}: {path_mapping}")
                self.manager.emit(path_mapping)
        # let's make arrangements to update Ingress' status now
        self._update_status(obj)
        # Let's see if our Ingress resource has Ambassador annotations on it
        self.manager.emit_annotated(NormalizedResource.from_kubernetes_object_annotation(obj))
| datawire/ambassador | python/ambassador/fetch/ingress.py | Python | apache-2.0 | 11,005 |
/*
* Copyright 2002-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cache.config;
import org.w3c.dom.Element;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.RuntimeBeanReference;
import org.springframework.beans.factory.xml.NamespaceHandlerSupport;
import org.springframework.util.StringUtils;
/**
* {@code NamespaceHandler} allowing for the configuration of declarative
* cache management using either XML or using annotations.
*
* <p>This namespace handler is the central piece of functionality in the
* Spring cache management facilities.
*
* @author Costin Leau
* @since 3.1
*/
public class CacheNamespaceHandler extends NamespaceHandlerSupport {

    /** Name of the XML attribute pointing at a custom cache manager bean. */
    static final String CACHE_MANAGER_ATTRIBUTE = "cache-manager";

    /** Bean name assumed when no explicit cache manager is configured. */
    static final String DEFAULT_CACHE_MANAGER_BEAN_NAME = "cacheManager";

    /**
     * Resolve the cache manager bean name for the given element, falling
     * back to the default bean name when the attribute is absent.
     */
    static String extractCacheManager(Element element) {
        if (element.hasAttribute(CACHE_MANAGER_ATTRIBUTE)) {
            return element.getAttribute(CACHE_MANAGER_ATTRIBUTE);
        }
        return DEFAULT_CACHE_MANAGER_BEAN_NAME;
    }

    /**
     * Wire an optional "key-generator" bean reference into the given bean
     * definition; the definition is returned unchanged if none is declared.
     */
    static BeanDefinition parseKeyGenerator(Element element, BeanDefinition def) {
        String name = element.getAttribute("key-generator");
        if (StringUtils.hasText(name)) {
            RuntimeBeanReference keyGeneratorRef = new RuntimeBeanReference(name.trim());
            def.getPropertyValues().add("keyGenerator", keyGeneratorRef);
        }
        return def;
    }

    @Override
    public void init() {
        registerBeanDefinitionParser("annotation-driven", new AnnotationDrivenCacheBeanDefinitionParser());
        registerBeanDefinitionParser("advice", new CacheAdviceParser());
    }
}
| spring-projects/spring-framework | spring-context/src/main/java/org/springframework/cache/config/CacheNamespaceHandler.java | Java | apache-2.0 | 2,210 |
package ca.uhn.fhir.rest.server.interceptor.auth;
/*
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2017 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.defaultString;
import java.util.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hl7.fhir.instance.model.api.*;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.api.TagList;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.AuthenticationException;
import ca.uhn.fhir.rest.server.exceptions.ForbiddenOperationException;
import ca.uhn.fhir.rest.server.interceptor.ServerOperationInterceptorAdapter;
import ca.uhn.fhir.util.CoverageIgnore;
/**
* This class is a base class for interceptors which can be used to
* inspect requests and responses to determine whether the calling user
* has permission to perform the given action.
* <p>
* See the HAPI FHIR
* <a href="http://jamesagnew.github.io/hapi-fhir/doc_rest_server_security.html">Documentation on Server Security</a>
* for information on how to use this interceptor.
* </p>
*/
public class AuthorizationInterceptor extends ServerOperationInterceptorAdapter implements IRuleApplier {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(AuthorizationInterceptor.class);
private PolicyEnum myDefaultPolicy = PolicyEnum.DENY;
/**
* Constructor
*/
public AuthorizationInterceptor() {
super();
}
/**
* Constructor
*
* @param theDefaultPolicy
* The default policy if no rules apply (must not be null)
*/
public AuthorizationInterceptor(PolicyEnum theDefaultPolicy) {
this();
setDefaultPolicy(theDefaultPolicy);
}
private void applyRulesAndFailIfDeny(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId,
IBaseResource theOutputResource) {
Verdict decision = applyRulesAndReturnDecision(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource);
if (decision.getDecision() == PolicyEnum.ALLOW) {
return;
}
handleDeny(decision);
}
@Override
public Verdict applyRulesAndReturnDecision(RestOperationTypeEnum theOperation, RequestDetails theRequestDetails, IBaseResource theInputResource, IIdType theInputResourceId,
IBaseResource theOutputResource) {
List<IAuthRule> rules = buildRuleList(theRequestDetails);
ourLog.trace("Applying {} rules to render an auth decision for operation {}", rules.size(), theOperation);
Verdict verdict = null;
for (IAuthRule nextRule : rules) {
verdict = nextRule.applyRule(theOperation, theRequestDetails, theInputResource, theInputResourceId, theOutputResource, this);
if (verdict != null) {
ourLog.trace("Rule {} returned decision {}", nextRule, verdict.getDecision());
break;
}
}
if (verdict == null) {
ourLog.trace("No rules returned a decision, applying default {}", myDefaultPolicy);
return new Verdict(myDefaultPolicy, null);
}
return verdict;
}
/**
* Subclasses should override this method to supply the set of rules to be applied to
* this individual request.
* <p>
* Typically this is done by examining <code>theRequestDetails</code> to find
* out who the current user is and then using a {@link RuleBuilder} to create
* an appropriate rule chain.
* </p>
*
* @param theRequestDetails
* The individual request currently being applied
*/
public List<IAuthRule> buildRuleList(RequestDetails theRequestDetails) {
return new ArrayList<IAuthRule>();
}
private OperationExamineDirection determineOperationDirection(RestOperationTypeEnum theOperation, IBaseResource theRequestResource) {
switch (theOperation) {
case ADD_TAGS:
case DELETE_TAGS:
case GET_TAGS:
// These are DSTU1 operations and not relevant
return OperationExamineDirection.NONE;
case EXTENDED_OPERATION_INSTANCE:
case EXTENDED_OPERATION_SERVER:
case EXTENDED_OPERATION_TYPE:
return OperationExamineDirection.BOTH;
case METADATA:
// Security does not apply to these operations
return OperationExamineDirection.IN;
case DELETE:
// Delete is a special case
return OperationExamineDirection.NONE;
case CREATE:
case UPDATE:
case PATCH:
// if (theRequestResource != null) {
// if (theRequestResource.getIdElement() != null) {
// if (theRequestResource.getIdElement().hasIdPart() == false) {
// return OperationExamineDirection.IN_UNCATEGORIZED;
// }
// }
// }
return OperationExamineDirection.IN;
case META:
case META_ADD:
case META_DELETE:
// meta operations do not apply yet
return OperationExamineDirection.NONE;
case GET_PAGE:
case HISTORY_INSTANCE:
case HISTORY_SYSTEM:
case HISTORY_TYPE:
case READ:
case SEARCH_SYSTEM:
case SEARCH_TYPE:
case VREAD:
return OperationExamineDirection.OUT;
case TRANSACTION:
return OperationExamineDirection.BOTH;
case VALIDATE:
// Nothing yet
return OperationExamineDirection.NONE;
default:
// Should not happen
throw new IllegalStateException("Unable to apply security to event of type " + theOperation);
}
}
/**
* The default policy if no rules have been found to apply. Default value for this setting is {@link PolicyEnum#DENY}
*/
public PolicyEnum getDefaultPolicy() {
return myDefaultPolicy;
}
/**
* Handle an access control verdict of {@link PolicyEnum#DENY}.
* <p>
* Subclasses may override to implement specific behaviour, but default is to
* throw {@link ForbiddenOperationException} (HTTP 403) with error message citing the
* rule name which trigered failure
* </p>
*/
protected void handleDeny(Verdict decision) {
if (decision.getDecidingRule() != null) {
String ruleName = defaultString(decision.getDecidingRule().getName(), "(unnamed rule)");
throw new ForbiddenOperationException("Access denied by rule: " + ruleName);
}
throw new ForbiddenOperationException("Access denied by default policy (no applicable rules)");
}
private void handleUserOperation(RequestDetails theRequest, IBaseResource theResource, RestOperationTypeEnum operation) {
applyRulesAndFailIfDeny(operation, theRequest, theResource, theResource.getIdElement(), null);
}
@Override
public void incomingRequestPreHandled(RestOperationTypeEnum theOperation, ActionRequestDetails theProcessedRequest) {
IBaseResource inputResource = null;
IIdType inputResourceId = null;
switch (determineOperationDirection(theOperation, theProcessedRequest.getResource())) {
case IN:
case BOTH:
inputResource = theProcessedRequest.getResource();
inputResourceId = theProcessedRequest.getId();
break;
case OUT:
// inputResource = null;
inputResourceId = theProcessedRequest.getId();
break;
case NONE:
return;
}
RequestDetails requestDetails = theProcessedRequest.getRequestDetails();
applyRulesAndFailIfDeny(theOperation, requestDetails, inputResource, inputResourceId, null);
}
@Override
public boolean outgoingResponse(RequestDetails theRequestDetails, IBaseResource theResponseObject) {
switch (determineOperationDirection(theRequestDetails.getRestOperationType(), null)) {
case IN:
case NONE:
return true;
case BOTH:
case OUT:
break;
}
FhirContext fhirContext = theRequestDetails.getServer().getFhirContext();
List<IBaseResource> resources = Collections.emptyList();
switch (theRequestDetails.getRestOperationType()) {
case SEARCH_SYSTEM:
case SEARCH_TYPE:
case HISTORY_INSTANCE:
case HISTORY_SYSTEM:
case HISTORY_TYPE:
case TRANSACTION:
case GET_PAGE:
case EXTENDED_OPERATION_SERVER:
case EXTENDED_OPERATION_TYPE:
case EXTENDED_OPERATION_INSTANCE: {
if (theResponseObject != null) {
if (theResponseObject instanceof IBaseBundle) {
resources = toListOfResourcesAndExcludeContainer(theResponseObject, fhirContext);
} else if (theResponseObject instanceof IBaseParameters) {
resources = toListOfResourcesAndExcludeContainer(theResponseObject, fhirContext);
}
}
break;
}
default: {
if (theResponseObject != null) {
resources = Collections.singletonList(theResponseObject);
}
break;
}
}
for (IBaseResource nextResponse : resources) {
applyRulesAndFailIfDeny(theRequestDetails.getRestOperationType(), theRequestDetails, null, null, nextResponse);
}
return true;
}
@CoverageIgnore
@Override
public boolean outgoingResponse(RequestDetails theRequestDetails, TagList theResponseObject) {
throw failForDstu1();
}
@CoverageIgnore
@Override
public boolean outgoingResponse(RequestDetails theRequestDetails, TagList theResponseObject, HttpServletRequest theServletRequest, HttpServletResponse theServletResponse)
throws AuthenticationException {
throw failForDstu1();
}
@Override
public void resourceCreated(RequestDetails theRequest, IBaseResource theResource) {
handleUserOperation(theRequest, theResource, RestOperationTypeEnum.CREATE);
}
@Override
public void resourceDeleted(RequestDetails theRequest, IBaseResource theResource) {
handleUserOperation(theRequest, theResource, RestOperationTypeEnum.DELETE);
}
@Override
public void resourceUpdated(RequestDetails theRequest, IBaseResource theOldResource, IBaseResource theNewResource) {
if (theOldResource != null) {
handleUserOperation(theRequest, theOldResource, RestOperationTypeEnum.UPDATE);
}
handleUserOperation(theRequest, theNewResource, RestOperationTypeEnum.UPDATE);
}
/**
* The default policy if no rules have been found to apply. Default value for this setting is {@link PolicyEnum#DENY}
*
* @param theDefaultPolicy
* The policy (must not be <code>null</code>)
*/
public void setDefaultPolicy(PolicyEnum theDefaultPolicy) {
Validate.notNull(theDefaultPolicy, "theDefaultPolicy must not be null");
myDefaultPolicy = theDefaultPolicy;
}
private List<IBaseResource> toListOfResourcesAndExcludeContainer(IBaseResource theResponseObject, FhirContext fhirContext) {
List<IBaseResource> resources;
resources = fhirContext.newTerser().getAllPopulatedChildElementsOfType(theResponseObject, IBaseResource.class);
// Exclude the container
if (resources.size() > 0 && resources.get(0) == theResponseObject) {
resources = resources.subList(1, resources.size());
}
return resources;
}
private static UnsupportedOperationException failForDstu1() {
return new UnsupportedOperationException("Use of this interceptor on DSTU1 servers is not supportd");
}
private enum OperationExamineDirection {
BOTH,
IN,
NONE,
OUT,
}
public static class Verdict {
private final IAuthRule myDecidingRule;
private final PolicyEnum myDecision;
public Verdict(PolicyEnum theDecision, IAuthRule theDecidingRule) {
myDecision = theDecision;
myDecidingRule = theDecidingRule;
}
public IAuthRule getDecidingRule() {
return myDecidingRule;
}
public PolicyEnum getDecision() {
return myDecision;
}
@Override
public String toString() {
ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
b.append("rule", myDecidingRule.getName());
b.append("decision", myDecision.name());
return b.build();
}
}
}
| eug48/hapi-fhir | hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/auth/AuthorizationInterceptor.java | Java | apache-2.0 | 12,075 |
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.observers;
import java.util.concurrent.atomic.AtomicInteger;
import io.reactivex.internal.fuseable.QueueDisposable;
/**
* An abstract QueueDisposable implementation, extending an AtomicInteger,
* that defaults all unnecessary Queue methods to throw UnsupportedOperationException.
* @param <T> the output value type
*/
/**
 * Skeletal QueueDisposable backed by an AtomicInteger, whose mutating
 * Queue methods all reject with UnsupportedOperationException.
 * @param <T> the output value type
 */
public abstract class BasicIntQueueDisposable<T>
extends AtomicInteger
implements QueueDisposable<T> {

    private static final long serialVersionUID = -1001730202384742097L;

    /** Builds the shared failure thrown by every unsupported mutator. */
    private static UnsupportedOperationException unsupported() {
        return new UnsupportedOperationException("Should not be called");
    }

    @Override
    public final boolean offer(T e) {
        throw unsupported();
    }

    @Override
    public final boolean offer(T v1, T v2) {
        throw unsupported();
    }
}
| benjchristensen/RxJava | src/main/java/io/reactivex/internal/observers/BasicIntQueueDisposable.java | Java | apache-2.0 | 1,391 |
package com.facepp.demo.mediacodec;
import android.opengl.EGL14;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;

import java.io.IOException;
/**
* Created by xiejiantao on 2017/10/27.
*/
public class MediaHelper {

    private static final String TAG = "MediaHelper";

    /**
     * muxer for audio/video recording
     */
    private MediaMuxerWrapper mMuxer;

    private int mCameraWidth, mCameraHeight; // becomes the video size
    private GLSurfaceView mSurfaceView;
    private int mTextureId;
    private MediaVideoEncoder mMediaVideoEncode;
    private final float[] mMvpMatrix = new float[16];

    /**
     * @param cameraWidth  camera preview width in pixels
     * @param cameraHeight camera preview height in pixels
     * @param isLand       true when landscape; width/height are swapped so the
     *                     recorded video keeps the camera's orientation
     * @param surfaceView  GL surface whose GL thread is used for encoder setup
     */
    public MediaHelper(int cameraWidth, int cameraHeight, boolean isLand, GLSurfaceView surfaceView) {
        if (!isLand) {
            mCameraWidth = cameraWidth;
            mCameraHeight = cameraHeight;
        } else {
            mCameraWidth = cameraHeight;
            mCameraHeight = cameraWidth;
        }
        mSurfaceView = surfaceView;
        // Rotate the MVP matrix by 270 degrees around Z for the recorded frames.
        Matrix.setIdentityM(mMvpMatrix, 0);
        Matrix.rotateM(mMvpMatrix, 0, 270, 0, 0, 1);
    }

    /**
     * Start recording.
     * This is a sample project and call this on UI thread to avoid being complicated
     * but basically this should be called on private thread because preparing
     * of encoder is heavy work.
     *
     * @param textureId GL texture holding the camera frames to encode
     */
    public void startRecording(int textureId) {
        mTextureId = textureId;
        try {
            mMuxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
            // for video capturing; the encoder registers itself with the muxer.
            new MediaVideoEncoder(mMuxer, mMediaEncoderListener, mCameraWidth, mCameraHeight);
            // audio capturing intentionally disabled:
            // new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
            mMuxer.prepare();
            mMuxer.startRecording();
        } catch (final IOException e) {
            // Don't swallow setup failures silently; recording simply won't start.
            Log.e(TAG, "startRecording failed", e);
        }
    }

    /**
     * request stop recording
     */
    public void stopRecording() {
        if (mMuxer != null) {
            mMuxer.stopRecording();
            mMuxer = null;
            // you should not wait here
        }
    }

    /**
     * callback methods from encoder
     */
    private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
        @Override
        public void onPrepared(final MediaEncoder encoder) {
            if (encoder instanceof MediaVideoEncoder)
                setVideoEncoder((MediaVideoEncoder) encoder);
        }

        @Override
        public void onStopped(final MediaEncoder encoder) {
            if (encoder instanceof MediaVideoEncoder)
                setVideoEncoder(null);
        }
    };

    /**
     * Hands the current EGL context and texture to the encoder on the GL
     * thread; a null encoder detaches the current one.
     */
    private void setVideoEncoder(final MediaVideoEncoder encoder) {
        mSurfaceView.queueEvent(new Runnable() {
            @Override
            public void run() {
                synchronized (mSurfaceView) {
                    if (encoder != null) {
                        encoder.setEglContext(EGL14.eglGetCurrentContext(), mTextureId);
                        mMediaVideoEncode = encoder;
                    }
                }
            }
        });
    }

    /** Notifies the encoder that a frame is available, with explicit ST and MVP matrices. */
    public void frameAvailable(float[] mStMatrix, float[] mMvpMatrix) {
        if (mMediaVideoEncode != null) {
            // notify to capturing thread that the camera frame is available.
            mMediaVideoEncode.frameAvailableSoon(mStMatrix, mMvpMatrix);
        }
    }

    /** Notifies the encoder that a frame is available, using the helper's own MVP matrix. */
    public void frameAvailable(float[] mStMatrix) {
        if (mMediaVideoEncode != null) {
            // notify to capturing thread that the camera frame is available.
            mMediaVideoEncode.frameAvailableSoon(mStMatrix, mMvpMatrix);
        }
    }

    /** Notifies the encoder that a frame is available, with no matrices. */
    public void frameAvailable() {
        if (mMediaVideoEncode != null) {
            // notify to capturing thread that the camera frame is available.
            mMediaVideoEncode.frameAvailableSoon();
        }
    }
}
| FacePlusPlus/MegviiFacepp-Android-SDK | faceppdemo/src/main/java/com/facepp/demo/mediacodec/MediaHelper.java | Java | apache-2.0 | 4,231 |
#include <ap_int.h>
#include "spbits.h"
#include "deltas.h"
#define bw_num 2
// Select the best (smallest) theta delta out of the seg_ch*seg_ch candidate
// segment pairings using a balanced 2- or 3-level comparator tree.
//
// Differences from invalid pairings are assumed to already carry the maximum
// value (see comment below), so no per-entry valid analysis is needed here.
void delta::best_delta_seg_ch(
    ap_uint<bw_th> dth [seg_ch*seg_ch], //change this when nseg value is changed
    ap_uint<seg_ch*seg_ch> sth,//change this when nseg value is changed
    ap_uint<seg_ch*seg_ch> dvl,//change this when nseg value is changed
    ap_uint<bw_th> *bth, // smallest delta
    ap_uint<1> *bsg, // sign of bth
    ap_uint<1> *bvl, // valid flag
    ap_uint<2> *bnm // winner number//change this when bnum value is changed
)
{
#pragma HLS INTERFACE ap_ctrl_none port=return
#pragma HLS PIPELINE II=1
#pragma HLS ARRAY_PARTITION variable=dth complete dim=1

    const int nseg = seg_ch * seg_ch;
    int i;

    // First-stage winners: value, sign and index of each pairwise comparison.
    ap_uint<bw_th> cmp1 [nseg/2];
#pragma HLS ARRAY_PARTITION variable=cmp1 complete dim=1
    // Second-stage winners.
    ap_uint<bw_th> cmp2 [nseg/4];
#pragma HLS ARRAY_PARTITION variable=cmp2 complete dim=1
    ap_uint<nseg/2> sig1;
    ap_uint<nseg/4> sig2;
    ap_uint<bw_num> num1 [nseg/2];
#pragma HLS ARRAY_PARTITION variable=num1 complete dim=1
    ap_uint<bw_num> num2 [nseg/4];
#pragma HLS ARRAY_PARTITION variable=num2 complete dim=1

    // Local copies of the outputs, written once at the end.
    ap_uint<bw_th> a_bth; // smallest delta
    ap_uint<1> a_bsg; // sign of bth
    ap_uint<1> a_bvl; // valid flag
    ap_uint<bw_num> a_bnm; // winner number
    ap_uint<seg_ch * seg_ch> a_dvl;
    a_dvl = dvl;

    // first comparator stage
    for (i = 0; i < nseg/2; i = i+1){
        // no valid flag analysis here
        // we need to take all thetas into account
        // differences from invalid thetas are set to max value, so they will not pass sorting
        if (dth[i*2] < dth[i*2+1]){
            cmp1[i] = dth[i*2];
            sig1[i] = sth[i*2];
            num1[i] = i*2;
        }
        else
        {
            cmp1[i] = dth[i*2+1];
            sig1[i] = sth[i*2+1];
            num1[i] = i*2+1;
        }
    }

    // second comparator stage
    for (i = 0; i < nseg/4; i = i+1){
        if (cmp1[i*2] < cmp1[i*2+1]){
            cmp2[i] = cmp1[i*2];
            sig2[i] = sig1[i*2];
            num2[i] = num1[i*2];
        }
        else
        {
            cmp2[i] = cmp1[i*2+1];
            sig2[i] = sig1[i*2+1];
            num2[i] = num1[i*2+1];
        }
    }

    // third comparator stage if needed (only when more than one second-stage
    // winner remains, i.e. nseg > 4)
    if (nseg/4 > 1){
        if (cmp2[0] < cmp2[1]){
            a_bth = cmp2[0];
            a_bsg = sig2[0];
            a_bnm = num2[0];
        }
        else
        {
            a_bth = cmp2[1];
            a_bsg = sig2[1];
            a_bnm = num2[1];
        }
    }
    else
    {
        a_bth = cmp2[0];
        a_bsg = sig2[0];
        a_bnm = num2[0];
    }

    // output valid if one or more inputs are valid
    a_bvl = a_dvl.or_reduce();

    *bth = a_bth;
    *bsg = a_bsg;
    *bnm = a_bnm;
    *bvl = a_bvl;
}
| nikhilghanathe/HLS-for-EMTF | sources_sim/best_delta.cpp | C++ | apache-2.0 | 2,443 |
'use strict';
// Report overall code coverage from Istanbul coverage files.
// Implemented in ES5 for now
/* eslint no-var: 0 */
var _ = require('underscore');
var path = require('path');
var fs = require('fs');
var util = require('util');
var tty = require('tty');
var istanbul = require('istanbul');
var map = _.map;
var filter = _.filter;
var pairs = _.pairs;
var object = _.object;
var clone = _.clone;
var extend = _.extend;
var values = _.values;
var flatten = _.flatten;
var reduce = _.reduce;
var identity = _.identity;
var memoize = _.memoize;
/* eslint no-process-exit: 1 */
// Return the path of the Abacus module dir containing a file
var moddir = function(file) {
if(file === '.' || file === '/') return undefined;
if(/cf-abacus.*/.test(path.basename(file))) return file;
return moddir(path.dirname(file));
};
// Convert the covered file paths in the given coverage info to relative paths
// to the original source files
var sources = function(root, cov) {
  // cov is an Istanbul coverage object keyed by covered (build) file path;
  // root is the parsed root package.json used to locate module source dirs
  return object(filter(map(pairs(cov), function(file) {
    // Determine the build path and the name of the module containing each
    // covered file
    // NOTE(review): moddir() can return undefined when the file is not under
    // a cf-abacus module dir, which would make path.basename throw — assumed
    // not to happen for the inputs collect() feeds in; confirm
    var mdir = moddir(file[0]);
    var mod = path.basename(mdir);
    // Determine the path to the module source directory
    var sdir = root.dependencies[mod] || root.devDependencies[mod];
    if(!sdir)
      return [file[0], file[1]];
    // Return a covered object with a relative path to the original source
    // of the covered file
    // The split(':').reverse()[0] drops any 'scheme:' style prefix from the
    // joined path before splitting it into components
    var lib = path.join(sdir, file[0].substr(mdir.length + 1)).split(':').reverse()[0].split('/');
    // Map the module's 'lib' build dir component back to its 'src' dir
    var l = lib.lastIndexOf('lib');
    var src = lib.slice(0, l).concat(['src']).concat(lib.slice(l + 1)).join('/');
    return [src, extend(clone(file[1]), { path: src })];
  }), function(file) { return file[1]; }));
};
// Return a list of all the individual json coverage files for our modules
// Pass the list of individual json coverage files for our modules to the
// given callback, keeping only the files that actually exist on disk
var covfiles = function(cb) {
  fs.readdir('node_modules', function(err, files) {
    // One candidate per installed module; a readdir error simply means
    // there is no node_modules dir, so only the root file is considered
    var mods = err ? [] : map(files, function(file) {
      return path.join('node_modules', file, '.coverage', 'coverage.json');
    });
    var candidates = [path.join('.coverage', 'coverage.json')].concat(mods);
    cb(undefined, filter(candidates, fs.existsSync));
  });
};
// Return a coverage collector loaded with all the given files
// Return (via callback) an Istanbul coverage collector loaded with all the
// individual module coverage files, with covered paths remapped to sources
var collect = function(root, cb) {
  covfiles(function(err, files) {
    // BUG FIX: return after reporting the error; previously execution fell
    // through, used the undefined file list and invoked cb a second time
    if(err) return cb(err);
    var collector = new istanbul.Collector();
    map(files, function(file) {
      collector.add(sources(root, JSON.parse(fs.readFileSync(file))));
    });
    cb(undefined, collector);
  });
};
// Compute overall line and statement coverage percentages
// Compute overall line and statement coverage percentages from an Istanbul
// final coverage object
var percentages = function(coverage) {
  // Fold one list of hit counts into a covered/total tally
  var tally = function(acc, hits) {
    return { covered: acc.covered + filter(hits, identity).length, total: acc.total + hits.length };
  };
  // Accumulate covered and total counts of lines (l), statements (s) and
  // branches (b) across all covered files
  var t = reduce(values(coverage), function(a, cov) {
    return {
      l: tally(a.l, values(cov.l)),
      s: tally(a.s, values(cov.s)),
      b: tally(a.b, flatten(values(cov.b)))
    };
  }, { l: { covered: 0, total: 0 }, s: { covered: 0, total: 0 }, b: { covered: 0, total: 0 }});
  // Return the coverage percentages; branch counts are deliberately left out
  // of the statement percentage (see the commented-out terms)
  return { l: t.l.covered / (t.l.total || 1) * 100, s: (t.s.covered + /*t.b.covered*/ 0) / (t.s.total + /*t.b.total*/ 0 || 1) * 100 };
};
// Colorify the report on a tty or when the command line says --colors,
// or when env variable COVERAGE_COLORS is configured
// Colorify the report on a tty or when the command line says --colors,
// or when env variable COVERAGE_COLORS is configured
var colors = memoize(function() {
  var enabled = function(c) { return c !== undefined && c !== '0' && c !== 'false' && c !== 'disabled' && c !== 'no'; };
  // BUG FIX: tty.isatty() expects a numeric file descriptor, not a stream,
  // so tty.isatty(process.stdout) never detected a terminal; use the
  // stream's own isTTY flag instead
  return process.stdout.isTTY === true || _.contains(process.argv, '--colors') || enabled(process.env.COVERAGE_COLORS);
});
// Report a failure and exit
// Report a failure message on stderr and terminate the process with a
// non-zero exit code
var fail = function(msg) {
  process.stderr.write(msg);
  process.exit(1);
};
// Report overall code coverage from Istanbul coverage files
// Report overall code coverage from Istanbul coverage files
var runCLI = function() {
  // Load the root package.json from the current directory
  var root = JSON.parse(fs.readFileSync('package.json'));
  // Collect all the individual json coverage reports for our modules
  collect(root, function(err, collector) {
    // fail() exits the process, so execution stops here on error
    if(err) fail(util.format('Couldn\'t collect coverage files', err));
    // Combine all the individual reports and write overall coverage
    // reports in LCOV and JSON formats
    var reporter = new istanbul.Reporter(undefined, '.coverage');
    reporter.addAll(['lcovonly', 'json']);
    reporter.write(collector, false, function(err) {
      if(err) fail(util.format('Couldn\'t write coverage reports', err, '\n'));
      // Compute and report overall line and statement coverage
      var percent = percentages(collector.getFinalCoverage());
      var fullcov = percent.l === 100 && percent.s === 100;
      // Print overall code coverage percentages in green for 100%
      // coverage and red under 100%
      var color = colors() ? fullcov ? '\u001b[32m' : '\u001b[31m' : '';
      var reset = colors() ? '\u001b[0m' : '';
      process.stdout.write(util.format('\n%sOverall coverage lines %d\% statements %d\%%s\n\n', color, percent.l.toFixed(2), percent.s.toFixed(2), reset));
      process.exit(0);
    });
  });
};
// Export our public functions
module.exports.runCLI = runCLI;
| stefanschneider/cf-abacus | tools/coverage/src/index.js | JavaScript | apache-2.0 | 5,626 |
// Copyright 2007-2015 Chris Patterson, Dru Sellers, Travis Smith, et. al.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.AzureServiceBusTransport.Contexts
{
using System;
using System.Threading.Tasks;
using Microsoft.ServiceBus;
using Microsoft.ServiceBus.Messaging;
    public interface ConnectionContext :
        PipeContext
    {
        /// <summary>
        /// The messaging factory initialized for the service bus
        /// </summary>
        Task<MessagingFactory> MessagingFactory { get; }

        /// <summary>
        /// A separate messaging factory for the service bus; presumably used for
        /// session-aware messaging — confirm against the implementations
        /// </summary>
        Task<MessagingFactory> SessionMessagingFactory { get; }

        /// <summary>
        /// The namespace manager for the service bus
        /// </summary>
        Task<NamespaceManager> NamespaceManager { get; }

        /// <summary>
        /// The namespace manager at the root of the namespace
        /// </summary>
        Task<NamespaceManager> RootNamespaceManager { get; }

        /// <summary>
        /// Return the address for the specified queue
        /// </summary>
        /// <param name="queueDescription">The queue description</param>
        /// <returns>The address of the queue</returns>
        Uri GetQueueAddress(QueueDescription queueDescription);

        /// <summary>
        /// Return the path of the queue for this connection
        /// </summary>
        /// <param name="queueDescription">The queue description</param>
        /// <returns>The queue path relative to this connection's namespace</returns>
        string GetQueuePath(QueueDescription queueDescription);
    }
} | D3-LucaPiombino/MassTransit | src/MassTransit.AzureServiceBusTransport/Contexts/ConnectionContext.cs | C# | apache-2.0 | 2,155 |
package in.notwork.calculator;
import java.io.Serializable;
import java.util.Objects;
/**
 * A geographic coordinate expressed as a latitude/longitude pair of doubles.
 * Mutable JavaBean, serializable, with value-based equality.
 *
 * @author rishabh.
 */
public class GeoPoint implements Serializable {

    private double latitude;
    private double longitude;

    /** Creates a point at (0, 0). */
    public GeoPoint() {
        super();
    }

    /**
     * Creates a point at the given coordinates.
     *
     * @param latitude the latitude
     * @param longitude the longitude
     */
    public GeoPoint(double latitude, double longitude) {
        this();
        this.latitude = latitude;
        this.longitude = longitude;
    }

    public double getLatitude() {
        return latitude;
    }

    public void setLatitude(double latitude) {
        this.latitude = latitude;
    }

    public double getLongitude() {
        return longitude;
    }

    public void setLongitude(double longitude) {
        this.longitude = longitude;
    }

    /**
     * Value equality on both coordinates. Uses {@link Double#compare}, which
     * treats NaN values as equal and distinguishes +0.0 from -0.0, matching
     * the boxed-Double semantics used by {@link #hashCode()}.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof GeoPoint)) {
            return false;
        }
        GeoPoint other = (GeoPoint) o;
        return Double.compare(latitude, other.latitude) == 0
                && Double.compare(longitude, other.longitude) == 0;
    }

    @Override
    public int hashCode() {
        return Objects.hash(latitude, longitude);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("GeoPoint{");
        sb.append("latitude=").append(latitude);
        sb.append(", longitude=").append(longitude);
        sb.append('}');
        return sb.toString();
    }
}
| rishabh9/distance-calculator | src/main/java/in/notwork/calculator/GeoPoint.java | Java | apache-2.0 | 1,339 |
/**
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.rasc.bsoncodec.model;
import javax.annotation.Nullable;
import javax.lang.model.element.VariableElement;
import org.immutables.value.Value;
import ch.rasc.bsoncodec.codegen.CodeGen;
/**
 * Immutable (via the Immutables annotation processor) model describing one
 * persisted field of a BSON-mapped class, carrying everything the code
 * generator needs to emit encode/decode statements for it.
 */
@Value.Immutable
public abstract class FieldModel implements Comparable<FieldModel> {

    /** The annotated field element this model was derived from. */
    public abstract VariableElement varEl();

    /** Name used for the field; defaults to the Java field's simple name. */
    @Value.Default
    public String name() {
        return this.varEl().getSimpleName().toString();
    }

    /** Relative position of the field; defines the natural ordering (see {@link #compareTo}). */
    public abstract int order();

    /** Name of a custom codec to use for this field, or null for the default. */
    public abstract @Nullable String customCodecName();

    /** Code generation strategy for this field's type. */
    public abstract CodeGen codeGen();

    /** Whether a null field value should still be written; defaults to false. */
    @Value.Default
    public boolean storeNullValue() {
        return false;
    }

    /** Whether an empty collection should still be written; defaults to false. */
    @Value.Default
    public boolean storeEmptyCollection() {
        return false;
    }

    /**
     * Whether the generated encode code may skip its null check; true by
     * default for the id field and for primitive-typed fields, which can
     * never be null.
     */
    @Value.Default
    public boolean disableEncodeNullCheck() {
        return this.idModel() != null || this.varEl().asType().getKind().isPrimitive();
    }

    /** Whether the generated decode code may skip its null check; true by default for the id field. */
    @Value.Default
    public boolean disableDecodeNullCheck() {
        return this.idModel() != null;
    }

    /** Whether the generated "set field to null" statement is omitted; defaults to true. */
    @Value.Default
    public boolean disableSetNullStatement() {
        return true;
    }

    /** Fixed array length for array-typed fields; 0 means not fixed. */
    @Value.Default
    public int fixedArray() {
        return 0;
    }

    /** Id metadata when this field is the document id, otherwise null. */
    public abstract @Nullable IdModel idModel();

    /** Name of the setter method used by generated decode code. */
    public abstract String methodNameSet();

    /** Name of the getter method used by generated encode code. */
    public abstract String methodNameGet();

    /** Orders fields by their {@link #order()} value. */
    @Override
    public int compareTo(FieldModel o) {
        return Integer.compare(order(), o.order());
    }
}
/**
* Trim leading and trailing whitespace
* @return {String} Returns trimmed string
*/
String.prototype.trim = function() {
    // Strip leading and trailing whitespace in a single global pass
    return this.replace(/^\s+|\s+$/g, '');
}
/**
* Creates a new string utilizing placeholders defined in the source string
* @param {Object} values Array or object whose indices or properties correspond to placeholder names
* @exception {KeyNotFoundError} Key or property not found
* @exception {FormatError} Format was invalid
* @return {String} Returns formatted results
* @remarks Placeholders are defined by placing text inside curly brackets. To insert literal curly brackets, simply use 2 consecutive curly brackets.
* The text inside the curly brackets represents a property or index to obtain from the 'values' parameter.
* @example var values = { 1: "First", 2: "Second" };
* return "One is {1} and {{Two}} is {{{2}}}".toFormattedString(values); // results in "One is First and {Two} is {Second}"
*/
String.prototype.toFormattedString = function(values) {
    var formatStr = String(this);
    var result = '';
    // Splits off: (leading literal text)(a run of braces)(the rest).
    // NOTE(review): '.' does not match newlines, so multi-line format
    // strings stop matching at the first newline — confirm intended scope.
    var re = /^([^{}]*)(\{+|\}+)(.*?)$/;
    var rr = re.exec(formatStr);
    var isInPlaceholder = false;
    var placeHolderKey = '';
    var position = 0;
    while (rr != null) {
        formatStr = rr[3];
        // Odd-length brace runs open/close a placeholder; each doubled pair
        // is emitted as a single literal brace.
        var placeHolderLen = rr[2].length % 2;
        if (isInPlaceholder) {
            if (placeHolderLen == 1) {
                if (rr[2].substr(0, 1) == '{')
                    throw new FormatError(undefined, "Unexpected opening brace", String(this), position + rr[1].length);
                isInPlaceholder = false;
                placeHolderKey += rr[1];
                if (values === undefined || values === null)
                    throw new KeyNotFoundError(undefined, "values were not defined", placeHolderKey, String(this), position + rr[1].length);
                var v;
                try {
                    v = values[placeHolderKey];
                } catch (err) {
                    throw new KeyNotFoundError(undefined, undefined, placeHolderKey, String(this), position + rr[1].length, err)
                }
                if (v === undefined)
                    throw new KeyNotFoundError(undefined, undefined, placeHolderKey, String(this), position + rr[1].length);
                result += ((v === null) ? "" : String(v)) + rr[2].substr(0, (rr[2].length - placeHolderLen) / 2);
                // BUG FIX: reset the key after resolving a placeholder;
                // previously it accumulated across placeholders, so any
                // format string with two placeholders looked up a bogus
                // concatenated key (e.g. "12" for "{1}...{2}").
                placeHolderKey = '';
            } else
                placeHolderKey += rr[1] + rr[2].substr(0, (rr[2].length - placeHolderLen) / 2);
        } else {
            result += rr[1] + rr[2].substr(0, (rr[2].length - placeHolderLen) / 2);
            if (placeHolderLen == 1) {
                if (rr[2].substr(0, 1) == '}')
                    throw new FormatError(undefined, "Unexpected closing brace", String(this), position + rr[1].length);
                isInPlaceholder = true;
            }
        }
        // BUG FIX: was `r[1]`/`r[2]` — an undefined variable, so every call
        // that reached this line threw a ReferenceError.
        position += rr[1].length + rr[2].length;
        rr = re.exec(formatStr);
    }
    if (isInPlaceholder)
        throw new FormatError(undefined, "Closing brace not found", String(this), position);
    return result + formatStr;
}