code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/**
* @license
* Copyright 2019 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @fileoverview Blockly module for Node. It includes Blockly core,
* built-in blocks, all the generators and the English locale.
*/
/* eslint-disable */
'use strict';

// Include the EN Locale by default.
// NOTE(review): `Blockly` and `En` are not defined in this file; they must be
// injected by the surrounding package bundle/build step — confirm against the
// packaging scripts.
Blockly.setLocale(En);
| google/blockly | scripts/package/node/index.js | JavaScript | apache-2.0 | 339 |
package biz.aQute.resolve;
import static test.lib.Utils.createRepo;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.osgi.framework.namespace.IdentityNamespace;
import org.osgi.resource.Capability;
import org.osgi.resource.Requirement;
import org.osgi.resource.Resource;
import org.osgi.service.repository.Repository;
import org.osgi.service.resolver.Resolver;
import aQute.bnd.build.model.EE;
import aQute.bnd.osgi.resource.CapReqBuilder;
import aQute.bnd.version.Version;
import aQute.bnd.version.VersionRange;
import aQute.lib.io.IO;
import junit.framework.TestCase;
@SuppressWarnings("restriction")
public class GenericResolveContextResolveTest extends TestCase {

	ResolverLogger logger = new ResolverLogger(0, System.out);

	/**
	 * @return a name unique to the currently running test, used to key the
	 *         repository created by {@code createRepo}.
	 */
	private String getTestName() {
		return getClass().getName() + "/" + getName();
	}

	/**
	 * Simple basic resolve. We use a small index with gogo + framework and then
	 * try to see if we can resolve the runtime from the shell requirement.
	 *
	 * @throws Exception
	 */
	public void testSimpleResolve() throws Exception {
		Repository repository = createRepo(IO.getFile("testdata/repo3.index.xml"), getTestName());

		GenericResolveContext grc = new GenericResolveContext(logger);
		grc.setLevel(2);
		grc.addRepository(repository);
		grc.addFramework("org.apache.felix.framework", null);
		grc.addEE(EE.JavaSE_1_7);
		grc.addRequireBundle("org.apache.felix.gogo.shell", new VersionRange("[0,1]"));
		grc.done();

		try (ResolverLogger logger = new ResolverLogger(4)) {
			// Use the try-with-resources managed logger. Previously a second
			// ResolverLogger was constructed here and never closed, leaking it,
			// while the managed one went unused.
			Resolver resolver = new BndResolver(logger);
			Set<Resource> resources = resolver.resolve(grc)
				.keySet();
			assertNotNull(getResource(resources, "org.apache.felix.gogo.runtime", "0.10"));
		}
	}

	/**
	 * Check default directive: a requirement built without an "effective"
	 * directive should match the two providers in the test repo.
	 *
	 * @throws Exception
	 */
	public void testResolveRequirementNoDirective() throws Exception {
		Repository repository = createRepo(IO.getFile("testdata/repo6/index.xml"), getTestName());

		GenericResolveContext grc = new GenericResolveContext(logger);
		grc.setLevel(2);
		grc.addRepository(repository);

		Requirement logservice = new CapReqBuilder("osgi.service")
			.addDirective("filter", "(objectClass=org.osgi.service.log.LogService)")
			.buildSyntheticRequirement();

		List<Capability> providers = grc.findProviders(logservice);
		assertEquals(2, providers.size());
		assertNames(providers, "test.a", "test.b");
	}

	/**
	 * Check expressly set directive: effective:=resolve should behave the same
	 * as the default (no directive) case above.
	 *
	 * @throws Exception
	 */
	public void testResolveRequirementResolveDirective() throws Exception {
		Repository repository = createRepo(IO.getFile("testdata/repo6/index.xml"), getTestName());

		GenericResolveContext grc = new GenericResolveContext(logger);
		grc.addRepository(repository);

		Requirement logservice = new CapReqBuilder("osgi.service")
			.addDirective("filter", "(objectClass=org.osgi.service.log.LogService)")
			.addDirective("effective", "resolve")
			.buildSyntheticRequirement();

		List<Capability> providers = grc.findProviders(logservice);
		assertEquals(2, providers.size());
		assertNames(providers, "test.a", "test.b");
	}

	/**
	 * Check effective:=active directive: widens the match to include the
	 * additional active-time provider (test.c) in the test repo.
	 *
	 * @throws Exception
	 */
	public void testResolveRequirementActiveDirective() throws Exception {
		Repository repository = createRepo(IO.getFile("testdata/repo6/index.xml"), getTestName());

		GenericResolveContext grc = new GenericResolveContext(logger);
		grc.addRepository(repository);

		Requirement logservice = new CapReqBuilder("osgi.service")
			.addDirective("filter", "(objectClass=org.osgi.service.log.LogService)")
			.addDirective("effective", "active")
			.buildSyntheticRequirement();

		List<Capability> providers = grc.findProviders(logservice);
		assertEquals(3, providers.size());
		assertNames(providers, "test.a", "test.b", "test.c");
	}

	/**
	 * Find the resource with the given identity (and, optionally, version) in
	 * the given set.
	 *
	 * @param resources the resources to search
	 * @param bsn the osgi.identity to look for
	 * @param versionString the required version, or {@code null} to accept any
	 * @return the matching resource, or {@code null} when absent
	 */
	private static Resource getResource(Set<Resource> resources, String bsn, String versionString) {
		for (Resource resource : resources) {
			List<Capability> identities = resource.getCapabilities(IdentityNamespace.IDENTITY_NAMESPACE);
			if (identities.size() == 1) {
				Capability idCap = identities.get(0);
				Object id = idCap.getAttributes()
					.get(IdentityNamespace.IDENTITY_NAMESPACE);
				Object version = idCap.getAttributes()
					.get(IdentityNamespace.CAPABILITY_VERSION_ATTRIBUTE);
				if (bsn.equals(id)) {
					if (versionString == null) {
						return resource;
					}
					Version requested = Version.parseVersion(versionString);
					Version current;
					if (version instanceof Version) {
						current = (Version) version;
					} else {
						// Repositories may store the version as a String.
						current = Version.parseVersion("" + version);
					}
					if (requested.equals(current)) {
						return resource;
					}
				}
			}
		}
		return null;
	}

	/**
	 * Assert that the owning resources of the given capabilities have exactly
	 * the given identities (order-insensitive).
	 *
	 * @param providers capabilities whose owners are checked
	 * @param ids the expected osgi.identity values
	 */
	void assertNames(List<Capability> providers, String... ids) {
		Set<String> resourceNames = new HashSet<>();
		for (Capability cap : providers) {
			resourceNames.add(cap.getResource()
				.getCapabilities(IdentityNamespace.IDENTITY_NAMESPACE)
				.get(0)
				.getAttributes()
				.get(IdentityNamespace.IDENTITY_NAMESPACE)
				.toString());
		}

		Set<String> expectedResourceNames = new HashSet<>(Arrays.asList(ids));
		assertEquals(expectedResourceNames, resourceNames);
	}
}
| psoreide/bnd | biz.aQute.resolve/test/biz/aQute/resolve/GenericResolveContextResolveTest.java | Java | apache-2.0 | 5,184 |
/*
* Copyright 2018 ImpactDevelopment
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package clientapi.gui.widget.data;
/**
* Default implementations of {@link WidgetAlignment}
*
* @see WidgetAlignment
*
* @author Brady
* @since 5/28/2017
*/
public enum DefaultWidgetAlignment implements WidgetAlignment {

    LEFT(-1.0F),
    CENTERED(-0.5F),
    RIGHT(0.0F);

    // Alignment factor exposed via getValue(); made final because enum
    // constants are shared singletons and the value never changes after
    // construction.
    private final float value;

    DefaultWidgetAlignment(float value) {
        this.value = value;
    }

    @Override
    public final float getValue() {
        return this.value;
    }
}
| ImpactDevelopment/ClientAPI | src/main/java/clientapi/gui/widget/data/DefaultWidgetAlignment.java | Java | apache-2.0 | 1,076 |
import { Component, Input } from '@angular/core';
import { User } from 'ngx-login-client';
@Component({
  selector: 'f8-assignee',
  templateUrl: './assignee.component.html',
  styleUrls: ['./assignee.component.less'],
})
export class AssigneesComponent {
  // Backing store for the `assignees` input binding (set via assigneeInput).
  private assignees: User[] = [];

  // Number of assignees after which the list is truncated.
  // NOTE(review): semantics (count of avatars vs. characters) are defined by
  // the template — confirm against assignee.component.html.
  @Input() truncateAfter: number;

  // When true, display the assignee's full name (template-driven).
  @Input() showFullName: boolean;

  // Setter-based input: each new binding value replaces the whole array.
  @Input('assignees') set assigneeInput(val) {
    this.assignees = val;
  }

  // When true, render avatars overlapping each other.
  @Input() overlapAvatar: boolean = false;

  constructor() {}
}
| fabric8-ui/fabric8-ui | packages/planner/src/app/components_ngrx/assignee/assignee.component.ts | TypeScript | apache-2.0 | 502 |
package gwt.material.design.client.ui.html;
/*
* #%L
* GwtBootstrap3
* %%
* Copyright (C) 2013 GwtBootstrap3
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.gwt.dom.client.Document;
import gwt.material.design.client.base.AbstractTextWidget;
import gwt.material.design.client.base.helper.CodeHelper;
/**
* @author Ben Dol
*/
/**
 * Widget wrapping an HTML {@code <code>} element.
 */
public class Code extends AbstractTextWidget {

    /** Creates an empty {@code <code>} element widget. */
    public Code() {
        super(Document.get().createElement("code"));
    }

    /**
     * Creates a {@code <code>} widget pre-populated with the given markup.
     *
     * @param text HTML content to place inside the element
     */
    public Code(final String text) {
        this();
        // Write the element directly instead of calling setHTML(): invoking an
        // overridable method from a constructor would run a subclass override
        // before the subclass is initialized.
        this.getElement().setInnerHTML(text);
    }

    @Override
    public void setHTML(String html) {
        this.getElement().setInnerHTML(html);
    }
}
| GwtMaterialDesign/gwt-material | gwt-material/src/main/java/gwt/material/design/client/ui/html/Code.java | Java | apache-2.0 | 1,196 |
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Copyright 2008 Google Inc. All Rights Reserved.
/**
* @fileoverview A class for managing the editor toolbar.
*
* @see ../../demos/editor/editor.html
*/
goog.provide('goog.ui.editor.ToolbarController');
goog.require('goog.editor.Field.EventType');
goog.require('goog.events.EventHandler');
goog.require('goog.events.EventTarget');
goog.require('goog.ui.Component.EventType');
/**
 * A class for managing the editor toolbar. Acts as a bridge between
 * a {@link goog.editor.Field} and a {@link goog.ui.Toolbar}.
 *
 * The {@code toolbar} argument must be an instance of {@link goog.ui.Toolbar}
 * or a subclass. This class doesn't care how the toolbar was created. As
 * long as one or more controls hosted in the toolbar have IDs that match
 * built-in {@link goog.editor.Command}s, they will function as expected. It is
 * the caller's responsibility to ensure that the toolbar is already rendered
 * or that it decorates an existing element.
 *
 *
 * @param {!goog.editor.Field} field Editable field to be controlled by the
 *     toolbar.
 * @param {!goog.ui.Toolbar} toolbar Toolbar to control the editable field.
 * @constructor
 * @extends {goog.events.EventTarget}
 */
goog.ui.editor.ToolbarController = function(field, toolbar) {
  goog.events.EventTarget.call(this);

  /**
   * Event handler to listen for field events and user actions.
   * @type {!goog.events.EventHandler}
   * @private
   */
  this.handler_ = new goog.events.EventHandler(this);

  /**
   * The field instance controlled by the toolbar.
   * @type {!goog.editor.Field}
   * @private
   */
  this.field_ = field;

  /**
   * The toolbar that controls the field.
   * @type {!goog.ui.Toolbar}
   * @private
   */
  this.toolbar_ = toolbar;

  /**
   * Editing commands whose state is to be queried when updating the toolbar.
   * Populated below from the toolbar's queryable buttons.
   * @type {!Array.<string>}
   * @private
   */
  this.queryCommands_ = [];

  // Iterate over all buttons, and find those which correspond to
  // queryable commands. Add them to the list of commands to query on
  // each COMMAND_VALUE_CHANGE event.
  this.toolbar_.forEachChild(function(button) {
    if (button.queryable) {
      this.queryCommands_.push(this.getComponentId(button.getId()));
    }
  }, this);

  // Make sure the toolbar doesn't steal keyboard focus.
  this.toolbar_.setFocusable(false);

  // Hook up handlers that update the toolbar in response to field events,
  // and to execute editor commands in response to toolbar events.
  this.handler_.
      listen(this.field_, goog.editor.Field.EventType.COMMAND_VALUE_CHANGE,
          this.updateToolbar).
      listen(this.toolbar_, goog.ui.Component.EventType.ACTION,
          this.handleAction);
};
goog.inherits(goog.ui.editor.ToolbarController, goog.events.EventTarget);
/**
 * Returns the Closure component ID of the control that corresponds to the
 * given {@link goog.editor.Command} constant. This is the inverse of
 * {@link #getCommand}.
 * Subclasses may override this method if they want to use a custom mapping
 * scheme from commands to controls.
 * @param {string} command Editor command.
 * @return {string} Closure component ID of the corresponding toolbar
 *     control, if any.
 * @protected
 */
goog.ui.editor.ToolbarController.prototype.getComponentId = function(command) {
  // The default implementation assumes that the component ID is the same as
  // the command constant.
  return command;
};


/**
 * Returns the {@link goog.editor.Command} constant
 * that corresponds to the given Closure component ID. This is the inverse of
 * {@link #getComponentId}. Subclasses may override
 * this method if they want to use a custom mapping scheme from controls to
 * commands.
 * @param {string} id Closure component ID of a toolbar control.
 * @return {string} Editor command or dialog constant corresponding to the
 *     toolbar control, if any.
 * @protected
 */
goog.ui.editor.ToolbarController.prototype.getCommand = function(id) {
  // The default implementation assumes that the component ID is the same as
  // the command constant.
  return id;
};
/**
 * Returns the event handler object for the editor toolbar. Useful for classes
 * that extend {@code goog.ui.editor.ToolbarController}.
 * @return {!goog.events.EventHandler} The event handler object.
 * @protected
 */
goog.ui.editor.ToolbarController.prototype.getHandler = function() {
  return this.handler_;
};


/**
 * Returns the field instance managed by the toolbar. Useful for
 * classes that extend {@code goog.ui.editor.ToolbarController}.
 * @return {!goog.editor.Field} The field managed by the toolbar.
 * @protected
 */
goog.ui.editor.ToolbarController.prototype.getField = function() {
  return this.field_;
};


/**
 * Returns the toolbar UI component that manages the editor. Useful for
 * classes that extend {@code goog.ui.editor.ToolbarController}.
 * @return {!goog.ui.Toolbar} The toolbar UI component.
 */
goog.ui.editor.ToolbarController.prototype.getToolbar = function() {
  return this.toolbar_;
};


/**
 * @return {boolean} Whether the toolbar is visible.
 */
goog.ui.editor.ToolbarController.prototype.isVisible = function() {
  return this.toolbar_.isVisible();
};


/**
 * Shows or hides the toolbar. Delegates directly to the toolbar component.
 * @param {boolean} visible Whether to show or hide the toolbar.
 */
goog.ui.editor.ToolbarController.prototype.setVisible = function(visible) {
  this.toolbar_.setVisible(visible);
};


/**
 * @return {boolean} Whether the toolbar is enabled.
 */
goog.ui.editor.ToolbarController.prototype.isEnabled = function() {
  return this.toolbar_.isEnabled();
};


/**
 * Enables or disables the toolbar. Delegates directly to the toolbar
 * component.
 * @param {boolean} enabled Whether to enable or disable the toolbar.
 */
goog.ui.editor.ToolbarController.prototype.setEnabled = function(enabled) {
  this.toolbar_.setEnabled(enabled);
};
/**
 * Programmatically blurs the editor toolbar, un-highlighting the currently
 * highlighted item, and closing the currently open menu (if any).
 */
goog.ui.editor.ToolbarController.prototype.blur = function() {
  // We can't just call this.toolbar_.getElement().blur(), because the toolbar
  // element itself isn't focusable, so goog.ui.Container#handleBlur isn't
  // registered to handle blur events. Calling handleBlur directly performs
  // the un-highlight/close-menu work without a real DOM blur event.
  this.toolbar_.handleBlur(null);
};
/** @inheritDoc */
goog.ui.editor.ToolbarController.prototype.disposeInternal = function() {
  goog.ui.editor.ToolbarController.superClass_.disposeInternal.call(this);
  // Dispose owned objects and delete the references so that any use after
  // disposal fails fast.
  if (this.handler_) {
    this.handler_.dispose();
    delete this.handler_;
  }
  if (this.toolbar_) {
    this.toolbar_.dispose();
    delete this.toolbar_;
  }
  // The field is not owned by this controller, so it is only dereferenced,
  // not disposed.
  delete this.field_;
  delete this.queryCommands_;
};
/**
 * Updates the toolbar in response to editor events. Specifically, updates
 * button states based on {@code COMMAND_VALUE_CHANGE} events, reflecting the
 * effective formatting of the selection.
 * @param {goog.events.Event} e Editor event to handle.
 * @protected
 */
goog.ui.editor.ToolbarController.prototype.updateToolbar = function(e) {
  // Skip the update when the toolbar is disabled, or when a CHANGE listener
  // cancels it (dispatchEvent returns false when the event was prevented).
  if (!this.toolbar_.isEnabled() ||
      !this.dispatchEvent(goog.ui.Component.EventType.CHANGE)) {
    return;
  }

  var state;

  /** @preserveTry */
  try {
    /** @type {Array.<string>} */
    e.commands; // Added by dispatchEvent.

    // If the COMMAND_VALUE_CHANGE event specifies which commands changed
    // state, then we only need to update those ones, otherwise update all
    // commands.
    state = /** @type {Object} */ (
        this.field_.queryCommandValue(e.commands || this.queryCommands_));
  } catch (ex) {
    // TODO: Find out when/why this happens.
    // Fall back to an empty state so the toolbar update is a no-op instead of
    // propagating the error.
    state = {};
  }

  this.updateToolbarFromState(state);
};
/**
 * Updates the toolbar to reflect a given state.
 * @param {Object} state Object mapping editor commands to values.
 */
goog.ui.editor.ToolbarController.prototype.updateToolbarFromState =
    function(state) {
  for (var command in state) {
    // Commands without a corresponding toolbar control are ignored.
    var control = this.toolbar_.getChild(this.getComponentId(command));
    if (!control) {
      continue;
    }
    var commandValue = state[command];
    if (control.updateFromValue) {
      // Controls that understand rich values (e.g. font pickers) consume the
      // value directly.
      control.updateFromValue(commandValue);
    } else {
      // Plain toggle buttons are simply checked/unchecked.
      control.setChecked(!!commandValue);
    }
  }
};
/**
 * Handles {@code ACTION} events dispatched by toolbar buttons in response to
 * user actions by executing the corresponding field command.
 * @param {goog.events.Event} e Action event to handle.
 * @protected
 */
goog.ui.editor.ToolbarController.prototype.handleAction = function(e) {
  // The event target is the toolbar control that fired; map its component ID
  // back to an editor command and execute it with the control's value.
  var control = e.target;
  var command = this.getCommand(control.getId());
  this.field_.execCommand(command, control.getValue());
};
| jay-hodgson/SynapseWebClient | src/main/webapp/js/goog/ui/editor/toolbarcontroller.js | JavaScript | apache-2.0 | 9,007 |
/*
* Copyright (c) 2017. HSJ
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hsj.common.rxbus;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Thread annotation: marks which thread an RxBus subscriber parameter should
 * be delivered on. The String constants below are the allowed values.
 *
 * NOTE(review): the type name and several constant names spell "Thead"/"THEAD"
 * instead of "Thread". Renaming would break existing callers, so the names are
 * documented here rather than changed.
 *
 * @Author:HSJ
 * @E-mail:shengjunhu@foxmail.com
 * @Date:2018/2/25/14:18
 * @Version:V1.0
 * @Class:BusThead
 * @Description: Thread annotation (original comment translated from Chinese)
 */
@Documented
@Inherited
@Target(ElementType.PARAMETER)
@Retention(RetentionPolicy.SOURCE)
public @interface BusThead {

    // Deliver on whatever thread the event was posted from ("current_thread").
    String CURRENT_THREAD = "current_thread";

    // Value "ui_thread" — presumably the Android UI thread; confirm against
    // the RxBus dispatcher implementation.
    String UI_THEAD = "ui_thread";

    // Value "main_thread".
    String MAIN_THEAD = "main_thread";

    // Value "new_thread" — a freshly created thread per delivery (per name).
    String NEW_THEAD = "new_thread";

    // Value "io_thread".
    String IO_THEAD = "io_thread";
}
/*
* Copyright (C) 2009 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.CollectPreconditions.checkEntryNotNull;
import com.google.common.annotations.Beta;
import java.util.Comparator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Function;
import java.util.stream.Collector;
/**
* GWT emulation of {@link com.google.common.collect.ImmutableBiMap}.
*
* @author Hayward Chan
*/
public abstract class ImmutableBiMap<K, V> extends ForwardingImmutableMap<K, V>
    implements BiMap<K, V> {

  @Beta
  public static <T, K, V> Collector<T, ?, ImmutableBiMap<K, V>> toImmutableBiMap(
      Function<? super T, ? extends K> keyFunction,
      Function<? super T, ? extends V> valueFunction) {
    return CollectCollectors.toImmutableBiMap(keyFunction, valueFunction);
  }

  // Casting to any type is safe because the set will never hold any elements.
  @SuppressWarnings("unchecked")
  public static <K, V> ImmutableBiMap<K, V> of() {
    return (ImmutableBiMap<K, V>) RegularImmutableBiMap.EMPTY;
  }

  public static <K, V> ImmutableBiMap<K, V> of(K k1, V v1) {
    checkEntryNotNull(k1, v1);
    return new SingletonImmutableBiMap<K, V>(k1, v1);
  }

  public static <K, V> ImmutableBiMap<K, V> of(K k1, V v1, K k2, V v2) {
    return new RegularImmutableBiMap<K, V>(ImmutableMap.of(k1, v1, k2, v2));
  }

  public static <K, V> ImmutableBiMap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3) {
    return new RegularImmutableBiMap<K, V>(ImmutableMap.of(
        k1, v1, k2, v2, k3, v3));
  }

  public static <K, V> ImmutableBiMap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
    return new RegularImmutableBiMap<K, V>(ImmutableMap.of(
        k1, v1, k2, v2, k3, v3, k4, v4));
  }

  public static <K, V> ImmutableBiMap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
    return new RegularImmutableBiMap<K, V>(ImmutableMap.of(
        k1, v1, k2, v2, k3, v3, k4, v4, k5, v5));
  }

  public static <K, V> Builder<K, V> builder() {
    return new Builder<K, V>();
  }

  /** Builder for {@link ImmutableBiMap}; mirrors {@link ImmutableMap.Builder}. */
  public static final class Builder<K, V> extends ImmutableMap.Builder<K, V> {
    public Builder() {}

    Builder(int initCapacity) {
      super(initCapacity);
    }

    @Override public Builder<K, V> put(K key, V value) {
      super.put(key, value);
      return this;
    }

    @Override public Builder<K, V> put(Map.Entry<? extends K, ? extends V> entry) {
      super.put(entry);
      return this;
    }

    @Override public Builder<K, V> putAll(Map<? extends K, ? extends V> map) {
      super.putAll(map);
      return this;
    }

    @Override public Builder<K, V> putAll(
        Iterable<? extends Entry<? extends K, ? extends V>> entries) {
      super.putAll(entries);
      return this;
    }

    public Builder<K, V> orderEntriesByValue(Comparator<? super V> valueComparator) {
      super.orderEntriesByValue(valueComparator);
      return this;
    }

    Builder<K, V> combine(Builder<K, V> other) {
      super.combine(other);
      return this;
    }

    @Override public ImmutableBiMap<K, V> build() {
      ImmutableMap<K, V> map = super.build();
      if (map.isEmpty()) {
        return of();
      }
      // Reuse the map built above. The previous code called super.build() a
      // second time here, redoing the entire build and discarding the first
      // result.
      return new RegularImmutableBiMap<K, V>(map);
    }
  }

  public static <K, V> ImmutableBiMap<K, V> copyOf(
      Map<? extends K, ? extends V> map) {
    if (map instanceof ImmutableBiMap) {
      @SuppressWarnings("unchecked") // safe since map is not writable
      ImmutableBiMap<K, V> bimap = (ImmutableBiMap<K, V>) map;
      return bimap;
    }

    if (map.isEmpty()) {
      return of();
    }

    ImmutableMap<K, V> immutableMap = ImmutableMap.copyOf(map);
    return new RegularImmutableBiMap<K, V>(immutableMap);
  }

  public static <K, V> ImmutableBiMap<K, V> copyOf(
      Iterable<? extends Entry<? extends K, ? extends V>> entries) {
    return new Builder<K, V>().putAll(entries).build();
  }

  ImmutableBiMap(Map<K, V> delegate) {
    super(delegate);
  }

  public abstract ImmutableBiMap<V, K> inverse();

  // The value view of a bimap is the key set of its inverse.
  @Override public ImmutableSet<V> values() {
    return inverse().keySet();
  }

  // Immutable bimaps never allow mutation.
  public final V forcePut(K key, V value) {
    throw new UnsupportedOperationException();
  }
}
| DavesMan/guava | guava-gwt/src-super/com/google/common/collect/super/com/google/common/collect/ImmutableBiMap.java | Java | apache-2.0 | 4,820 |
# -*- coding: utf-8 -*-
"""
CSS Selectors based on XPath
============================
This module supports selecting XML/HTML elements based on CSS selectors.
See the `CSSSelector` class for details.
:copyright: (c) 2007-2012 Ian Bicking and contributors.
See AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from cssselect.parser import (parse, Selector, FunctionalPseudoElement,
SelectorError, SelectorSyntaxError)
from cssselect.xpath import GenericTranslator, HTMLTranslator, ExpressionError
# Package version. ``VERSION`` is the historical public name; ``__version__``
# mirrors it to follow the conventional Python attribute name.
VERSION = '1.0.1'
__version__ = VERSION
| frvannes16/Cops-Robbers-Coding-Challenge | src/competition_code/libs/cssselect/__init__.py | Python | apache-2.0 | 639 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection.PortableExecutable;
using System.Text;
using System.Threading;
using Microsoft.CodeAnalysis;
using Roslyn.Utilities;
using EmitContext = Microsoft.CodeAnalysis.Emit.EmitContext;
namespace Microsoft.Cci
{
internal sealed class PeWriter
{
        /// <summary>
        /// True if we should attempt to generate a deterministic output (no timestamps or random data).
        /// </summary>
        private readonly bool _deterministic;

        // Module being written out as a PE image.
        private readonly IModule _module;

        // Path of the PDB; null when no PDB is emitted (see EmitPdb).
        private readonly string _pdbPathOpt;

        // Whether to emit the runtime startup stub (from module.RequiresStartupStub).
        private readonly bool _emitRuntimeStartupStub;

        // Size in bytes of the import address table: 8 (32-bit) or 16 (64-bit)
        // when the startup stub is emitted, otherwise 0. Set in the constructor.
        private readonly int _sizeOfImportAddressTable;

        private MemoryStream _headerStream = new MemoryStream(1024);
        private readonly MemoryStream _emptyStream = new MemoryStream(0);
        private readonly NtHeader _ntHeader = new NtHeader();

        // Writers that accumulate the raw contents of the individual PE sections
        // before they are flushed to the output stream.
        private readonly BinaryWriter _rdataWriter = new BinaryWriter(new MemoryStream());
        private readonly BinaryWriter _sdataWriter = new BinaryWriter(new MemoryStream());
        private readonly BinaryWriter _tlsDataWriter = new BinaryWriter(new MemoryStream());
        private readonly BinaryWriter _win32ResourceWriter = new BinaryWriter(new MemoryStream(1024));
        private readonly BinaryWriter _coverageDataWriter = new BinaryWriter(new MemoryStream());

        // Section headers; populated by FillInSectionHeaders/FillInTextSectionHeader.
        private SectionHeader _coverSection;
        private SectionHeader _relocSection;
        private SectionHeader _resourceSection;
        private SectionHeader _rdataSection;
        private SectionHeader _sdataSection;
        private SectionHeader _textSection;
        private SectionHeader _tlsSection;
        /// <summary>
        /// Creates a writer for the given module. Use the static
        /// <see cref="WritePeToStream"/> entry point instead of calling this directly.
        /// </summary>
        private PeWriter(IModule module, string pdbPathOpt, bool deterministic)
        {
            _module = module;
            _emitRuntimeStartupStub = module.RequiresStartupStub;
            _pdbPathOpt = pdbPathOpt;
            _deterministic = deterministic;

            // The IAT is only present when the startup stub is emitted:
            // 8 bytes on 32-bit targets, 16 bytes on 64-bit targets.
            _sizeOfImportAddressTable = _emitRuntimeStartupStub ? (!_module.Requires64bits ? 8 : 16) : 0;
        }

        // A PDB is being produced exactly when a PDB path was supplied.
        private bool EmitPdb => _pdbPathOpt != null;
        /// <summary>
        /// Serializes the module in <paramref name="context"/> to a PE image and
        /// writes it to the stream produced by <paramref name="getPeStream"/>.
        /// </summary>
        /// <returns>False when <paramref name="getPeStream"/> returns null; true on success.</returns>
        public static bool WritePeToStream(
            EmitContext context,
            CommonMessageProvider messageProvider,
            Func<Stream> getPeStream,
            PdbWriter nativePdbWriterOpt,
            string pdbPathOpt,
            bool allowMissingMethodBodies,
            bool deterministic,
            CancellationToken cancellationToken)
        {
            // If PDB writer is given, we have to have PDB path.
            Debug.Assert(nativePdbWriterOpt == null || pdbPathOpt != null);

            var peWriter = new PeWriter(context.Module, pdbPathOpt, deterministic);
            var mdWriter = FullMetadataWriter.Create(context, messageProvider, allowMissingMethodBodies, deterministic, cancellationToken);

            return peWriter.WritePeToStream(mdWriter, getPeStream, nativePdbWriterOpt);
        }
        /// <summary>
        /// Core emit pipeline: serializes metadata/IL into in-memory buffers,
        /// finalizes the PDB (if any), fills in the PE headers, and writes every
        /// section to the output stream. Returns false if no output stream is
        /// available.
        /// </summary>
        private bool WritePeToStream(MetadataWriter mdWriter, Func<Stream> getPeStream, PdbWriter nativePdbWriterOpt)
        {
            // TODO: we can precalculate the exact size of IL stream
            var ilBuffer = new MemoryStream(32 * 1024);
            var ilWriter = new BinaryWriter(ilBuffer);
            var metadataBuffer = new MemoryStream(16 * 1024);
            var metadataWriter = new BinaryWriter(metadataBuffer);
            var mappedFieldDataBuffer = new MemoryStream();
            var mappedFieldDataWriter = new BinaryWriter(mappedFieldDataBuffer);
            var managedResourceBuffer = new MemoryStream(1024);
            var managedResourceWriter = new BinaryWriter(managedResourceBuffer);

            nativePdbWriterOpt?.SetMetadataEmitter(mdWriter);

            // Since we are producing a full assembly, we should not have a module version ID
            // imposed ahead-of time. Instead we will compute a deterministic module version ID
            // based on the contents of the generated stream.
            Debug.Assert(_module.PersistentIdentifier == default(Guid));

            uint moduleVersionIdOffsetInMetadataStream;
            // Deferred: the method-body RVA depends on the text section layout,
            // which itself depends on the metadata sizes computed during
            // serialization, hence the callback.
            var calculateMethodBodyStreamRva = new Func<MetadataSizes, int>(mdSizes =>
            {
                FillInTextSectionHeader(mdSizes);
                return (int)_textSection.RelativeVirtualAddress + _sizeOfImportAddressTable + 72;
            });

            MetadataSizes metadataSizes;
            uint entryPointToken;

            mdWriter.SerializeMetadataAndIL(
                nativePdbWriterOpt,
                metadataWriter,
                ilWriter,
                mappedFieldDataWriter,
                managedResourceWriter,
                calculateMethodBodyStreamRva,
                CalculateMappedFieldDataStreamRva,
                out moduleVersionIdOffsetInMetadataStream,
                out entryPointToken,
                out metadataSizes);

            ContentId pdbContentId;

            if (nativePdbWriterOpt != null)
            {
                if (entryPointToken != 0)
                {
                    nativePdbWriterOpt.SetEntryPoint(entryPointToken);
                }

                var assembly = _module.AsAssembly;
                if (assembly != null && assembly.Kind == ModuleKind.WindowsRuntimeMetadata)
                {
                    // Dev12: If compiling to winmdobj, we need to add to PDB source spans of
                    // all types and members for better error reporting by WinMDExp.
                    nativePdbWriterOpt.WriteDefinitionLocations(_module.GetSymbolToLocationMap());
                }
                else
                {
#if DEBUG
                    // validate that all definitions are writeable
                    // if same scenario would happen in an winmdobj project
                    nativePdbWriterOpt.AssertAllDefinitionsHaveTokens(_module.GetSymbolToLocationMap());
#endif
                }

                pdbContentId = nativePdbWriterOpt.GetContentId();

                // the writer shall not be used after this point for writing:
                nativePdbWriterOpt = null;
            }
            else
            {
                pdbContentId = default(ContentId);
            }

            FillInSectionHeaders();

            // fill in header fields.
            FillInNtHeader(metadataSizes, CalculateMappedFieldDataStreamRva(metadataSizes));
            var corHeader = CreateCorHeader(metadataSizes, entryPointToken);

            // write to pe stream.
            Stream peStream = getPeStream();
            if (peStream == null)
            {
                // The caller declined to provide an output stream (e.g. earlier
                // diagnostics); nothing is written.
                return false;
            }

            long ntHeaderTimestampPosition;
            long metadataPosition;

            WriteHeaders(peStream, out ntHeaderTimestampPosition);

            WriteTextSection(
                peStream,
                corHeader,
                metadataBuffer,
                ilBuffer,
                mappedFieldDataBuffer,
                managedResourceBuffer,
                metadataSizes,
                pdbContentId,
                out metadataPosition);

            WriteRdataSection(peStream);
            WriteSdataSection(peStream);
            WriteCoverSection(peStream);
            WriteTlsSection(peStream);
            WriteResourceSection(peStream);
            WriteRelocSection(peStream);

            if (_deterministic)
            {
                // Patch the zeroed MVID and NT-header timestamp with values
                // derived from the stream contents (see
                // WriteDeterministicGuidAndTimestamps).
                var mvidPosition = metadataPosition + moduleVersionIdOffsetInMetadataStream;
                WriteDeterministicGuidAndTimestamps(peStream, mvidPosition, ntHeaderTimestampPosition);
            }

            return true;
        }
        /// <summary>
        /// RVA of the mapped field data stream: it is laid out at the very end
        /// of the text section.
        /// </summary>
        private int CalculateMappedFieldDataStreamRva(MetadataSizes metadataSizes)
        {
            FillInTextSectionHeader(metadataSizes);

            Debug.Assert(metadataSizes.MappedFieldDataSize % MetadataWriter.MappedFieldDataAlignment == 0);
            return (int)(_textSection.RelativeVirtualAddress + _textSection.VirtualSize - metadataSizes.MappedFieldDataSize);
        }
        /// <summary>
        /// Compute a deterministic Guid and timestamp based on the contents of the stream, and replace
        /// the 16 zero bytes at the given position and one or two 4-byte values with that computed Guid and timestamp.
        /// The content id is derived from a SHA-1 hash of the entire stream.
        /// </summary>
        /// <param name="peStream">PE stream.</param>
        /// <param name="mvidPosition">Position in the stream of 16 zero bytes to be replaced by a Guid</param>
        /// <param name="ntHeaderTimestampPosition">Position in the stream of four zero bytes to be replaced by a timestamp</param>
        private static void WriteDeterministicGuidAndTimestamps(
            Stream peStream,
            long mvidPosition,
            long ntHeaderTimestampPosition)
        {
            Debug.Assert(mvidPosition != 0);
            Debug.Assert(ntHeaderTimestampPosition != 0);

            var previousPosition = peStream.Position;

            // Compute and write deterministic guid data over the relevant portion of the stream
            peStream.Position = 0;
            var contentId = ContentId.FromHash(CryptographicHashProvider.ComputeSha1(peStream));

            // The existing Guid should be zero.
            CheckZeroDataInStream(peStream, mvidPosition, contentId.Guid.Length);
            peStream.Position = mvidPosition;
            peStream.Write(contentId.Guid, 0, contentId.Guid.Length);

            // The existing timestamp should be zero.
            CheckZeroDataInStream(peStream, ntHeaderTimestampPosition, contentId.Stamp.Length);
            peStream.Position = ntHeaderTimestampPosition;
            peStream.Write(contentId.Stamp, 0, contentId.Stamp.Length);

            // Restore the caller's stream position.
            peStream.Position = previousPosition;
        }
[Conditional("DEBUG")]
private static void CheckZeroDataInStream(Stream stream, long position, int bytes)
{
stream.Position = position;
for (int i = 0; i < bytes; i++)
{
int value = stream.ReadByte();
Debug.Assert(value == 0);
}
}
        /// <summary>
        /// Size in bytes of the strong-name signature blob to reserve, or 0 when
        /// the module is not an assembly or carries no key material.
        /// </summary>
        private int ComputeStrongNameSignatureSize()
        {
            IAssembly assembly = _module.AsAssembly;
            if (assembly == null)
            {
                return 0;
            }

            // EDMAURER the count of characters divided by two because the each pair of characters will turn in to one byte.
            int keySize = (assembly.SignatureKey == null) ? 0 : assembly.SignatureKey.Length / 2;

            if (keySize == 0)
            {
                // Fall back to the public key when no signature key is set.
                keySize = assembly.PublicKey.Length;
            }

            if (keySize == 0)
            {
                return 0;
            }

            // NOTE(review): 128 appears to be the minimum reserved signature
            // size and 32 a header overhead subtracted from larger keys —
            // confirm against the strong-name signing format.
            return (keySize < 128 + 32) ? 128 : keySize - 32;
        }
/// <summary>
/// Offset (within the .text section) of the debug table: it follows the
/// metadata, managed resources and the strong name hash reservation.
/// </summary>
private int ComputeOffsetToDebugTable(MetadataSizes metadataSizes)
{
    int offset = ComputeOffsetToMetadata(metadataSizes.ILStreamSize);
    offset += metadataSizes.MetadataSize;
    offset += metadataSizes.ResourceDataSize;
    offset += ComputeStrongNameSignatureSize(); // size of strong name hash
    return offset;
}
/// <summary>
/// Offset (within the .text section) of the import table, which sits
/// directly after the debug directory.
/// </summary>
private int ComputeOffsetToImportTable(MetadataSizes metadataSizes)
{
    // TODO: add size of unmanaged export stubs (when and if these are ever supported).
    int debugTableEnd = ComputeOffsetToDebugTable(metadataSizes) + ComputeSizeOfDebugDirectory();
    return debugTableEnd;
}
/// <summary>
/// Offset (within the .text section) of the metadata: IAT, then the 72-byte
/// CLR header, then the IL stream padded to a 4-byte boundary.
/// </summary>
private int ComputeOffsetToMetadata(int ilStreamLength)
{
    const int SizeOfClrHeader = 72;
    int alignedIlLength = BitArithmeticUtilities.Align(ilStreamLength, 4);
    return _sizeOfImportAddressTable + SizeOfClrHeader + alignedIlLength;
}
// Fixed size of a single IMAGE_DEBUG_DIRECTORY entry: seven 4-byte fields.
private const int ImageDebugDirectoryBaseSize =
    sizeof(uint) + // Characteristics
    sizeof(uint) + // TimeDateStamp
    sizeof(uint) + // Version
    sizeof(uint) + // Type
    sizeof(uint) + // SizeOfData
    sizeof(uint) + // AddressOfRawData
    sizeof(uint);  // PointerToRawData
/// <summary>
/// Size of the variable-length RSDS record that follows the debug directory
/// entry: signature + GUID + age + UTF-8 PDB path + null terminator.
/// </summary>
private int ComputeSizeOfDebugDirectoryData()
{
    int size = 4;                                        // 4B signature "RSDS"
    size += 16;                                          // GUID
    size += sizeof(uint);                                // Age
    size += Encoding.UTF8.GetByteCount(_pdbPathOpt) + 1; // path + null terminator
    return size;
}
/// <summary>
/// Total size of the debug directory (fixed entry + RSDS data), or 0 when no
/// PDB is being emitted.
/// </summary>
private int ComputeSizeOfDebugDirectory()
{
    if (!EmitPdb)
    {
        return 0;
    }

    return ImageDebugDirectoryBaseSize + ComputeSizeOfDebugDirectoryData();
}
/// <summary>
/// Computes the combined size of the DOS stub, PE signature, COFF header,
/// optional header and the section table. As a side effect, records the
/// final section count in <c>_ntHeader.NumberOfSections</c>, so this must
/// run after all section data writers are populated.
/// </summary>
private uint ComputeSizeOfPeHeaders()
{
    ushort numberOfSections = 1; // .text
    if (_emitRuntimeStartupStub) numberOfSections++; //.reloc
    if (_tlsDataWriter.BaseStream.Length > 0) numberOfSections++; //.tls
    if (_rdataWriter.BaseStream.Length > 0) numberOfSections++; //.rdata
    if (_sdataWriter.BaseStream.Length > 0) numberOfSections++; //.sdata
    if (_coverageDataWriter.BaseStream.Length > 0) numberOfSections++; //.cover
    if (!IteratorHelper.EnumerableIsEmpty(_module.Win32Resources) ||
        _module.Win32ResourceSection != null)
        numberOfSections++; //.rsrc;

    _ntHeader.NumberOfSections = numberOfSections;

    // 128B DOS stub + 4B PE signature + 20B COFF header + 224B optional header
    // + one 40B section header per section.
    uint sizeOfPeHeaders = 128 + 4 + 20 + 224 + 40u * numberOfSections;
    if (_module.Requires64bits)
    {
        sizeOfPeHeaders += 16; // the PE32+ optional header is 240 bytes, not 224
    }

    return sizeOfPeHeaders;
}
/// <summary>
/// Total byte size of the .text section: everything up to the import table,
/// plus (when a startup stub is emitted) the import/name tables and the stub
/// itself, plus the mapped field data blob at the end.
/// </summary>
private int ComputeSizeOfTextSection(MetadataSizes metadataSizes)
{
    int textSectionLength = this.ComputeOffsetToImportTable(metadataSizes);

    if (_emitRuntimeStartupStub)
    {
        textSectionLength += !_module.Requires64bits ? 66 : 70; //size of import table
        textSectionLength += 14; //size of name table
        textSectionLength = BitArithmeticUtilities.Align(textSectionLength, !_module.Requires64bits ? 4 : 8); //optional padding to make startup stub's target address align on word or double word boundary
        textSectionLength += !_module.Requires64bits ? 8 : 16; //fixed size of runtime startup stub
    }

    // Mapped field data is pre-aligned by the metadata writer.
    Debug.Assert(metadataSizes.MappedFieldDataSize % MetadataWriter.MappedFieldDataAlignment == 0);
    textSectionLength += metadataSizes.MappedFieldDataSize;
    return textSectionLength;
}
/// <summary>
/// Serializes the Win32 resources into <c>_win32ResourceWriter</c> (side
/// effect) and returns the serialized size rounded up to a 4-byte boundary,
/// or 0 when there are no resources.
/// </summary>
private uint ComputeSizeOfWin32Resources(uint resourcesRva)
{
    this.SerializeWin32Resources(resourcesRva);
    uint result = 0;
    if (_win32ResourceWriter.BaseStream.Length > 0)
    {
        result += BitArithmeticUtilities.Align(_win32ResourceWriter.BaseStream.Length, 4);
    }            // result += Align(this.win32ResourceWriter.BaseStream.Length+1, 8);

    return result;
}
/// <summary>
/// Builds the CLI (COR) header. Note the RVA chaining: metadata, managed
/// resources and the strong name signature are laid out consecutively, each
/// directory's RVA computed from the previous one — do not reorder these
/// assignments.
/// </summary>
private CorHeader CreateCorHeader(MetadataSizes metadataSizes, uint entryPointToken)
{
    CorHeader corHeader = new CorHeader();
    corHeader.CodeManagerTable.RelativeVirtualAddress = 0;
    corHeader.CodeManagerTable.Size = 0;
    corHeader.EntryPointToken = entryPointToken;
    corHeader.ExportAddressTableJumps.RelativeVirtualAddress = 0;
    corHeader.ExportAddressTableJumps.Size = 0;
    corHeader.Flags = this.GetCorHeaderFlags();
    corHeader.MajorRuntimeVersion = 2;
    corHeader.MetadataDirectory.RelativeVirtualAddress = _textSection.RelativeVirtualAddress + (uint)ComputeOffsetToMetadata(metadataSizes.ILStreamSize);
    corHeader.MetadataDirectory.Size = (uint)metadataSizes.MetadataSize;
    corHeader.MinorRuntimeVersion = 5;
    // Managed resources immediately follow the metadata.
    corHeader.Resources.RelativeVirtualAddress = corHeader.MetadataDirectory.RelativeVirtualAddress + corHeader.MetadataDirectory.Size;
    corHeader.Resources.Size = (uint)metadataSizes.ResourceDataSize;
    // The strong name signature immediately follows the resources.
    corHeader.StrongNameSignature.RelativeVirtualAddress = corHeader.Resources.RelativeVirtualAddress + corHeader.Resources.Size;
    corHeader.StrongNameSignature.Size = (uint)ComputeStrongNameSignatureSize();
    corHeader.VTableFixups.RelativeVirtualAddress = 0;
    corHeader.VTableFixups.Size = 0;
    return corHeader;
}
/// <summary>
/// Populates the NT optional header and its data directories from the
/// section layout computed earlier. Must run after all section headers
/// (.text, .rdata, .sdata, .cover, .tls, .rsrc, .reloc) are filled in,
/// since every field below is derived from them.
/// </summary>
private void FillInNtHeader(MetadataSizes metadataSizes, int mappedFieldDataStreamRva)
{
    bool use32bitAddresses = !_module.Requires64bits;
    NtHeader ntHeader = _ntHeader;
    // The startup stub's jump instruction sits 6 (x86) or 10 (x64) bytes
    // before the mapped field data RVA.
    ntHeader.AddressOfEntryPoint = _emitRuntimeStartupStub ? (uint)mappedFieldDataStreamRva - (use32bitAddresses ? 6u : 10u) : 0;
    ntHeader.BaseOfCode = _textSection.RelativeVirtualAddress;
    ntHeader.BaseOfData = _rdataSection.RelativeVirtualAddress;
    ntHeader.PointerToSymbolTable = 0;
    ntHeader.SizeOfCode = _textSection.SizeOfRawData;
    ntHeader.SizeOfInitializedData = _rdataSection.SizeOfRawData + _coverSection.SizeOfRawData + _sdataSection.SizeOfRawData + _tlsSection.SizeOfRawData + _resourceSection.SizeOfRawData + _relocSection.SizeOfRawData;
    ntHeader.SizeOfHeaders = BitArithmeticUtilities.Align(this.ComputeSizeOfPeHeaders(), _module.FileAlignment);
    ntHeader.SizeOfImage = BitArithmeticUtilities.Align(_relocSection.RelativeVirtualAddress + _relocSection.VirtualSize, 0x2000);
    ntHeader.SizeOfUninitializedData = 0;

    // In the PE File Header this is a "Time/Date Stamp" whose description is "Time and date
    // the file was created in seconds since January 1st 1970 00:00:00 or 0"
    // However, when we want to make it deterministic we fill it in (later) with bits from the hash of the full PE file.
    ntHeader.TimeDateStamp = _deterministic ? 0 : (uint)(DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds;

    ntHeader.ImportAddressTable.RelativeVirtualAddress = (_emitRuntimeStartupStub) ? _textSection.RelativeVirtualAddress : 0;
    ntHeader.ImportAddressTable.Size = (uint)_sizeOfImportAddressTable;
    ntHeader.CliHeaderTable.RelativeVirtualAddress = _textSection.RelativeVirtualAddress + ntHeader.ImportAddressTable.Size;
    ntHeader.CliHeaderTable.Size = 72;

    ntHeader.ImportTable.RelativeVirtualAddress = _textSection.RelativeVirtualAddress + (uint)ComputeOffsetToImportTable(metadataSizes);
    if (!_emitRuntimeStartupStub)
    {
        ntHeader.ImportTable.Size = 0;
        ntHeader.ImportTable.RelativeVirtualAddress = 0;
    }
    else
    {
        ntHeader.ImportTable.Size = use32bitAddresses ? 66u : 70u;
        ntHeader.ImportTable.Size += 13; //size of nametable
    }

    ntHeader.BaseRelocationTable.RelativeVirtualAddress = (_emitRuntimeStartupStub) ? _relocSection.RelativeVirtualAddress : 0;
    ntHeader.BaseRelocationTable.Size = _relocSection.VirtualSize;
    ntHeader.BoundImportTable.RelativeVirtualAddress = 0;
    ntHeader.BoundImportTable.Size = 0;
    ntHeader.CertificateTable.RelativeVirtualAddress = 0;
    ntHeader.CertificateTable.Size = 0;
    ntHeader.CopyrightTable.RelativeVirtualAddress = 0;
    ntHeader.CopyrightTable.Size = 0;
    ntHeader.DebugTable.RelativeVirtualAddress = EmitPdb ? _textSection.RelativeVirtualAddress + (uint)ComputeOffsetToDebugTable(metadataSizes) : 0u;
    ntHeader.DebugTable.Size = EmitPdb ? ImageDebugDirectoryBaseSize : 0u; // Only the size of the fixed part of the debug table goes here.
    ntHeader.DelayImportTable.RelativeVirtualAddress = 0;
    ntHeader.DelayImportTable.Size = 0;
    ntHeader.ExceptionTable.RelativeVirtualAddress = 0;
    ntHeader.ExceptionTable.Size = 0;
    ntHeader.ExportTable.RelativeVirtualAddress = 0;
    ntHeader.ExportTable.Size = 0;
    ntHeader.GlobalPointerTable.RelativeVirtualAddress = 0;
    ntHeader.GlobalPointerTable.Size = 0;
    ntHeader.LoadConfigTable.RelativeVirtualAddress = 0;
    ntHeader.LoadConfigTable.Size = 0;
    ntHeader.Reserved.RelativeVirtualAddress = 0;
    ntHeader.Reserved.Size = 0;
    ntHeader.ResourceTable.RelativeVirtualAddress = _resourceSection.SizeOfRawData == 0 ? 0u : _resourceSection.RelativeVirtualAddress;
    ntHeader.ResourceTable.Size = _resourceSection.VirtualSize;
    ntHeader.ThreadLocalStorageTable.RelativeVirtualAddress = _tlsSection.SizeOfRawData == 0 ? 0u : _tlsSection.RelativeVirtualAddress;
    ntHeader.ThreadLocalStorageTable.Size = _tlsSection.SizeOfRawData;
}
/// <summary>
/// Lazily creates the .text section header once metadata sizes are known.
/// Does nothing if the header has already been created.
/// </summary>
private void FillInTextSectionHeader(MetadataSizes metadataSizes)
{
    if (_textSection != null)
    {
        return;
    }

    uint headersSize = ComputeSizeOfPeHeaders();
    uint textSize = (uint)ComputeSizeOfTextSection(metadataSizes);

    _textSection = new SectionHeader
    {
        Characteristics = 0x60000020, // section is read + execute + code
        Name = ".text",
        NumberOfLinenumbers = 0,
        NumberOfRelocations = 0,
        PointerToLinenumbers = 0,
        // Raw data starts at the file-alignment boundary after the headers;
        // the RVA is rounded up to the 0x2000 section alignment.
        PointerToRawData = BitArithmeticUtilities.Align(headersSize, _module.FileAlignment),
        PointerToRelocations = 0,
        RelativeVirtualAddress = BitArithmeticUtilities.Align(headersSize, 0x2000),
        SizeOfRawData = BitArithmeticUtilities.Align(textSize, _module.FileAlignment),
        VirtualSize = textSize
    };
}
/// <summary>
/// Lays out the remaining sections after .text, in fixed order:
/// .rdata, .sdata, .cover, .tls, .rsrc, .reloc. Each section's file offset
/// follows the previous section's raw data and each RVA is rounded up to the
/// 0x2000 section alignment, so the creation order below must not change.
/// Requires <c>_textSection</c> to be filled in first.
/// </summary>
private void FillInSectionHeaders()
{
    _rdataSection = new SectionHeader
    {
        Characteristics = 0x40000040, // section is read + initialized
        Name = ".rdata",
        NumberOfLinenumbers = 0,
        NumberOfRelocations = 0,
        PointerToLinenumbers = 0,
        PointerToRawData = _textSection.PointerToRawData + _textSection.SizeOfRawData,
        PointerToRelocations = 0,
        RelativeVirtualAddress = BitArithmeticUtilities.Align(_textSection.RelativeVirtualAddress + _textSection.VirtualSize, 0x2000),
        SizeOfRawData = BitArithmeticUtilities.Align(_rdataWriter.BaseStream.Length, _module.FileAlignment),
        VirtualSize = _rdataWriter.BaseStream.Length,
    };

    _sdataSection = new SectionHeader
    {
        Characteristics = 0xC0000040, // section is write + read + initialized
        Name = ".sdata",
        NumberOfLinenumbers = 0,
        NumberOfRelocations = 0,
        PointerToLinenumbers = 0,
        PointerToRawData = _rdataSection.PointerToRawData + _rdataSection.SizeOfRawData,
        PointerToRelocations = 0,
        RelativeVirtualAddress = BitArithmeticUtilities.Align(_rdataSection.RelativeVirtualAddress + _rdataSection.VirtualSize, 0x2000),
        SizeOfRawData = BitArithmeticUtilities.Align(_sdataWriter.BaseStream.Length, _module.FileAlignment),
        VirtualSize = _sdataWriter.BaseStream.Length,
    };

    _coverSection = new SectionHeader
    {
        Characteristics = 0xC8000040, // section is not paged + write + read + initialized
        Name = ".cover",
        NumberOfLinenumbers = 0,
        NumberOfRelocations = 0,
        PointerToLinenumbers = 0,
        PointerToRawData = _sdataSection.PointerToRawData + _sdataSection.SizeOfRawData,
        PointerToRelocations = 0,
        RelativeVirtualAddress = BitArithmeticUtilities.Align(_sdataSection.RelativeVirtualAddress + _sdataSection.VirtualSize, 0x2000),
        SizeOfRawData = BitArithmeticUtilities.Align(_coverageDataWriter.BaseStream.Length, _module.FileAlignment),
        VirtualSize = _coverageDataWriter.BaseStream.Length,
    };

    _tlsSection = new SectionHeader
    {
        Characteristics = 0xC0000040, // section is write + read + initialized
        Name = ".tls",
        NumberOfLinenumbers = 0,
        NumberOfRelocations = 0,
        PointerToLinenumbers = 0,
        PointerToRawData = _coverSection.PointerToRawData + _coverSection.SizeOfRawData,
        PointerToRelocations = 0,
        RelativeVirtualAddress = BitArithmeticUtilities.Align(_coverSection.RelativeVirtualAddress + _coverSection.VirtualSize, 0x2000),
        SizeOfRawData = BitArithmeticUtilities.Align(_tlsDataWriter.BaseStream.Length, _module.FileAlignment),
        VirtualSize = _tlsDataWriter.BaseStream.Length,
    };

    // The resource RVA must be known before serialization because the resource
    // data entries embed absolute RVAs (see ComputeSizeOfWin32Resources).
    uint resourcesRva = BitArithmeticUtilities.Align(_tlsSection.RelativeVirtualAddress + _tlsSection.VirtualSize, 0x2000);
    uint sizeOfWin32Resources = this.ComputeSizeOfWin32Resources(resourcesRva);

    _resourceSection = new SectionHeader
    {
        Characteristics = 0x40000040, // section is read + initialized
        Name = ".rsrc",
        NumberOfLinenumbers = 0,
        NumberOfRelocations = 0,
        PointerToLinenumbers = 0,
        PointerToRawData = _tlsSection.PointerToRawData + _tlsSection.SizeOfRawData,
        PointerToRelocations = 0,
        RelativeVirtualAddress = resourcesRva,
        SizeOfRawData = BitArithmeticUtilities.Align(sizeOfWin32Resources, _module.FileAlignment),
        VirtualSize = sizeOfWin32Resources,
    };

    _relocSection = new SectionHeader
    {
        Characteristics = 0x42000040, // section is read + discardable + initialized
        Name = ".reloc",
        NumberOfLinenumbers = 0,
        NumberOfRelocations = 0,
        PointerToLinenumbers = 0,
        PointerToRawData = _resourceSection.PointerToRawData + _resourceSection.SizeOfRawData,
        PointerToRelocations = 0,
        RelativeVirtualAddress = BitArithmeticUtilities.Align(_resourceSection.RelativeVirtualAddress + _resourceSection.VirtualSize, 0x2000),
        SizeOfRawData = _emitRuntimeStartupStub ? _module.FileAlignment : 0,
        VirtualSize = _emitRuntimeStartupStub ? (_module.Requires64bits && !_module.RequiresAmdInstructionSet ? 14u : 12u) : 0,
    };
}
/// <summary>
/// Maps module characteristics onto the COR header flags word. A module that
/// prefers 32-bit gets both Requires32Bit and Prefers32Bit set.
/// </summary>
private CorFlags GetCorHeaderFlags()
{
    CorFlags flags = 0;
    if (_module.ILOnly)
    {
        flags |= CorFlags.ILOnly;
    }

    if (_module.Requires32bits)
    {
        flags |= CorFlags.Requires32Bit;
    }

    if (_module.StrongNameSigned)
    {
        flags |= CorFlags.StrongNameSigned;
    }

    if (_module.TrackDebugData)
    {
        flags |= CorFlags.TrackDebugData;
    }

    if (_module.Prefers32bits)
    {
        flags |= CorFlags.Requires32Bit | CorFlags.Prefers32Bit;
    }

    return flags;
}
////
//// Resource Format.
////
////
//// Resource directory consists of two counts, following by a variable length
//// array of directory entries. The first count is the number of entries at
//// beginning of the array that have actual names associated with each entry.
//// The entries are in ascending order, case insensitive strings. The second
//// count is the number of entries that immediately follow the named entries.
//// This second count identifies the number of entries that have 16-bit integer
//// Ids as their name. These entries are also sorted in ascending order.
////
//// This structure allows fast lookup by either name or number, but for any
//// given resource entry only one form of lookup is supported, not both.
//// This is consistent with the syntax of the .RC file and the .RES file.
////
//typedef struct _IMAGE_RESOURCE_DIRECTORY {
// DWORD Characteristics;
// DWORD TimeDateStamp;
// WORD MajorVersion;
// WORD MinorVersion;
// WORD NumberOfNamedEntries;
// WORD NumberOfIdEntries;
//// IMAGE_RESOURCE_DIRECTORY_ENTRY DirectoryEntries[];
//} IMAGE_RESOURCE_DIRECTORY, *PIMAGE_RESOURCE_DIRECTORY;
//#define IMAGE_RESOURCE_NAME_IS_STRING 0x80000000
//#define IMAGE_RESOURCE_DATA_IS_DIRECTORY 0x80000000
////
//// Each directory contains the 32-bit Name of the entry and an offset,
//// relative to the beginning of the resource directory of the data associated
//// with this directory entry. If the name of the entry is an actual text
//// string instead of an integer Id, then the high order bit of the name field
//// is set to one and the low order 31-bits are an offset, relative to the
//// beginning of the resource directory of the string, which is of type
//// IMAGE_RESOURCE_DIRECTORY_STRING. Otherwise the high bit is clear and the
//// low-order 16-bits are the integer Id that identify this resource directory
//// entry. If the directory entry is yet another resource directory (i.e. a
//// subdirectory), then the high order bit of the offset field will be
//// set to indicate this. Otherwise the high bit is clear and the offset
//// field points to a resource data entry.
////
//typedef struct _IMAGE_RESOURCE_DIRECTORY_ENTRY {
// union {
// struct {
// DWORD NameOffset:31;
// DWORD NameIsString:1;
// } DUMMYSTRUCTNAME;
// DWORD Name;
// WORD Id;
// } DUMMYUNIONNAME;
// union {
// DWORD OffsetToData;
// struct {
// DWORD OffsetToDirectory:31;
// DWORD DataIsDirectory:1;
// } DUMMYSTRUCTNAME2;
// } DUMMYUNIONNAME2;
//} IMAGE_RESOURCE_DIRECTORY_ENTRY, *PIMAGE_RESOURCE_DIRECTORY_ENTRY;
////
//// For resource directory entries that have actual string names, the Name
//// field of the directory entry points to an object of the following type.
//// All of these string objects are stored together after the last resource
//// directory entry and before the first resource data object. This minimizes
//// the impact of these variable length objects on the alignment of the fixed
//// size directory entry objects.
////
//typedef struct _IMAGE_RESOURCE_DIRECTORY_STRING {
// WORD Length;
// CHAR NameString[ 1 ];
//} IMAGE_RESOURCE_DIRECTORY_STRING, *PIMAGE_RESOURCE_DIRECTORY_STRING;
//typedef struct _IMAGE_RESOURCE_DIR_STRING_U {
// WORD Length;
// WCHAR NameString[ 1 ];
//} IMAGE_RESOURCE_DIR_STRING_U, *PIMAGE_RESOURCE_DIR_STRING_U;
////
//// Each resource data entry describes a leaf node in the resource directory
//// tree. It contains an offset, relative to the beginning of the resource
//// directory of the data for the resource, a size field that gives the number
//// of bytes of data at that offset, a CodePage that should be used when
//// decoding code point values within the resource data. Typically for new
//// applications the code page would be the unicode code page.
////
//typedef struct _IMAGE_RESOURCE_DATA_ENTRY {
// DWORD OffsetToData;
// DWORD Size;
// DWORD CodePage;
// DWORD Reserved;
//} IMAGE_RESOURCE_DATA_ENTRY, *PIMAGE_RESOURCE_DATA_ENTRY;
/// <summary>
/// Node of the Win32 resource directory tree being built (type, name, or
/// language level). Entries holds either nested Directory nodes or, at the
/// leaf level, IWin32Resource data entries.
/// </summary>
private class Directory
{
    internal readonly string Name;          // string identifier; used when ID is negative
    internal readonly int ID;               // ordinal identifier; negative means "named by string"
    internal ushort NumberOfNamedEntries;   // entries identified by string (must precede id entries)
    internal ushort NumberOfIdEntries;      // entries identified by 16-bit id
    internal readonly List<object> Entries; // Directory or IWin32Resource elements

    internal Directory(string name, int id)
    {
        this.Name = name;
        this.ID = id;
        this.Entries = new List<object>();
    }
}
/// <summary>
/// Orders two Win32 resources first by type (id/name), then — for equal
/// types — by resource id/name.
/// </summary>
private static int CompareResources(IWin32Resource left, IWin32Resource right)
{
    int byType = CompareResourceIdentifiers(left.TypeId, left.TypeName, right.TypeId, right.TypeName);
    if (byType != 0)
    {
        return byType;
    }

    return CompareResourceIdentifiers(left.Id, left.Name, right.Id, right.Name);
}
//when comparing a string vs ordinal, the string should always be less than the ordinal. Per the spec,
//entries identified by string must precede those identified by ordinal.
/// <summary>
/// Compares two resource identifiers. Each identifier is either a string
/// name (non-null string argument) or an integer ordinal. String-named
/// identifiers sort before ordinal-named ones; two ordinals compare
/// numerically; two strings compare with a case-insensitive ordinal
/// comparison.
/// </summary>
private static int CompareResourceIdentifiers(int xOrdinal, string xString, int yOrdinal, string yString)
{
    if (xString == null)
    {
        if (yString == null)
        {
            // Use CompareTo rather than "xOrdinal - yOrdinal": the subtraction
            // can overflow for operands of opposite sign and report the wrong
            // ordering (e.g. int.MinValue - 1 wraps to a positive value).
            return xOrdinal.CompareTo(yOrdinal);
        }

        // y is named by string, x by ordinal: string-named entries come first.
        return 1;
    }
    else if (yString == null)
    {
        return -1;
    }
    else
    {
        return String.Compare(xString, yString, StringComparison.OrdinalIgnoreCase);
    }
}
//sort the resources by ID least to greatest then by NAME.
//Where strings and ordinals are compared, strings are less than ordinals.
// NOTE(review): this OrderBy overload is passed the CompareResources comparison
// directly, so it is presumably a project-local extension (comparison-based)
// rather than the LINQ key-selector overload — confirm against the enclosing
// project before changing.
internal static IEnumerable<IWin32Resource> SortResources(IEnumerable<IWin32Resource> resources)
{
    return resources.OrderBy(CompareResources);
}
//Win32 resources are supplied to the compiler in one of two forms, .RES (the output of the resource compiler),
//or .OBJ (the output of running cvtres.exe on a .RES file). A .RES file is parsed and processed into
//a set of objects implementing IWin32Resources. These are then ordered and the final image form is constructed
//and written to the resource section. Resources in .OBJ form are already very close to their final output
//form. Rather than reading them and parsing them into a set of objects similar to those produced by
//processing a .RES file, we process them like the native linker would, copy the relevant sections from
//the .OBJ into our output and apply some fixups.
private void SerializeWin32Resources(uint resourcesRva)
{
    var nativeResourceSection = _module.Win32ResourceSection;
    if (nativeResourceSection != null)
    {
        // Pre-linked .OBJ form: copy the section bytes and apply relocation fixups.
        SerializeWin32Resources(nativeResourceSection, resourcesRva);
    }
    else
    {
        var parsedResources = _module.Win32Resources;
        if (!IteratorHelper.EnumerableIsEmpty(parsedResources))
        {
            // Parsed .RES form: build the directory tree and serialize it.
            SerializeWin32Resources(parsedResources, resourcesRva);
        }
    }
}
/// <summary>
/// Builds the three-level (type / name / language) resource directory tree
/// from the sorted resource list and serializes it, followed by the string
/// identifiers and the raw resource data, into <c>_win32ResourceWriter</c>.
/// </summary>
private void SerializeWin32Resources(IEnumerable<IWin32Resource> theResources, uint resourcesRva)
{
    theResources = SortResources(theResources);

    Directory typeDirectory = new Directory(string.Empty, 0);
    Directory nameDirectory = null;
    Directory languageDirectory = null;
    int lastTypeID = int.MinValue;
    string lastTypeName = null;
    int lastID = int.MinValue;
    string lastName = null;
    uint sizeOfDirectoryTree = 16; // 16-byte IMAGE_RESOURCE_DIRECTORY header of the root

    //EDMAURER note that this list is assumed to be sorted lowest to highest
    //first by typeId, then by Id.
    foreach (IWin32Resource r in theResources)
    {
        // A negative TypeId means the type is identified by TypeName instead.
        bool typeDifferent = (r.TypeId < 0 && r.TypeName != lastTypeName) || r.TypeId > lastTypeID;
        if (typeDifferent)
        {
            lastTypeID = r.TypeId;
            lastTypeName = r.TypeName;
            if (lastTypeID < 0)
            {
                Debug.Assert(typeDirectory.NumberOfIdEntries == 0, "Not all Win32 resources with types encoded as strings precede those encoded as ints");
                typeDirectory.NumberOfNamedEntries++;
            }
            else
            {
                typeDirectory.NumberOfIdEntries++;
            }

            sizeOfDirectoryTree += 24; // 8-byte directory entry + 16-byte subdirectory header
            typeDirectory.Entries.Add(nameDirectory = new Directory(lastTypeName, lastTypeID));
        }

        if (typeDifferent || (r.Id < 0 && r.Name != lastName) || r.Id > lastID)
        {
            lastID = r.Id;
            lastName = r.Name;
            if (lastID < 0)
            {
                Debug.Assert(nameDirectory.NumberOfIdEntries == 0, "Not all Win32 resources with names encoded as strings precede those encoded as ints");
                nameDirectory.NumberOfNamedEntries++;
            }
            else
            {
                nameDirectory.NumberOfIdEntries++;
            }

            sizeOfDirectoryTree += 24;
            nameDirectory.Entries.Add(languageDirectory = new Directory(lastName, lastID));
        }

        languageDirectory.NumberOfIdEntries++;
        sizeOfDirectoryTree += 8; // leaf entries carry no subdirectory header
        languageDirectory.Entries.Add(r);
    }

    MemoryStream stream = MemoryStream.GetInstance();
    BinaryWriter dataWriter = new BinaryWriter(stream, true);

    //'dataWriter' is where opaque resource data goes as well as strings that are used as type or name identifiers
    this.WriteDirectory(typeDirectory, _win32ResourceWriter, 0, 0, sizeOfDirectoryTree, resourcesRva, dataWriter);
    dataWriter.BaseStream.WriteTo(_win32ResourceWriter.BaseStream);
    _win32ResourceWriter.WriteByte(0);
    // Pad the serialized resources to a 4-byte boundary.
    while ((_win32ResourceWriter.BaseStream.Length % 4) != 0)
    {
        _win32ResourceWriter.WriteByte(0);
    }

    stream.Free();
}
/// <summary>
/// Recursively serializes one resource directory table: the 16-byte header,
/// then one 8-byte entry per child, then the subdirectory tables. Identifier
/// strings and IMAGE_RESOURCE_DATA_ENTRY records (with their raw data) go to
/// <paramref name="dataWriter"/>, which is appended after the whole tree.
/// </summary>
/// <param name="directory">Directory node to serialize.</param>
/// <param name="writer">Destination for the directory tables themselves.</param>
/// <param name="offset">Byte offset of this table within the directory tree.</param>
/// <param name="level">0 = type, 1 = name, 2 = language.</param>
/// <param name="sizeOfDirectoryTree">Total tree size, used to rebase dataWriter offsets.</param>
/// <param name="virtualAddressBase">RVA of the resource section.</param>
/// <param name="dataWriter">Destination for strings and resource data.</param>
private void WriteDirectory(Directory directory, BinaryWriter writer, uint offset, uint level, uint sizeOfDirectoryTree, uint virtualAddressBase, BinaryWriter dataWriter)
{
    writer.WriteUint(0); // Characteristics
    writer.WriteUint(0); // Timestamp
    writer.WriteUint(0); // Version
    writer.WriteUshort(directory.NumberOfNamedEntries);
    writer.WriteUshort(directory.NumberOfIdEntries);
    uint n = (uint)directory.Entries.Count;
    uint k = offset + 16 + n * 8; // offset just past this table's entries
    for (int i = 0; i < n; i++)
    {
        int id;
        string name;
        uint nameOffset = dataWriter.BaseStream.Position + sizeOfDirectoryTree;
        uint directoryOffset = k;
        Directory subDir = directory.Entries[i] as Directory;
        if (subDir != null)
        {
            id = subDir.ID;
            name = subDir.Name;
            if (level == 0)
            {
                k += SizeOfDirectory(subDir);
            }
            else
            {
                k += 16 + 8 * (uint)subDir.Entries.Count;
            }
        }
        else
        {
            //EDMAURER write out an IMAGE_RESOURCE_DATA_ENTRY followed
            //immediately by the data that it refers to. This results
            //in a layout different than that produced by pulling the resources
            //from an OBJ. In that case all of the data bits of a resource are
            //contiguous in .rsrc$02. After processing these will end up at
            //the end of .rsrc following all of the directory
            //info and IMAGE_RESOURCE_DATA_ENTRYs
            IWin32Resource r = (IWin32Resource)directory.Entries[i];
            id = level == 0 ? r.TypeId : level == 1 ? r.Id : (int)r.LanguageId;
            name = level == 0 ? r.TypeName : level == 1 ? r.Name : null;
            // RVA of the data, which follows the 16-byte data-entry record.
            dataWriter.WriteUint(virtualAddressBase + sizeOfDirectoryTree + 16 + dataWriter.BaseStream.Position);
            byte[] data = new List<byte>(r.Data).ToArray();
            dataWriter.WriteUint((uint)data.Length);
            dataWriter.WriteUint(r.CodePage);
            dataWriter.WriteUint(0);
            dataWriter.WriteBytes(data);
            // Pad resource data to a 4-byte boundary.
            while ((dataWriter.BaseStream.Length % 4) != 0)
            {
                dataWriter.WriteByte(0);
            }
        }

        if (id >= 0)
        {
            writer.WriteInt(id);
        }
        else
        {
            if (name == null)
            {
                name = string.Empty;
            }

            // High bit set marks the name field as a string offset.
            writer.WriteUint(nameOffset | 0x80000000);
            dataWriter.WriteUshort((ushort)name.Length);
            dataWriter.WriteChars(name.ToCharArray());  // REVIEW: what happens if the name contains chars that do not fit into a single utf8 code point?
        }

        if (subDir != null)
        {
            // High bit set marks the offset as pointing to a subdirectory.
            writer.WriteUint(directoryOffset | 0x80000000);
        }
        else
        {
            writer.WriteUint(nameOffset);
        }
    }

    // Second pass: emit the subdirectory tables at the offsets computed above.
    k = offset + 16 + n * 8;
    for (int i = 0; i < n; i++)
    {
        Directory subDir = directory.Entries[i] as Directory;
        if (subDir != null)
        {
            this.WriteDirectory(subDir, writer, k, level + 1, sizeOfDirectoryTree, virtualAddressBase, dataWriter);
            if (level == 0)
            {
                k += SizeOfDirectory(subDir);
            }
            else
            {
                k += 16 + 8 * (uint)subDir.Entries.Count;
            }
        }
    }
}
/// <summary>
/// Size in bytes of a directory table plus the tables of its immediate
/// subdirectories: a 16-byte header and 8 bytes per entry at each level.
/// </summary>
private static uint SizeOfDirectory(Directory/*!*/ directory)
{
    uint size = 16 + 8 * (uint)directory.Entries.Count;
    foreach (object entry in directory.Entries)
    {
        Directory subDirectory = entry as Directory;
        if (subDirectory != null)
        {
            size += 16 + 8 * (uint)subDirectory.Entries.Count;
        }
    }

    return size;
}
/// <summary>
/// Copies a pre-built resource section (the .OBJ form produced by cvtres)
/// into the output and rebases every 4-byte value at the recorded relocation
/// sites by the resource section's RVA.
/// </summary>
private void SerializeWin32Resources(ResourceSection resourceSections, uint resourcesRva)
{
    _win32ResourceWriter.WriteBytes(resourceSections.SectionBytes);

    var savedPosition = _win32ResourceWriter.BaseStream.Position;

    var readStream = new System.IO.MemoryStream(resourceSections.SectionBytes);
    var reader = new BinaryReader(readStream);

    foreach (int addressToFixup in resourceSections.Relocations)
    {
        // Read the original value from the source bytes and overwrite the copy
        // in the output with the RVA-adjusted value.
        _win32ResourceWriter.BaseStream.Position = (uint)addressToFixup;
        reader.BaseStream.Position = addressToFixup;
        _win32ResourceWriter.WriteUint(reader.ReadUInt32() + resourcesRva);
    }

    _win32ResourceWriter.BaseStream.Position = savedPosition;
}
//#define IMAGE_FILE_RELOCS_STRIPPED 0x0001 // Relocation info stripped from file.
//#define IMAGE_FILE_EXECUTABLE_IMAGE 0x0002 // File is executable (i.e. no unresolved external references).
//#define IMAGE_FILE_LINE_NUMS_STRIPPED 0x0004 // Line numbers stripped from file.
//#define IMAGE_FILE_LOCAL_SYMS_STRIPPED 0x0008 // Local symbols stripped from file.
//#define IMAGE_FILE_AGGRESIVE_WS_TRIM 0x0010 // Aggressively trim working set
//#define IMAGE_FILE_LARGE_ADDRESS_AWARE 0x0020 // App can handle >2gb addresses
//#define IMAGE_FILE_BYTES_REVERSED_LO 0x0080 // Bytes of machine word are reversed.
//#define IMAGE_FILE_32BIT_MACHINE 0x0100 // 32 bit word machine.
//#define IMAGE_FILE_DEBUG_STRIPPED 0x0200 // Debugging info stripped from file in .DBG file
//#define IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP 0x0400 // If Image is on removable media, copy and run from the swap file.
//#define IMAGE_FILE_NET_RUN_FROM_SWAP 0x0800 // If Image is on Net, copy and run from the swap file.
//#define IMAGE_FILE_SYSTEM 0x1000 // System File.
//#define IMAGE_FILE_DLL 0x2000 // File is a DLL.
//#define IMAGE_FILE_UP_SYSTEM_ONLY 0x4000 // File should only be run on a UP machine
//#define IMAGE_FILE_BYTES_REVERSED_HI 0x8000 // Bytes of machine word are reversed.
// Standard 128-byte MS-DOS stub: the "MZ" header (with e_lfanew = 0x80
// pointing at the PE signature) followed by a tiny real-mode program that
// prints "This program cannot be run in DOS mode." and exits.
private static readonly byte[] s_dosHeader = new byte[]
{
    0x4d, 0x5a, 0x90, 0x00, 0x03, 0x00, 0x00, 0x00,
    0x04, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00,
    0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00,
    0x0e, 0x1f, 0xba, 0x0e, 0x00, 0xb4, 0x09, 0xcd,
    0x21, 0xb8, 0x01, 0x4c, 0xcd, 0x21, 0x54, 0x68,
    0x69, 0x73, 0x20, 0x70, 0x72, 0x6f, 0x67, 0x72,
    0x61, 0x6d, 0x20, 0x63, 0x61, 0x6e, 0x6e, 0x6f,
    0x74, 0x20, 0x62, 0x65, 0x20, 0x72, 0x75, 0x6e,
    0x20, 0x69, 0x6e, 0x20, 0x44, 0x4f, 0x53, 0x20,
    0x6d, 0x6f, 0x64, 0x65, 0x2e, 0x0d, 0x0d, 0x0a,
    0x24, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
};
/// <summary>
/// Writes the DOS stub, PE signature, COFF header, optional header (with all
/// 16 data directories) and the section headers to <paramref name="peStream"/>.
/// Every write below is position-sensitive; the trailing offset comments give
/// the byte offset within the optional header (32-bit | 64-bit form).
/// </summary>
/// <param name="peStream">Destination stream.</param>
/// <param name="ntHeaderTimestampPosition">
/// Receives the absolute stream position of the COFF TimeDateStamp field, so
/// a deterministic build can patch it afterwards.
/// </param>
private void WriteHeaders(Stream peStream, out long ntHeaderTimestampPosition)
{
    IModule module = _module;
    NtHeader ntHeader = _ntHeader;
    BinaryWriter writer = new BinaryWriter(_headerStream);

    // MS-DOS stub (128 bytes)
    writer.WriteBytes(s_dosHeader); // TODO: provide an option to suppress the second half of the DOS header?

    // PE Signature (4 bytes)
    writer.WriteUint(0x00004550); /* "PE\0\0" */

    // COFF Header 20 bytes
    writer.WriteUshort((ushort)module.Machine);
    writer.WriteUshort(ntHeader.NumberOfSections);
    ntHeaderTimestampPosition = writer.BaseStream.Position + peStream.Position;
    writer.WriteUint(ntHeader.TimeDateStamp);
    writer.WriteUint(ntHeader.PointerToSymbolTable);
    writer.WriteUint(0); // NumberOfSymbols
    writer.WriteUshort((ushort)(!module.Requires64bits ? 224 : 240)); // SizeOfOptionalHeader

    // ushort characteristics = 0x0002|0x0004|0x0008; // executable | no COFF line nums | no COFF symbols (as required by the standard)
    ushort characteristics = 0x0002; // executable (as required by the Linker team).
    if (module.Kind == ModuleKind.DynamicallyLinkedLibrary || module.Kind == ModuleKind.WindowsRuntimeMetadata)
    {
        characteristics |= 0x2000; // DLL
    }

    if (module.Requires32bits)
    {
        characteristics |= 0x0100; // 32 bit machine (The standard says to always set this, the linker team says otherwise)
                                   //The loader team says that this is not used for anything in the OS.
    }
    else
    {
        characteristics |= 0x0020; // large address aware (the standard says never to set this, the linker team says otherwise).
                                   //The loader team says that this is not overridden for managed binaries and will be respected if set.
    }

    writer.WriteUshort(characteristics);

    // PE Header (224 bytes if 32 bits, 240 bytes if 64 bit)
    if (!module.Requires64bits)
    {
        writer.WriteUshort(0x10B); // Magic = PE32  // 2
    }
    else
    {
        writer.WriteUshort(0x20B); // Magic = PE32+ // 2
    }

    writer.WriteByte(module.LinkerMajorVersion); // 3
    writer.WriteByte(module.LinkerMinorVersion); // 4
    writer.WriteUint(ntHeader.SizeOfCode); // 8
    writer.WriteUint(ntHeader.SizeOfInitializedData); // 12
    writer.WriteUint(ntHeader.SizeOfUninitializedData); // 16
    writer.WriteUint(ntHeader.AddressOfEntryPoint); // 20
    writer.WriteUint(ntHeader.BaseOfCode); // 24
    if (!module.Requires64bits)
    {
        writer.WriteUint(ntHeader.BaseOfData); // 28
        writer.WriteUint((uint)module.BaseAddress); // 32
    }
    else
    {
        // PE32+ has no BaseOfData; ImageBase widens to 8 bytes.
        writer.WriteUlong(module.BaseAddress); // 32
    }

    writer.WriteUint(0x2000); // SectionAlignment 36
    writer.WriteUint(module.FileAlignment); // 40
    writer.WriteUshort(4); // MajorOperatingSystemVersion 42
    writer.WriteUshort(0); // MinorOperatingSystemVersion 44
    writer.WriteUshort(0); // MajorImageVersion 46
    writer.WriteUshort(0); // MinorImageVersion 48
    writer.WriteUshort(module.MajorSubsystemVersion); // MajorSubsystemVersion 50
    writer.WriteUshort(module.MinorSubsystemVersion); // MinorSubsystemVersion 52
    writer.WriteUint(0); // Win32VersionValue 56
    writer.WriteUint(ntHeader.SizeOfImage); // 60
    writer.WriteUint(ntHeader.SizeOfHeaders); // 64
    writer.WriteUint(0); // CheckSum 68

    // Subsystem: 3 = console, 2 = Windows GUI, 0 = unknown.
    switch (module.Kind)
    {
        case ModuleKind.ConsoleApplication:
        case ModuleKind.DynamicallyLinkedLibrary:
        case ModuleKind.WindowsRuntimeMetadata:
            writer.WriteUshort(3); // 70
            break;
        case ModuleKind.WindowsApplication:
            writer.WriteUshort(2); // 70
            break;
        default:
            writer.WriteUshort(0); //
            break;
    }

    writer.WriteUshort(module.DllCharacteristics);

    if (!module.Requires64bits)
    {
        writer.WriteUint((uint)module.SizeOfStackReserve); // 76
        writer.WriteUint((uint)module.SizeOfStackCommit); // 80
        writer.WriteUint((uint)module.SizeOfHeapReserve); // 84
        writer.WriteUint((uint)module.SizeOfHeapCommit); // 88
    }
    else
    {
        writer.WriteUlong(module.SizeOfStackReserve); // 80
        writer.WriteUlong(module.SizeOfStackCommit); // 88
        writer.WriteUlong(module.SizeOfHeapReserve); // 96
        writer.WriteUlong(module.SizeOfHeapCommit); // 104
    }

    writer.WriteUint(0); // LoaderFlags 92|108
    writer.WriteUint(16); // numberOfDataDirectories 96|112

    // The 16 data directories, in their fixed PE order.
    writer.WriteUint(ntHeader.ExportTable.RelativeVirtualAddress); // 100|116
    writer.WriteUint(ntHeader.ExportTable.Size); // 104|120
    writer.WriteUint(ntHeader.ImportTable.RelativeVirtualAddress); // 108|124
    writer.WriteUint(ntHeader.ImportTable.Size); // 112|128
    writer.WriteUint(ntHeader.ResourceTable.RelativeVirtualAddress); // 116|132
    writer.WriteUint(ntHeader.ResourceTable.Size); // 120|136
    writer.WriteUint(ntHeader.ExceptionTable.RelativeVirtualAddress); // 124|140
    writer.WriteUint(ntHeader.ExceptionTable.Size); // 128|144
    writer.WriteUint(ntHeader.CertificateTable.RelativeVirtualAddress); // 132|148
    writer.WriteUint(ntHeader.CertificateTable.Size); // 136|152
    writer.WriteUint(ntHeader.BaseRelocationTable.RelativeVirtualAddress); // 140|156
    writer.WriteUint(ntHeader.BaseRelocationTable.Size); // 144|160
    writer.WriteUint(ntHeader.DebugTable.RelativeVirtualAddress); // 148|164
    writer.WriteUint(ntHeader.DebugTable.Size); // 152|168
    writer.WriteUint(ntHeader.CopyrightTable.RelativeVirtualAddress); // 156|172
    writer.WriteUint(ntHeader.CopyrightTable.Size); // 160|176
    writer.WriteUint(ntHeader.GlobalPointerTable.RelativeVirtualAddress); // 164|180
    writer.WriteUint(ntHeader.GlobalPointerTable.Size); // 168|184
    writer.WriteUint(ntHeader.ThreadLocalStorageTable.RelativeVirtualAddress); // 172|188
    writer.WriteUint(ntHeader.ThreadLocalStorageTable.Size); // 176|192
    writer.WriteUint(ntHeader.LoadConfigTable.RelativeVirtualAddress); // 180|196
    writer.WriteUint(ntHeader.LoadConfigTable.Size); // 184|200
    writer.WriteUint(ntHeader.BoundImportTable.RelativeVirtualAddress); // 188|204
    writer.WriteUint(ntHeader.BoundImportTable.Size); // 192|208
    writer.WriteUint(ntHeader.ImportAddressTable.RelativeVirtualAddress); // 196|212
    writer.WriteUint(ntHeader.ImportAddressTable.Size); // 200|216
    writer.WriteUint(ntHeader.DelayImportTable.RelativeVirtualAddress); // 204|220
    writer.WriteUint(ntHeader.DelayImportTable.Size); // 208|224
    writer.WriteUint(ntHeader.CliHeaderTable.RelativeVirtualAddress); // 212|228
    writer.WriteUint(ntHeader.CliHeaderTable.Size); // 216|232
    writer.WriteUlong(0); // 224|240

    // Section Headers
    WriteSectionHeader(_textSection, writer);
    WriteSectionHeader(_rdataSection, writer);
    WriteSectionHeader(_sdataSection, writer);
    WriteSectionHeader(_coverSection, writer);
    WriteSectionHeader(_resourceSection, writer);
    WriteSectionHeader(_relocSection, writer);
    WriteSectionHeader(_tlsSection, writer);

    writer.BaseStream.WriteTo(peStream);
    _headerStream = _emptyStream;
}
// Emits a single 40-byte COFF section header for the given section.
// Sections with no virtual contents are skipped entirely.
private static void WriteSectionHeader(SectionHeader sectionHeader, BinaryWriter writer)
{
    if (sectionHeader.VirtualSize == 0)
    {
        return;
    }
    // The section name occupies a fixed 8-byte field: shorter names are
    // zero-padded, longer names are truncated to the first 8 characters.
    for (int j = 0, m = sectionHeader.Name.Length; j < 8; j++)
    {
        if (j < m)
        {
            writer.WriteByte((byte)sectionHeader.Name[j]);
        }
        else
        {
            writer.WriteByte(0);
        }
    }
    // Remaining IMAGE_SECTION_HEADER fields, in on-disk order.
    writer.WriteUint(sectionHeader.VirtualSize);
    writer.WriteUint(sectionHeader.RelativeVirtualAddress);
    writer.WriteUint(sectionHeader.SizeOfRawData);
    writer.WriteUint(sectionHeader.PointerToRawData);
    writer.WriteUint(sectionHeader.PointerToRelocations);
    writer.WriteUint(sectionHeader.PointerToLinenumbers);
    writer.WriteUshort(sectionHeader.NumberOfRelocations);
    writer.WriteUshort(sectionHeader.NumberOfLinenumbers);
    writer.WriteUint(sectionHeader.Characteristics);
}
// Writes the complete contents of the .text section at its file offset.
// The emission order is significant and mirrors the RVAs assigned when the
// section was laid out: [IAT], COR header, IL, metadata, managed resources,
// strong-name hash placeholder, debug table, [import table + name table +
// startup stub], mapped field data.
private void WriteTextSection(
    Stream peStream,
    CorHeader corHeader,
    MemoryStream metadataStream,
    MemoryStream ilStream,
    MemoryStream mappedFieldDataStream,
    MemoryStream managedResourceStream,
    MetadataSizes metadataSizes,
    ContentId pdbContentId,
    out long metadataPosition) // receives the absolute stream position where metadata begins
{
    peStream.Position = _textSection.PointerToRawData;
    // The IAT / import table / startup stub are only emitted when the image
    // carries its own native trampoline into the runtime loader.
    if (_emitRuntimeStartupStub)
    {
        this.WriteImportAddressTable(peStream);
    }
    WriteCorHeader(peStream, corHeader);
    WriteIL(peStream, ilStream);
    // Record where the metadata blob starts so callers can locate it later.
    metadataPosition = peStream.Position;
    WriteMetadata(peStream, metadataStream);
    WriteManagedResources(peStream, managedResourceStream);
    // Zero-filled space reserved for the strong name signature (filled in by signing).
    WriteSpaceForHash(peStream, (int)corHeader.StrongNameSignature.Size);
    WriteDebugTable(peStream, pdbContentId, metadataSizes);
    if (_emitRuntimeStartupStub)
    {
        WriteImportTable(peStream);
        WriteNameTable(peStream);
        WriteRuntimeStartupStub(peStream);
    }
    WriteMappedFieldData(peStream, mappedFieldDataStream);
}
// Writes the Import Address Table (IAT): a single slot pointing at the
// hint/name entry of the runtime entry point, followed by a null terminator.
// Slot width depends on image bitness (4 bytes for PE32, 8 for PE32+).
private void WriteImportAddressTable(Stream peStream)
{
    BinaryWriter writer = new BinaryWriter(new MemoryStream(16));
    bool use32bitAddresses = !_module.Requires64bits;
    uint importTableRVA = _ntHeader.ImportTable.RelativeVirtualAddress;
    // The import lookup table starts 40 bytes into the import table (past the
    // import directory entry and its null terminator, see WriteImportTable).
    uint ilRVA = importTableRVA + 40;
    // The hint/name entry follows the lookup table as emitted by
    // WriteImportTable (12 bytes for PE32, 16 for PE32+).
    uint hintRva = ilRVA + (use32bitAddresses ? 12u : 16u);
    // Import Address Table
    if (use32bitAddresses)
    {
        writer.WriteUint(hintRva); // 4
        writer.WriteUint(0); // 8
    }
    else
    {
        writer.WriteUlong(hintRva); // 8
        writer.WriteUlong(0); // 16
    }
    writer.BaseStream.WriteTo(peStream);
}
// Writes the import directory for the single runtime-loader import:
// one import directory entry (plus null terminator), the import lookup
// table, and the hint/name entry for _CorDllMain / _CorExeMain.
private void WriteImportTable(Stream peStream)
{
    BinaryWriter writer = new BinaryWriter(new MemoryStream(70));
    bool use32bitAddresses = !_module.Requires64bits;
    uint importTableRVA = _ntHeader.ImportTable.RelativeVirtualAddress;
    // Lookup table begins after the 20-byte directory entry and the
    // 20-byte null terminator entry.
    uint ilRVA = importTableRVA + 40;
    uint hintRva = ilRVA + (use32bitAddresses ? 12u : 16u);
    // Dll name follows the 2-byte hint and the entry point name
    // (11 characters + NUL terminator = 12 bytes).
    uint nameRva = hintRva + 12 + 2;
    // Import table
    writer.WriteUint(ilRVA); // 4
    writer.WriteUint(0); // 8
    writer.WriteUint(0); // 12
    writer.WriteUint(nameRva); // 16
    writer.WriteUint(_ntHeader.ImportAddressTable.RelativeVirtualAddress); // 20
    // Skip the null directory terminator entry (20 zero bytes).
    writer.BaseStream.Position += 20; // 40
    // Import Lookup table
    if (use32bitAddresses)
    {
        writer.WriteUint(hintRva); // 44
        writer.WriteUint(0); // 48
        writer.WriteUint(0); // 52
    }
    else
    {
        writer.WriteUlong(hintRva); // 48
        writer.WriteUlong(0); // 56
    }
    // Hint table
    writer.WriteUshort(0); // Hint 54|58
    // DLLs and winmd files jump through _CorDllMain; executables through _CorExeMain.
    string entryPointName =
        (_module.Kind == ModuleKind.DynamicallyLinkedLibrary || _module.Kind == ModuleKind.WindowsRuntimeMetadata)
        ? "_CorDllMain" : "_CorExeMain";
    foreach (char ch in entryPointName)
    {
        writer.WriteByte((byte)ch); // 65|69
    }
    writer.WriteByte(0); // 66|70
    writer.BaseStream.WriteTo(peStream);
}
// Emits the import name table: the NUL-terminated name of the runtime
// loader dll ("mscoree.dll"), followed by a zero ushort pad.
private static void WriteNameTable(Stream peStream)
{
    const string runtimeLoaderDll = "mscoree.dll";
    BinaryWriter writer = new BinaryWriter(new MemoryStream(14));
    for (int i = 0; i < runtimeLoaderDll.Length; i++)
    {
        writer.WriteByte((byte)runtimeLoaderDll[i]); // 11
    }
    writer.WriteByte(0); // 12
    writer.WriteUshort(0); // 14
    writer.BaseStream.WriteTo(peStream);
}
// Writes the 72-byte CLI (COR20) header that describes the managed
// contents of the image: metadata location, flags, entry point token,
// resources, strong name signature and the remaining data directories.
private static void WriteCorHeader(Stream peStream, CorHeader corHeader)
{
    BinaryWriter writer = new BinaryWriter(new MemoryStream(72));
    writer.WriteUint(72); // Number of bytes in this header 4
    writer.WriteUshort(corHeader.MajorRuntimeVersion); // 6
    writer.WriteUshort(corHeader.MinorRuntimeVersion); // 8
    writer.WriteUint(corHeader.MetadataDirectory.RelativeVirtualAddress); // 12
    writer.WriteUint(corHeader.MetadataDirectory.Size); // 16
    writer.WriteUint((uint)corHeader.Flags); // 20
    writer.WriteUint(corHeader.EntryPointToken); // 24
    // Empty directories are written as RVA 0 so readers treat them as absent.
    writer.WriteUint(corHeader.Resources.Size == 0 ? 0u : corHeader.Resources.RelativeVirtualAddress); // 28
    writer.WriteUint(corHeader.Resources.Size); // 32
    writer.WriteUint(corHeader.StrongNameSignature.Size == 0 ? 0u : corHeader.StrongNameSignature.RelativeVirtualAddress); // 36
    writer.WriteUint(corHeader.StrongNameSignature.Size); // 40
    writer.WriteUint(corHeader.CodeManagerTable.RelativeVirtualAddress); // 44
    writer.WriteUint(corHeader.CodeManagerTable.Size); // 48
    writer.WriteUint(corHeader.VTableFixups.RelativeVirtualAddress); // 52
    writer.WriteUint(corHeader.VTableFixups.Size); // 56
    writer.WriteUint(corHeader.ExportAddressTableJumps.RelativeVirtualAddress); // 60
    writer.WriteUint(corHeader.ExportAddressTableJumps.Size); // 64
    // ManagedNativeHeader directory is always empty (8 zero bytes).
    writer.WriteUlong(0); // 72
    writer.BaseStream.WriteTo(peStream);
}
// Copies the IL stream into the image and zero-pads to 4-byte alignment.
private static void WriteIL(Stream peStream, MemoryStream ilStream)
{
    ilStream.WriteTo(peStream);
    while ((peStream.Position & 3) != 0)
    {
        peStream.WriteByte(0);
    }
}
// Copies mapped field data into the image and zero-pads to 4-byte alignment.
private static void WriteMappedFieldData(Stream peStream, MemoryStream dataStream)
{
    dataStream.WriteTo(peStream);
    long position = peStream.Position;
    while (position % 4 != 0)
    {
        peStream.WriteByte(0);
        position++;
    }
}
// Reserves a zero-filled region into which the strong name signature
// will be written after signing.
private static void WriteSpaceForHash(Stream peStream, int strongNameSignatureSize)
{
    for (int i = 0; i < strongNameSignatureSize; i++)
    {
        peStream.WriteByte(0);
    }
}
// Copies the metadata blob into the image and zero-pads to 4-byte alignment.
private static void WriteMetadata(Stream peStream, MemoryStream metadataStream)
{
    metadataStream.WriteTo(peStream);
    while ((peStream.Position & 3L) != 0)
    {
        peStream.WriteByte(0);
    }
}
// Copies serialized managed resources into the image, zero-padded to a
// 4-byte boundary.
private static void WriteManagedResources(Stream peStream, MemoryStream managedResourceStream)
{
    managedResourceStream.WriteTo(peStream);
    long position = peStream.Position;
    while (position % 4 != 0)
    {
        peStream.WriteByte(0);
        position++;
    }
}
// Writes the debug directory: a single IMAGE_DEBUG_DIRECTORY entry of type
// CodeView, immediately followed by the RSDS record that binds the image to
// its PDB (guid + age + path). No-op when no PDB is being emitted.
private void WriteDebugTable(Stream peStream, ContentId pdbContentId, MetadataSizes metadataSizes)
{
    if (!EmitPdb)
    {
        return;
    }
    MemoryStream stream = new MemoryStream();
    BinaryWriter writer = new BinaryWriter(stream);
    // characteristics (reserved, written as zero):
    writer.WriteUint(0);
    // PDB stamp
    writer.WriteBytes(pdbContentId.Stamp);
    // version
    writer.WriteUint(0);
    // type:
    const int ImageDebugTypeCodeView = 2;
    writer.WriteUint(ImageDebugTypeCodeView);
    // size of data:
    writer.WriteUint((uint)ComputeSizeOfDebugDirectoryData());
    // The RSDS record is appended directly after this directory entry.
    uint dataOffset = (uint)ComputeOffsetToDebugTable(metadataSizes) + ImageDebugDirectoryBaseSize;
    // AddressOfRawData (RVA of the data).
    // NOTE(review): the original comments labelled this field PointerToRawData
    // and the next one AddressOfRawData, which is backwards relative to the
    // IMAGE_DEBUG_DIRECTORY layout; the emitted values (RVA first, then file
    // position) appear to match the PE/COFF spec — confirm against the spec.
    writer.WriteUint(_textSection.RelativeVirtualAddress + dataOffset);
    // PointerToRawData (position of the data in the PE stream):
    writer.WriteUint(_textSection.PointerToRawData + dataOffset);
    // "RSDS" signature introducing a CodeView PDB 7.0 record:
    writer.WriteByte((byte)'R');
    writer.WriteByte((byte)'S');
    writer.WriteByte((byte)'D');
    writer.WriteByte((byte)'S');
    // PDB id:
    writer.WriteBytes(pdbContentId.Guid);
    // age
    writer.WriteUint(PdbWriter.Age);
    // UTF-8 encoded zero-terminated path to PDB
    writer.WriteString(_pdbPathOpt, emitNullTerminator: true);
    writer.BaseStream.WriteTo(peStream);
    stream.Free();
}
// Writes the native entry point stub: an indirect jump through the import
// address table slot for _CorExeMain/_CorDllMain. The jump target operand
// must be naturally aligned, so the stub is padded with zero bytes first.
private void WriteRuntimeStartupStub(Stream peStream)
{
    BinaryWriter writer = new BinaryWriter(new MemoryStream(16));
    // entry point code, consisting of a jump indirect to _CorXXXMain
    if (!_module.Requires64bits)
    {
        //emit 0's (nops) to pad the entry point code so that the target address is aligned on a 4 byte boundary.
        for (uint i = 0, n = (uint)(BitArithmeticUtilities.Align((uint)peStream.Position, 4) - peStream.Position); i < n; i++) writer.WriteByte(0);
        writer.WriteUshort(0);
        // 0xFF 0x25 is the x86 opcode for "jmp [imm32]"; the operand is the
        // virtual address of the IAT slot (RVA + image base).
        writer.WriteByte(0xff);
        writer.WriteByte(0x25); //4
        writer.WriteUint(_ntHeader.ImportAddressTable.RelativeVirtualAddress + (uint)_module.BaseAddress); //8
    }
    else
    {
        //emit 0's (nops) to pad the entry point code so that the target address is aligned on a 8 byte boundary.
        for (uint i = 0, n = (uint)(BitArithmeticUtilities.Align((uint)peStream.Position, 8) - peStream.Position); i < n; i++) writer.WriteByte(0);
        writer.WriteUint(0);
        writer.WriteUshort(0);
        // Same "jmp [address]" stub, but with a full 64-bit target address.
        writer.WriteByte(0xff);
        writer.WriteByte(0x25); //8
        writer.WriteUlong(_ntHeader.ImportAddressTable.RelativeVirtualAddress + _module.BaseAddress); //16
    }
    writer.BaseStream.WriteTo(peStream);
}
// Seeks to the coverage section's file offset and dumps the buffered
// instrumentation data.
private void WriteCoverSection(Stream peStream)
{
    var coverageData = _coverageDataWriter.BaseStream;
    peStream.Position = _coverSection.PointerToRawData;
    coverageData.WriteTo(peStream);
}
// Seeks to the .rdata section's file offset and dumps its buffered contents.
private void WriteRdataSection(Stream peStream)
{
    var rdata = _rdataWriter.BaseStream;
    peStream.Position = _rdataSection.PointerToRawData;
    rdata.WriteTo(peStream);
}
// Seeks to the .sdata section's file offset and dumps its buffered contents.
private void WriteSdataSection(Stream peStream)
{
    var sdata = _sdataWriter.BaseStream;
    peStream.Position = _sdataSection.PointerToRawData;
    sdata.WriteTo(peStream);
}
// Writes the base relocation section. The only fixup the writer ever needs
// is for the absolute IAT address embedded in the runtime startup stub; when
// no stub is emitted the section is skipped, but the stream is still padded
// out to the expected file alignment.
private void WriteRelocSection(Stream peStream)
{
    if (!_emitRuntimeStartupStub)
    {
        //No need to write out a reloc section, but there is still a need to pad out the peStream so that it is an even multiple of module.FileAlignment
        if (_relocSection.PointerToRawData != peStream.Position)
        { //for example, the resource section did not end bang on the alignment boundary
            // Seek to one byte before the boundary and write a zero so the
            // stream length lands exactly on the alignment boundary.
            peStream.Position = _relocSection.PointerToRawData - 1;
            peStream.WriteByte(0);
        }
        return;
    }
    peStream.Position = _relocSection.PointerToRawData;
    BinaryWriter writer = new BinaryWriter(new MemoryStream(_module.FileAlignment));
    // Base relocation block header: page RVA containing the fixup
    // (the stub's address operand starts 2 bytes past the entry point),
    // followed by the total block size in bytes.
    writer.WriteUint(((_ntHeader.AddressOfEntryPoint + 2) / 0x1000) * 0x1000);
    writer.WriteUint(_module.Requires64bits && !_module.RequiresAmdInstructionSet ? 14u : 12u);
    uint offsetWithinPage = (_ntHeader.AddressOfEntryPoint + 2) % 0x1000;
    // Relocation type: 10 = IMAGE_REL_BASED_DIR64 for 64-bit images,
    // 3 = IMAGE_REL_BASED_HIGHLOW for 32-bit images.
    uint relocType = _module.Requires64bits ? 10u : 3u;
    ushort s = (ushort)((relocType << 12) | offsetWithinPage);
    writer.WriteUshort(s);
    if (_module.Requires64bits && !_module.RequiresAmdInstructionSet)
    {
        writer.WriteUint(relocType << 12);
    }
    writer.WriteUshort(0); // next chunk's RVA
    // Pad the section out to a full file-alignment unit before flushing.
    writer.BaseStream.Position = _module.FileAlignment;
    writer.BaseStream.WriteTo(peStream);
}
// Writes the serialized Win32 resources (if any) at the resource section's
// file offset, terminated by a zero byte and padded to an 8-byte boundary.
private void WriteResourceSection(Stream peStream)
{
    var resourceStream = _win32ResourceWriter.BaseStream;
    if (resourceStream.Length == 0)
    {
        return;
    }
    peStream.Position = _resourceSection.PointerToRawData;
    resourceStream.WriteTo(peStream);
    peStream.WriteByte(0);
    while ((peStream.Position & 7) != 0)
    {
        peStream.WriteByte(0);
    }
}
// Seeks to the TLS section's file offset and dumps the buffered TLS data.
private void WriteTlsSection(Stream peStream)
{
    var tlsData = _tlsDataWriter.BaseStream;
    peStream.Position = _tlsSection.PointerToRawData;
    tlsData.WriteTo(peStream);
}
}
}
| furesoft/roslyn | src/Compilers/Core/Portable/PEWriter/PeWriter.cs | C# | apache-2.0 | 69,905 |
package gr.forth.ics.graph.event;
/**
 * Aggregate listener for graph structure events: combines the node-level
 * callbacks of {@link NodeListener} with the edge-level callbacks of
 * {@link EdgeListener}, so a single registration can observe all graph
 * mutations. Declares no members of its own.
 */
public interface GraphListener extends NodeListener, EdgeListener {
}
| DimitrisAndreou/flexigraph | src/gr/forth/ics/graph/event/GraphListener.java | Java | apache-2.0 | 105 |
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules;
import com.facebook.buck.rules.keys.DefaultRuleKeyCache;
import com.facebook.buck.rules.keys.RuleKeyFactories;
import com.facebook.buck.step.DefaultStepRunner;
import com.facebook.buck.testutil.DummyFileHashCache;
import com.facebook.buck.util.cache.FileHashCacheMode;
import com.facebook.buck.util.concurrent.ListeningMultiSemaphore;
import com.facebook.buck.util.concurrent.ResourceAllocationFairness;
import com.facebook.buck.util.concurrent.ResourceAmounts;
import com.facebook.buck.util.concurrent.WeightedListeningExecutorService;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import java.util.Optional;
/** Handy way to create new {@link CachingBuildEngine} instances for test purposes. */
public class CachingBuildEngineFactory {

  // Defaults chosen for tests; each setter below overrides one of these.
  private CachingBuildEngine.BuildMode buildMode = CachingBuildEngine.BuildMode.SHALLOW;
  private CachingBuildEngine.MetadataStorage metadataStorage =
      CachingBuildEngine.MetadataStorage.FILESYSTEM;
  private CachingBuildEngine.DepFiles depFiles = CachingBuildEngine.DepFiles.ENABLED;
  private long maxDepFileCacheEntries = 256L;
  private Optional<Long> artifactCacheSizeLimit = Optional.empty();
  // Only consulted when no explicit RuleKeyFactories is supplied (no setter exists).
  private long inputFileSizeLimit = Long.MAX_VALUE;
  // When present, build() uses the factories as-is; otherwise it constructs
  // default factories from the delegate's file hash cache.
  private Optional<RuleKeyFactories> ruleKeyFactories = Optional.empty();
  private CachingBuildEngineDelegate cachingBuildEngineDelegate;
  private WeightedListeningExecutorService executorService;
  private BuildRuleResolver buildRuleResolver;
  private ResourceAwareSchedulingInfo resourceAwareSchedulingInfo =
      ResourceAwareSchedulingInfo.NON_AWARE_SCHEDULING_INFO;
  private boolean logBuildRuleFailuresInline = true;
  private BuildInfoStoreManager buildInfoStoreManager;
  private FileHashCacheMode fileHashCacheMode = FileHashCacheMode.DEFAULT;

  /**
   * Creates a factory with test-friendly defaults: a local delegate backed by a
   * {@link DummyFileHashCache} and a direct (same-thread) executor service.
   */
  public CachingBuildEngineFactory(
      BuildRuleResolver buildRuleResolver, BuildInfoStoreManager buildInfoStoreManager) {
    this.cachingBuildEngineDelegate = new LocalCachingBuildEngineDelegate(new DummyFileHashCache());
    this.executorService = toWeighted(MoreExecutors.newDirectExecutorService());
    this.buildRuleResolver = buildRuleResolver;
    this.buildInfoStoreManager = buildInfoStoreManager;
  }

  /** Sets the build mode (shallow vs. deep). Returns this factory for chaining. */
  public CachingBuildEngineFactory setBuildMode(CachingBuildEngine.BuildMode buildMode) {
    this.buildMode = buildMode;
    return this;
  }

  // NOTE(review): method name is missing an 'e' ("CachMode"); kept as-is since
  // renaming would break existing callers.
  public CachingBuildEngineFactory setFileHashCachMode(FileHashCacheMode fileHashCachMode) {
    this.fileHashCacheMode = fileHashCachMode;
    return this;
  }

  /** Enables or disables dep-file based rule keys. */
  public CachingBuildEngineFactory setDepFiles(CachingBuildEngine.DepFiles depFiles) {
    this.depFiles = depFiles;
    return this;
  }

  /** Caps the number of dep-file cache entries kept per rule. */
  public CachingBuildEngineFactory setMaxDepFileCacheEntries(long maxDepFileCacheEntries) {
    this.maxDepFileCacheEntries = maxDepFileCacheEntries;
    return this;
  }

  /** Sets an optional upper bound on artifact cache size. */
  public CachingBuildEngineFactory setArtifactCacheSizeLimit(
      Optional<Long> artifactCacheSizeLimit) {
    this.artifactCacheSizeLimit = artifactCacheSizeLimit;
    return this;
  }

  /** Replaces the default local delegate (e.g. to supply a custom file hash cache). */
  public CachingBuildEngineFactory setCachingBuildEngineDelegate(
      CachingBuildEngineDelegate cachingBuildEngineDelegate) {
    this.cachingBuildEngineDelegate = cachingBuildEngineDelegate;
    return this;
  }

  /** Wraps the given executor in a weighted executor with effectively unlimited permits. */
  public CachingBuildEngineFactory setExecutorService(ListeningExecutorService executorService) {
    this.executorService = toWeighted(executorService);
    return this;
  }

  /** Uses the given weighted executor directly. */
  public CachingBuildEngineFactory setExecutorService(
      WeightedListeningExecutorService executorService) {
    this.executorService = executorService;
    return this;
  }

  /** Supplies explicit rule key factories instead of the defaults built in {@link #build()}. */
  public CachingBuildEngineFactory setRuleKeyFactories(RuleKeyFactories ruleKeyFactories) {
    this.ruleKeyFactories = Optional.of(ruleKeyFactories);
    return this;
  }

  /** Controls whether build rule failures are logged inline. */
  public CachingBuildEngineFactory setLogBuildRuleFailuresInline(
      boolean logBuildRuleFailuresInline) {
    this.logBuildRuleFailuresInline = logBuildRuleFailuresInline;
    return this;
  }

  /**
   * Builds the engine. Two construction paths exist because the two
   * {@link CachingBuildEngine} constructors take different parameters: with
   * explicit rule key factories a {@link SourcePathRuleFinder}/resolver pair is
   * also passed; otherwise default factories are created from the delegate's
   * file hash cache.
   */
  public CachingBuildEngine build() {
    if (ruleKeyFactories.isPresent()) {
      SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(buildRuleResolver);
      return new CachingBuildEngine(
          cachingBuildEngineDelegate,
          executorService,
          new DefaultStepRunner(),
          buildMode,
          metadataStorage,
          depFiles,
          maxDepFileCacheEntries,
          artifactCacheSizeLimit,
          buildRuleResolver,
          buildInfoStoreManager,
          ruleFinder,
          DefaultSourcePathResolver.from(ruleFinder),
          ruleKeyFactories.get(),
          resourceAwareSchedulingInfo,
          logBuildRuleFailuresInline,
          fileHashCacheMode);
    }

    return new CachingBuildEngine(
        cachingBuildEngineDelegate,
        executorService,
        new DefaultStepRunner(),
        buildMode,
        metadataStorage,
        depFiles,
        maxDepFileCacheEntries,
        artifactCacheSizeLimit,
        buildRuleResolver,
        buildInfoStoreManager,
        resourceAwareSchedulingInfo,
        logBuildRuleFailuresInline,
        RuleKeyFactories.of(
            0,
            cachingBuildEngineDelegate.getFileHashCache(),
            buildRuleResolver,
            inputFileSizeLimit,
            new DefaultRuleKeyCache<>()),
        fileHashCacheMode);
  }

  // Grants effectively unlimited CPU permits so the wrapped executor never
  // blocks on resource accounting in tests.
  private static WeightedListeningExecutorService toWeighted(ListeningExecutorService service) {
    return new WeightedListeningExecutorService(
        new ListeningMultiSemaphore(
            ResourceAmounts.of(Integer.MAX_VALUE, 0, 0, 0), ResourceAllocationFairness.FAIR),
        /* defaultPermits */ ResourceAmounts.of(1, 0, 0, 0),
        service);
  }
}
| k21/buck | test/com/facebook/buck/rules/CachingBuildEngineFactory.java | Java | apache-2.0 | 6,351 |
/*
* Waltz - Enterprise Architecture
* Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
* See README.md for more information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific
*
*/
package org.finos.waltz.jobs.harness;
import org.finos.waltz.data.measurable.MeasurableIdSelectorFactory;
import org.finos.waltz.data.measurable_rating.MeasurableRatingDao;
import org.finos.waltz.model.EntityReference;
import org.finos.waltz.model.IdSelectionOptions;
import org.finos.waltz.model.tally.MeasurableRatingTally;
import org.finos.waltz.model.tally.Tally;
import org.finos.waltz.service.DIConfiguration;
import org.jooq.Record1;
import org.jooq.Select;
import org.jooq.tools.json.ParseException;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import java.util.List;
import static org.finos.waltz.model.EntityKind.MEASURABLE;
import static org.finos.waltz.model.EntityReference.mkRef;
import static org.finos.waltz.model.HierarchyQueryScope.CHILDREN;
/**
 * Ad-hoc developer harness that exercises measurable-rating statistics queries
 * against a live database. It resolves id selectors for a directly and an
 * indirectly mapped measurable, fetches rating tallies for both, and prints
 * the results for manual inspection.
 */
public class MeasurableRatingHarness {

    public static void main(String[] args) throws ParseException {
        AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
        MeasurableRatingDao measurableRatingDao = ctx.getBean(MeasurableRatingDao.class);
        MeasurableIdSelectorFactory measurableIdSelectorFactory = new MeasurableIdSelectorFactory();

        // Hard-coded measurable ids from the target database.
        EntityReference direct = mkRef(MEASURABLE, 18310);
        EntityReference indirect = mkRef(MEASURABLE, 18064);

        IdSelectionOptions directOpts = IdSelectionOptions.mkOpts(direct, CHILDREN);
        IdSelectionOptions indirectOpts = IdSelectionOptions.mkOpts(indirect, CHILDREN);

        Select<Record1<Long>> directSelector = measurableIdSelectorFactory.apply(directOpts);
        Select<Record1<Long>> indirectSelector = measurableIdSelectorFactory.apply(indirectOpts);

        List<MeasurableRatingTally> directTallies = measurableRatingDao.statsForRelatedMeasurable(directSelector);
        List<MeasurableRatingTally> indirectTallies = measurableRatingDao.statsForRelatedMeasurable(indirectSelector);

        // Fix: previously these tallies were computed but never reported, so the
        // harness gave no feedback on the related-measurable stats queries.
        System.out.println(directTallies);
        System.out.println(indirectTallies);

        List<Tally<Long>> tallies = measurableRatingDao.tallyByMeasurableCategoryId(1L);
        System.out.println(tallies);
    }
}
| khartec/waltz | waltz-jobs/src/main/java/org/finos/waltz/jobs/harness/MeasurableRatingHarness.java | Java | apache-2.0 | 2,714 |
package org.finos.waltz.integration_test.inmem.helpers;
import org.finos.waltz.model.EntityKind;
import org.finos.waltz.model.EntityReference;
import org.finos.waltz.model.Operation;
import org.finos.waltz.model.involvement.EntityInvolvementChangeCommand;
import org.finos.waltz.model.involvement.ImmutableEntityInvolvementChangeCommand;
import org.finos.waltz.model.involvement_kind.ImmutableInvolvementKindCreateCommand;
import org.finos.waltz.model.involvement_kind.InvolvementKindCreateCommand;
import org.finos.waltz.service.involvement.InvolvementService;
import org.finos.waltz.service.involvement_kind.InvolvementKindService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import static org.finos.waltz.model.EntityReference.mkRef;
@Service
public class InvolvementHelper {

    private final InvolvementService involvementService;
    private final InvolvementKindService involvementKindService;

    @Autowired
    public InvolvementHelper(InvolvementService involvementService,
                             InvolvementKindService involvementKindService) {
        this.involvementService = involvementService;
        this.involvementKindService = involvementKindService;
    }

    /**
     * Creates a new involvement kind whose name, description and external id
     * are all set to {@code name}, returning its id.
     */
    public long mkInvolvementKind(String name) {
        InvolvementKindCreateCommand createCommand = ImmutableInvolvementKindCreateCommand
                .builder()
                .name(name)
                .description(name)
                .externalId(name)
                .build();
        return involvementKindService.create(createCommand, NameHelper.mkUserId("involvementHelper"));
    }

    /**
     * Links person {@code pId} to the given entity via the involvement kind
     * {@code invId}.
     */
    public void createInvolvement(Long pId, long invId, EntityReference entity) {
        EntityInvolvementChangeCommand changeCommand = ImmutableEntityInvolvementChangeCommand
                .builder()
                .operation(Operation.ADD)
                .personEntityRef(mkRef(EntityKind.PERSON, pId))
                .involvementKindId((int) invId)
                .build();
        involvementService.addEntityInvolvement(NameHelper.mkUserId(), entity, changeCommand);
    }
}
| khartec/waltz | waltz-integration-test/src/test/java/org/finos/waltz/integration_test/inmem/helpers/InvolvementHelper.java | Java | apache-2.0 | 2,071 |
/**
* Copyright © 2012-2016 <a href="https://github.com/tlkzzz/jeesite">JeeSite</a> All rights reserved.
*/
package com.tlkzzz.jeesite.modules.sys.service;
import java.util.List;
import com.tlkzzz.jeesite.common.service.CrudService;
import com.tlkzzz.jeesite.common.utils.CacheUtils;
import com.tlkzzz.jeesite.modules.sys.entity.Dict;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.tlkzzz.jeesite.modules.sys.dao.DictDao;
import com.tlkzzz.jeesite.modules.sys.utils.DictUtils;
/**
* 字典Service
* @author tlkzzz
* @version 2014-05-16
*/
@Service
@Transactional(readOnly = true)
public class DictService extends CrudService<DictDao, Dict> {

    /**
     * Returns the list of distinct dictionary types.
     */
    public List<String> findTypeList() {
        return dao.findTypeList(new Dict());
    }

    @Transactional(readOnly = false)
    public void save(Dict dict) {
        super.save(dict);
        clearDictCache();
    }

    @Transactional(readOnly = false)
    public void delete(Dict dict) {
        super.delete(dict);
        clearDictCache();
    }

    // Dictionary entries are cached globally; every mutation must evict the
    // cached map so subsequent reads observe fresh data.
    private void clearDictCache() {
        CacheUtils.remove(DictUtils.CACHE_DICT_MAP);
    }
}
| tlkzzz/xpjfx | src/main/java/com/tlkzzz/jeesite/modules/sys/service/DictService.java | Java | apache-2.0 | 1,144 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.authorization.accesscontrol;
import java.security.Principal;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
import javax.jcr.security.AccessControlEntry;
import javax.jcr.security.AccessControlList;
import javax.jcr.security.AccessControlPolicy;
import com.google.common.collect.ImmutableMap;
import org.apache.jackrabbit.api.security.JackrabbitAccessControlManager;
import org.apache.jackrabbit.api.security.principal.JackrabbitPrincipal;
import org.apache.jackrabbit.commons.jackrabbit.authorization.AccessControlUtils;
import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalImpl;
import org.jetbrains.annotations.NotNull;
import org.junit.Before;
import org.junit.Test;
import static org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants.REP_GLOB;
import static org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants.REP_NODE_PATH;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
/**
 * Tests for the principal-based view of access control lists: verifies that a
 * principal ACL cannot be reordered, and exercises equals/hashCode against
 * other ACL variants (different principal, different path, different entries).
 */
public class PrincipalACLTest extends AbstractAccessControlTest {

    private ACL principalAcl;

    @Override
    @Before
    public void before() throws Exception {
        super.before();

        // Grant test privileges at TEST_PATH to both the test principal and
        // everyone, then fetch the principal-based ACL for the test principal.
        JackrabbitAccessControlManager acMgr = getAccessControlManager(root);
        AccessControlList policy = AccessControlUtils.getAccessControlList(acMgr, TEST_PATH);
        policy.addAccessControlEntry(testPrincipal, testPrivileges);
        policy.addAccessControlEntry(EveryonePrincipal.getInstance(), testPrivileges);
        acMgr.setPolicy(TEST_PATH, policy);
        root.commit();

        principalAcl = getPrincipalAcl(acMgr, testPrincipal);
    }

    // Returns the first principal-based ACL registered for the given principal;
    // fails fast if none exists (setup error).
    @NotNull
    private static ACL getPrincipalAcl(@NotNull JackrabbitAccessControlManager acMgr, @NotNull Principal testPrincipal) throws RepositoryException {
        for (AccessControlPolicy acp : acMgr.getPolicies(testPrincipal)) {
            if (acp instanceof ACL) {
                return (ACL) acp;
            }
        }
        throw new RuntimeException("no principal acl found");
    }

    // Principal ACLs are read-only views: reordering entries must be rejected.
    @Test(expected = UnsupportedRepositoryOperationException.class)
    public void testReorder() throws Exception {
        AccessControlEntry[] entries = principalAcl.getAccessControlEntries();
        principalAcl.orderBefore(entries[0], null);
    }

    @Test
    public void testEquals() throws Exception {
        assertEquals(principalAcl, principalAcl);
        // A freshly retrieved ACL for the same principal must compare equal.
        assertEquals(principalAcl, getPrincipalAcl(getAccessControlManager(root), testPrincipal));
    }

    @Test
    public void testEqualsDifferentPrincipal() throws Exception {
        assertNotEquals(principalAcl, getPrincipalAcl(getAccessControlManager(root), EveryonePrincipal.getInstance()));
    }

    @Test
    public void testEqualsDifferentACL() throws Exception {
        // A path-based ACL is never equal to a principal-based one.
        assertNotEquals(principalAcl, AccessControlUtils.getAccessControlList(getAccessControlManager(root), TEST_PATH));
    }

    @Test
    public void testEqualsDifferentPath() throws Exception {
        // Same principal name but a distinct Principal instance/path yields a different ACL.
        ACL acl = getPrincipalAcl(getAccessControlManager(root), new PrincipalImpl(testPrincipal.getName()));
        assertNotEquals(principalAcl, acl);
    }

    @Test
    public void testEqualsDifferentEntries() throws Exception {
        ValueFactory vf = getValueFactory(root);
        // Adding a restricted entry makes the entry lists differ.
        ACL acl = getPrincipalAcl(getAccessControlManager(root), testPrincipal);
        acl.addEntry(testPrincipal, privilegesFromNames(JCR_VERSION_MANAGEMENT), true,
                ImmutableMap.of(REP_GLOB, vf.createValue("/subtree/*"), REP_NODE_PATH, vf.createValue(TEST_PATH)));
        assertNotEquals(principalAcl, acl);
    }

    // The implementation pins hashCode to 0 for principal ACLs; this test
    // documents that contract.
    @Test
    public void testHashCode() {
        assertEquals(0, principalAcl.hashCode());
    }
}
| trekawek/jackrabbit-oak | oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/accesscontrol/PrincipalACLTest.java | Java | apache-2.0 | 4,767 |
#
# Copyright:: Copyright (c) 2014 Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef-dk/command/base'
require 'chef-dk/ui'
require 'chef-dk/policyfile_services/install'
require 'chef-dk/policyfile_services/update_attributes'
require 'chef-dk/configurable'
module ChefDK
  module Command

    # Implements `chef update`: re-solves a Policyfile's dependencies and
    # rewrites Policyfile.lock.json, or (with -a) updates only the attributes.
    class Update < Base

      include Configurable

      banner(<<-BANNER)
Usage: chef update [ POLICY_FILE ] [options]

`chef update` reads your `Policyfile.rb`, applies any changes, re-solves the
dependencies and emits an updated `Policyfile.lock.json`. The new locked policy
will reflect any changes to the `run_list` and pull in any cookbook updates
that are compatible with the version constraints stated in your `Policyfile.rb`.

NOTE: `chef update` does not yet support granular updates (e.g., just updating
the `run_list` or a specific cookbook version). Support will be added in a
future version.

See our detailed README for more information:

https://docs.chef.io/policyfile.html

Options:

BANNER

      option :config_file,
        short: "-c CONFIG_FILE",
        long: "--config CONFIG_FILE",
        description: "Path to configuration file"

      option :debug,
        short: "-D",
        long: "--debug",
        description: "Enable stacktraces and other debug output",
        default: false,
        boolean: true

      option :update_attributes,
        short: "-a",
        long: "--attributes",
        description: "Update attributes",
        default: false,
        boolean: true

      # Relative path to the Policyfile, taken from the first CLI argument
      # (nil means the default Policyfile.rb in the working directory).
      attr_reader :policyfile_relative_path

      # UI object used for all output; replaceable for testing.
      attr_accessor :ui

      def initialize(*args)
        super
        @ui = UI.new
        @policyfile_relative_path = nil
        @installer = nil
        @attributes_updater = nil
      end

      # Entry point. Returns a process exit code (0 success, 1 failure).
      def run(params = [])
        return 1 unless apply_params!(params)
        # Force config file to be loaded. We don't use the configuration
        # directly, but the user may have SSL configuration options that they
        # need to talk to a private supermarket (e.g., trusted_certs or
        # ssl_verify_mode)
        chef_config
        if update_attributes?
          attributes_updater.run
        else
          installer.run
        end
        0
      rescue PolicyfileServiceError => e
        handle_error(e)
        1
      end

      # Service that re-solves dependencies and overwrites the lock file.
      def installer
        @installer ||= PolicyfileServices::Install.new(policyfile: policyfile_relative_path, ui: ui, root_dir: Dir.pwd, overwrite: true)
      end

      # Service that updates only the attributes section of the lock.
      def attributes_updater
        @attributes_updater ||=
          PolicyfileServices::UpdateAttributes.new(policyfile: policyfile_relative_path, ui: ui, root_dir: Dir.pwd)
      end

      def debug?
        !!config[:debug]
      end

      def config_path
        config[:config_file]
      end

      def update_attributes?
        !!config[:update_attributes]
      end

      # Prints a service error (and, in debug mode, extended info + backtrace).
      def handle_error(error)
        ui.err("Error: #{error.message}")
        if error.respond_to?(:reason)
          ui.err("Reason: #{error.reason}")
          ui.err("")
          ui.err(error.extended_error_info) if debug?
          ui.err(error.cause.backtrace.join("\n")) if debug?
        end
      end

      # Parses CLI options; at most one positional argument (the Policyfile
      # path) is accepted. Returns false (after printing usage) when invalid.
      def apply_params!(params)
        remaining_args = parse_options(params)
        if remaining_args.size > 1
          ui.err(opt_parser)
          false
        else
          @policyfile_relative_path = remaining_args.first
          true
        end
      end

    end
  end
end
| vinyar/chef-dk | lib/chef-dk/command/update.rb | Ruby | apache-2.0 | 4,086 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.dataFlow.inliner;
import com.intellij.codeInspection.dataFlow.CFGBuilder;
import com.intellij.codeInspection.dataFlow.Nullness;
import com.intellij.codeInspection.dataFlow.SpecialField;
import com.intellij.codeInspection.dataFlow.value.DfaValueFactory;
import com.intellij.codeInspection.dataFlow.value.DfaVariableValue;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.PsiMethodCallExpression;
import com.intellij.psi.PsiVariable;
import com.siyeh.ig.callMatcher.CallMapper;
import org.jetbrains.annotations.NotNull;
import static com.intellij.codeInspection.dataFlow.SpecialField.COLLECTION_SIZE;
import static com.intellij.codeInspection.dataFlow.SpecialField.MAP_SIZE;
import static com.intellij.psi.CommonClassNames.JAVA_UTIL_COLLECTIONS;
import static com.siyeh.ig.callMatcher.CallMatcher.staticCall;
/**
 * Inlines calls to {@code java.util.Collections} factory methods
 * ({@code emptyList}, {@code emptySet}, {@code singletonList}, {@code singleton},
 * {@code emptyMap}, {@code singletonMap}) into the data-flow CFG, so the analysis
 * knows both the nullness (never null) and the exact size of the returned
 * collection or map.
 */
public class CollectionFactoryInliner implements CallInliner {
  /**
   * Describes what a supported factory method produces: the resulting size and
   * the special field ({@link SpecialField#COLLECTION_SIZE} or
   * {@link SpecialField#MAP_SIZE}) that should carry it.
   */
  static final class FactoryInfo {
    // Assigned once in the constructor and never mutated; final documents that.
    final int mySize;
    final SpecialField mySizeField;

    public FactoryInfo(int size, SpecialField sizeField) {
      mySize = size;
      mySizeField = sizeField;
    }
  }

  /** Maps recognized Collections factory calls to their size information. */
  private static final CallMapper<FactoryInfo> STATIC_FACTORIES = new CallMapper<FactoryInfo>()
    .register(staticCall(JAVA_UTIL_COLLECTIONS, "emptyList", "emptySet").parameterCount(0), new FactoryInfo(0, COLLECTION_SIZE))
    .register(staticCall(JAVA_UTIL_COLLECTIONS, "singletonList", "singleton").parameterCount(1), new FactoryInfo(1, COLLECTION_SIZE))
    .register(staticCall(JAVA_UTIL_COLLECTIONS, "emptyMap").parameterCount(0), new FactoryInfo(0, MAP_SIZE))
    .register(staticCall(JAVA_UTIL_COLLECTIONS, "singletonMap").parameterCount(2), new FactoryInfo(1, MAP_SIZE));

  /**
   * Attempts to inline {@code call} if it is one of the supported collection
   * factory methods.
   *
   * @param builder CFG builder that receives the inlined instructions
   * @param call    the method call candidate
   * @return {@code true} if the call was recognized and inlined; {@code false}
   *         to let other inliners (or the generic handling) process it
   */
  @Override
  public boolean tryInlineCall(@NotNull CFGBuilder builder, @NotNull PsiMethodCallExpression call) {
    FactoryInfo factoryInfo = STATIC_FACTORIES.mapFirst(call);
    if (factoryInfo == null) return false;
    // Evaluate arguments for their side effects only; their values are discarded.
    PsiExpression[] args = call.getArgumentList().getExpressions();
    for (PsiExpression arg : args) {
      builder.pushExpression(arg).pop();
    }
    PsiVariable variable = builder.createTempVariable(call.getType());
    DfaValueFactory factory = builder.getFactory();
    DfaVariableValue variableValue = factory.getVarFactory().createVariableValue(variable, false);
    builder.pushVariable(variable) // tmpVar = <Value of collection type>
      .push(factory.createTypeValue(call.getType(), Nullness.NOT_NULL))
      .assign() // leave tmpVar on stack: it's result of method call
      .push(factoryInfo.mySizeField.createValue(factory, variableValue)) // tmpVar.size = <size>
      .push(factory.getInt(factoryInfo.mySize))
      .assign()
      .pop();
    return true;
  }
}
| apixandru/intellij-community | java/java-analysis-impl/src/com/intellij/codeInspection/dataFlow/inliner/CollectionFactoryInliner.java | Java | apache-2.0 | 3,317 |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.26.0
// protoc v3.12.2
// source: google/cloud/osconfig/v1alpha/instance_os_policies_compliance.proto
package osconfig
import (
reflect "reflect"
sync "sync"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
timestamppb "google.golang.org/protobuf/types/known/timestamppb"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This API resource represents the OS policies compliance data for a Compute
// Engine virtual machine (VM) instance at a given point in time.
//
// A Compute Engine VM can have multiple OS policy assignments, and each
// assignment can have multiple OS policies. As a result, multiple OS policies
// could be applied to a single VM.
//
// You can use this API resource to determine both the compliance state of your
// VM as well as the compliance state of an individual OS policy.
//
// For more information, see [View
// compliance](https://cloud.google.com/compute/docs/os-configuration-management/view-compliance).
type InstanceOSPoliciesCompliance struct {
	state         protoimpl.MessageState  // internal protoimpl bookkeeping
	sizeCache     protoimpl.SizeCache     // cached serialized size
	unknownFields protoimpl.UnknownFields // fields not known to this schema version

	// Output only. The `InstanceOSPoliciesCompliance` API resource name.
	//
	// Format:
	// `projects/{project_number}/locations/{location}/instanceOSPoliciesCompliances/{instance_id}`
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Output only. The Compute Engine VM instance name.
	Instance string `protobuf:"bytes,2,opt,name=instance,proto3" json:"instance,omitempty"`
	// Output only. Compliance state of the VM.
	State OSPolicyComplianceState `protobuf:"varint,3,opt,name=state,proto3,enum=google.cloud.osconfig.v1alpha.OSPolicyComplianceState" json:"state,omitempty"`
	// Output only. Detailed compliance state of the VM.
	// This field is populated only when compliance state is `UNKNOWN`.
	//
	// It may contain one of the following values:
	//
	// * `no-compliance-data`: Compliance data is not available for this VM.
	// * `no-agent-detected`: OS Config agent is not detected for this VM.
	// * `config-not-supported-by-agent`: The version of the OS Config agent
	// running on this VM does not support configuration management.
	// * `inactive`: VM is not running.
	// * `internal-service-errors`: There were internal service errors encountered
	// while enforcing compliance.
	// * `agent-errors`: OS config agent encountered errors while enforcing
	// compliance.
	DetailedState string `protobuf:"bytes,4,opt,name=detailed_state,json=detailedState,proto3" json:"detailed_state,omitempty"`
	// Output only. The reason for the `detailed_state` of the VM (if any).
	DetailedStateReason string `protobuf:"bytes,5,opt,name=detailed_state_reason,json=detailedStateReason,proto3" json:"detailed_state_reason,omitempty"`
	// Output only. Compliance data for each `OSPolicy` that is applied to the VM.
	OsPolicyCompliances []*InstanceOSPoliciesCompliance_OSPolicyCompliance `protobuf:"bytes,6,rep,name=os_policy_compliances,json=osPolicyCompliances,proto3" json:"os_policy_compliances,omitempty"`
	// Output only. Timestamp of the last compliance check for the VM.
	LastComplianceCheckTime *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=last_compliance_check_time,json=lastComplianceCheckTime,proto3" json:"last_compliance_check_time,omitempty"`
	// Output only. Unique identifier for the last compliance run.
	// This id will be logged by the OS config agent during a compliance run and
	// can be used for debugging and tracing purpose.
	LastComplianceRunId string `protobuf:"bytes,8,opt,name=last_compliance_run_id,json=lastComplianceRunId,proto3" json:"last_compliance_run_id,omitempty"`
}

// Reset restores the message to its zero state, re-attaching the cached
// message info when the unsafe fast path is enabled.
func (x *InstanceOSPoliciesCompliance) Reset() {
	*x = InstanceOSPoliciesCompliance{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[0]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text format.
func (x *InstanceOSPoliciesCompliance) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*InstanceOSPoliciesCompliance) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message, lazily
// initializing the cached message info on first use.
func (x *InstanceOSPoliciesCompliance) ProtoReflect() protoreflect.Message {
	mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[0]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InstanceOSPoliciesCompliance.ProtoReflect.Descriptor instead.
func (*InstanceOSPoliciesCompliance) Descriptor() ([]byte, []int) {
	return file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescGZIP(), []int{0}
}

// Nil-safe accessors: each getter returns the field's zero value when the
// receiver is nil.

func (x *InstanceOSPoliciesCompliance) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}

func (x *InstanceOSPoliciesCompliance) GetInstance() string {
	if x != nil {
		return x.Instance
	}
	return ""
}

func (x *InstanceOSPoliciesCompliance) GetState() OSPolicyComplianceState {
	if x != nil {
		return x.State
	}
	return OSPolicyComplianceState_OS_POLICY_COMPLIANCE_STATE_UNSPECIFIED
}

func (x *InstanceOSPoliciesCompliance) GetDetailedState() string {
	if x != nil {
		return x.DetailedState
	}
	return ""
}

func (x *InstanceOSPoliciesCompliance) GetDetailedStateReason() string {
	if x != nil {
		return x.DetailedStateReason
	}
	return ""
}

func (x *InstanceOSPoliciesCompliance) GetOsPolicyCompliances() []*InstanceOSPoliciesCompliance_OSPolicyCompliance {
	if x != nil {
		return x.OsPolicyCompliances
	}
	return nil
}

func (x *InstanceOSPoliciesCompliance) GetLastComplianceCheckTime() *timestamppb.Timestamp {
	if x != nil {
		return x.LastComplianceCheckTime
	}
	return nil
}

func (x *InstanceOSPoliciesCompliance) GetLastComplianceRunId() string {
	if x != nil {
		return x.LastComplianceRunId
	}
	return ""
}
// A request message for getting OS policies compliance data for the given
// Compute Engine VM instance.
type GetInstanceOSPoliciesComplianceRequest struct {
	state         protoimpl.MessageState  // internal protoimpl bookkeeping
	sizeCache     protoimpl.SizeCache     // cached serialized size
	unknownFields protoimpl.UnknownFields // fields not known to this schema version

	// Required. API resource name for instance OS policies compliance resource.
	//
	// Format:
	// `projects/{project}/locations/{location}/instanceOSPoliciesCompliances/{instance}`
	//
	// For `{project}`, either Compute Engine project-number or project-id can be
	// provided.
	// For `{instance}`, either Compute Engine VM instance-id or instance-name can
	// be provided.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
}

// Reset restores the message to its zero state.
func (x *GetInstanceOSPoliciesComplianceRequest) Reset() {
	*x = GetInstanceOSPoliciesComplianceRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[1]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text format.
func (x *GetInstanceOSPoliciesComplianceRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*GetInstanceOSPoliciesComplianceRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *GetInstanceOSPoliciesComplianceRequest) ProtoReflect() protoreflect.Message {
	mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[1]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use GetInstanceOSPoliciesComplianceRequest.ProtoReflect.Descriptor instead.
func (*GetInstanceOSPoliciesComplianceRequest) Descriptor() ([]byte, []int) {
	return file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescGZIP(), []int{1}
}

// GetName returns the resource name; nil-safe.
func (x *GetInstanceOSPoliciesComplianceRequest) GetName() string {
	if x != nil {
		return x.Name
	}
	return ""
}
// A request message for listing OS policies compliance data for all Compute
// Engine VMs in the given location.
type ListInstanceOSPoliciesCompliancesRequest struct {
	state         protoimpl.MessageState  // internal protoimpl bookkeeping
	sizeCache     protoimpl.SizeCache     // cached serialized size
	unknownFields protoimpl.UnknownFields // fields not known to this schema version

	// Required. The parent resource name.
	//
	// Format: `projects/{project}/locations/{location}`
	//
	// For `{project}`, either Compute Engine project-number or project-id can be
	// provided.
	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
	// The maximum number of results to return.
	PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// A pagination token returned from a previous call to
	// `ListInstanceOSPoliciesCompliances` that indicates where this listing
	// should continue from.
	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// If provided, this field specifies the criteria that must be met by a
	// `InstanceOSPoliciesCompliance` API resource to be included in the response.
	Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"`
}

// Reset restores the message to its zero state.
func (x *ListInstanceOSPoliciesCompliancesRequest) Reset() {
	*x = ListInstanceOSPoliciesCompliancesRequest{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[2]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text format.
func (x *ListInstanceOSPoliciesCompliancesRequest) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*ListInstanceOSPoliciesCompliancesRequest) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *ListInstanceOSPoliciesCompliancesRequest) ProtoReflect() protoreflect.Message {
	mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[2]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ListInstanceOSPoliciesCompliancesRequest.ProtoReflect.Descriptor instead.
func (*ListInstanceOSPoliciesCompliancesRequest) Descriptor() ([]byte, []int) {
	return file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescGZIP(), []int{2}
}

// Nil-safe accessors: each getter returns the field's zero value when the
// receiver is nil.

func (x *ListInstanceOSPoliciesCompliancesRequest) GetParent() string {
	if x != nil {
		return x.Parent
	}
	return ""
}

func (x *ListInstanceOSPoliciesCompliancesRequest) GetPageSize() int32 {
	if x != nil {
		return x.PageSize
	}
	return 0
}

func (x *ListInstanceOSPoliciesCompliancesRequest) GetPageToken() string {
	if x != nil {
		return x.PageToken
	}
	return ""
}

func (x *ListInstanceOSPoliciesCompliancesRequest) GetFilter() string {
	if x != nil {
		return x.Filter
	}
	return ""
}
// A response message for listing OS policies compliance data for all Compute
// Engine VMs in the given location.
type ListInstanceOSPoliciesCompliancesResponse struct {
	state         protoimpl.MessageState  // internal protoimpl bookkeeping
	sizeCache     protoimpl.SizeCache     // cached serialized size
	unknownFields protoimpl.UnknownFields // fields not known to this schema version

	// List of instance OS policies compliance objects.
	InstanceOsPoliciesCompliances []*InstanceOSPoliciesCompliance `protobuf:"bytes,1,rep,name=instance_os_policies_compliances,json=instanceOsPoliciesCompliances,proto3" json:"instance_os_policies_compliances,omitempty"`
	// The pagination token to retrieve the next page of instance OS policies
	// compliance objects.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
}

// Reset restores the message to its zero state.
func (x *ListInstanceOSPoliciesCompliancesResponse) Reset() {
	*x = ListInstanceOSPoliciesCompliancesResponse{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[3]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text format.
func (x *ListInstanceOSPoliciesCompliancesResponse) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*ListInstanceOSPoliciesCompliancesResponse) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *ListInstanceOSPoliciesCompliancesResponse) ProtoReflect() protoreflect.Message {
	mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[3]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use ListInstanceOSPoliciesCompliancesResponse.ProtoReflect.Descriptor instead.
func (*ListInstanceOSPoliciesCompliancesResponse) Descriptor() ([]byte, []int) {
	return file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescGZIP(), []int{3}
}

// GetInstanceOsPoliciesCompliances returns the result page; nil-safe.
func (x *ListInstanceOSPoliciesCompliancesResponse) GetInstanceOsPoliciesCompliances() []*InstanceOSPoliciesCompliance {
	if x != nil {
		return x.InstanceOsPoliciesCompliances
	}
	return nil
}

// GetNextPageToken returns the pagination token; nil-safe.
func (x *ListInstanceOSPoliciesCompliancesResponse) GetNextPageToken() string {
	if x != nil {
		return x.NextPageToken
	}
	return ""
}
// Compliance data for an OS policy
type InstanceOSPoliciesCompliance_OSPolicyCompliance struct {
	state         protoimpl.MessageState  // internal protoimpl bookkeeping
	sizeCache     protoimpl.SizeCache     // cached serialized size
	unknownFields protoimpl.UnknownFields // fields not known to this schema version

	// The OS policy id
	OsPolicyId string `protobuf:"bytes,1,opt,name=os_policy_id,json=osPolicyId,proto3" json:"os_policy_id,omitempty"`
	// Reference to the `OSPolicyAssignment` API resource that the `OSPolicy`
	// belongs to.
	//
	// Format:
	// `projects/{project_number}/locations/{location}/osPolicyAssignments/{os_policy_assignment_id@revision_id}`
	OsPolicyAssignment string `protobuf:"bytes,2,opt,name=os_policy_assignment,json=osPolicyAssignment,proto3" json:"os_policy_assignment,omitempty"`
	// Compliance state of the OS policy.
	State OSPolicyComplianceState `protobuf:"varint,4,opt,name=state,proto3,enum=google.cloud.osconfig.v1alpha.OSPolicyComplianceState" json:"state,omitempty"`
	// Compliance data for each `OSPolicyResource` that is applied to the
	// VM.
	OsPolicyResourceCompliances []*OSPolicyResourceCompliance `protobuf:"bytes,5,rep,name=os_policy_resource_compliances,json=osPolicyResourceCompliances,proto3" json:"os_policy_resource_compliances,omitempty"`
}

// Reset restores the message to its zero state.
func (x *InstanceOSPoliciesCompliance_OSPolicyCompliance) Reset() {
	*x = InstanceOSPoliciesCompliance_OSPolicyCompliance{}
	if protoimpl.UnsafeEnabled {
		mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[4]
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		ms.StoreMessageInfo(mi)
	}
}

// String renders the message using the protobuf text format.
func (x *InstanceOSPoliciesCompliance_OSPolicyCompliance) String() string {
	return protoimpl.X.MessageStringOf(x)
}

// ProtoMessage marks the type as a protobuf message.
func (*InstanceOSPoliciesCompliance_OSPolicyCompliance) ProtoMessage() {}

// ProtoReflect returns the reflective view of the message.
func (x *InstanceOSPoliciesCompliance_OSPolicyCompliance) ProtoReflect() protoreflect.Message {
	mi := &file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[4]
	if protoimpl.UnsafeEnabled && x != nil {
		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
		if ms.LoadMessageInfo() == nil {
			ms.StoreMessageInfo(mi)
		}
		return ms
	}
	return mi.MessageOf(x)
}

// Deprecated: Use InstanceOSPoliciesCompliance_OSPolicyCompliance.ProtoReflect.Descriptor instead.
func (*InstanceOSPoliciesCompliance_OSPolicyCompliance) Descriptor() ([]byte, []int) {
	return file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescGZIP(), []int{0, 0}
}

// Nil-safe accessors: each getter returns the field's zero value when the
// receiver is nil.

func (x *InstanceOSPoliciesCompliance_OSPolicyCompliance) GetOsPolicyId() string {
	if x != nil {
		return x.OsPolicyId
	}
	return ""
}

func (x *InstanceOSPoliciesCompliance_OSPolicyCompliance) GetOsPolicyAssignment() string {
	if x != nil {
		return x.OsPolicyAssignment
	}
	return ""
}

func (x *InstanceOSPoliciesCompliance_OSPolicyCompliance) GetState() OSPolicyComplianceState {
	if x != nil {
		return x.State
	}
	return OSPolicyComplianceState_OS_POLICY_COMPLIANCE_STATE_UNSPECIFIED
}

func (x *InstanceOSPoliciesCompliance_OSPolicyCompliance) GetOsPolicyResourceCompliances() []*OSPolicyResourceCompliance {
	if x != nil {
		return x.OsPolicyResourceCompliances
	}
	return nil
}
var File_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto protoreflect.FileDescriptor
var file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDesc = []byte{
0x0a, 0x43, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2f, 0x6f,
0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2f,
0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x6f, 0x73, 0x5f, 0x70, 0x6f, 0x6c, 0x69,
0x63, 0x69, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c,
0x6f, 0x75, 0x64, 0x2e, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x76, 0x31, 0x61,
0x6c, 0x70, 0x68, 0x61, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69,
0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70,
0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x1a, 0x31, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2f, 0x6f,
0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2f,
0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x22, 0xab, 0x08, 0x0a, 0x1c, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63,
0x65, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c,
0x69, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x17, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1f,
0x0a, 0x08, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x08, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x12,
0x51, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x36,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x6f, 0x73,
0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4f,
0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63,
0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x05, 0x73, 0x74, 0x61,
0x74, 0x65, 0x12, 0x2a, 0x0a, 0x0e, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x73,
0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52,
0x0d, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37,
0x0a, 0x15, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65,
0x5f, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0,
0x41, 0x03, 0x52, 0x13, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74,
0x65, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x87, 0x01, 0x0a, 0x15, 0x6f, 0x73, 0x5f, 0x70,
0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65,
0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x4e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e,
0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65,
0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69,
0x61, 0x6e, 0x63, 0x65, 0x2e, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x43, 0x6f, 0x6d,
0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x13, 0x6f, 0x73,
0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65,
0x73, 0x12, 0x5c, 0x0a, 0x1a, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x69,
0x61, 0x6e, 0x63, 0x65, 0x5f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18,
0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d,
0x70, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x17, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70,
0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x54, 0x69, 0x6d, 0x65, 0x12,
0x38, 0x0a, 0x16, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e,
0x63, 0x65, 0x5f, 0x72, 0x75, 0x6e, 0x5f, 0x69, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x42,
0x03, 0xe0, 0x41, 0x03, 0x52, 0x13, 0x6c, 0x61, 0x73, 0x74, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69,
0x61, 0x6e, 0x63, 0x65, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x1a, 0xe7, 0x02, 0x0a, 0x12, 0x4f, 0x53,
0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65,
0x12, 0x20, 0x0a, 0x0c, 0x6f, 0x73, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f, 0x69, 0x64,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x73, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79,
0x49, 0x64, 0x12, 0x61, 0x0a, 0x14, 0x6f, 0x73, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x5f,
0x61, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
0x42, 0x2f, 0xfa, 0x41, 0x2c, 0x0a, 0x2a, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4f,
0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x6d, 0x65, 0x6e,
0x74, 0x52, 0x12, 0x6f, 0x73, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x41, 0x73, 0x73, 0x69, 0x67,
0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x4c, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04,
0x20, 0x01, 0x28, 0x0e, 0x32, 0x36, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c,
0x6f, 0x75, 0x64, 0x2e, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x76, 0x31, 0x61,
0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79, 0x43, 0x6f, 0x6d,
0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74,
0x61, 0x74, 0x65, 0x12, 0x7e, 0x0a, 0x1e, 0x6f, 0x73, 0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x79,
0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x69,
0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x6f, 0x73, 0x63, 0x6f, 0x6e,
0x66, 0x69, 0x67, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x4f, 0x53, 0x50, 0x6f,
0x6c, 0x69, 0x63, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6d, 0x70,
0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x1b, 0x6f, 0x73, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x79,
0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e,
0x63, 0x65, 0x73, 0x3a, 0x8c, 0x01, 0xea, 0x41, 0x88, 0x01, 0x0a, 0x34, 0x6f, 0x73, 0x63, 0x6f,
0x6e, 0x66, 0x69, 0x67, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e,
0x63, 0x6f, 0x6d, 0x2f, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x4f, 0x53, 0x50, 0x6f,
0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65,
0x12, 0x50, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x2f, 0x7b, 0x70, 0x72, 0x6f, 0x6a,
0x65, 0x63, 0x74, 0x7d, 0x2f, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x7b,
0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x7d, 0x2f, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e,
0x63, 0x65, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70,
0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63,
0x65, 0x7d, 0x22, 0x7a, 0x0a, 0x26, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63,
0x65, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c,
0x69, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x50, 0x0a, 0x04,
0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x3c, 0xe0, 0x41, 0x02, 0xfa,
0x41, 0x36, 0x0a, 0x34, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x49, 0x6e, 0x73, 0x74,
0x61, 0x6e, 0x63, 0x65, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f,
0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0xc1,
0x01, 0x0a, 0x28, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x4f,
0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61,
0x6e, 0x63, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x41, 0x0a, 0x06, 0x70,
0x61, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x29, 0xe0, 0x41, 0x02,
0xfa, 0x41, 0x23, 0x0a, 0x21, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4c, 0x6f,
0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x1b,
0x0a, 0x09, 0x70, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
0x05, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x70,
0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
0x09, 0x70, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x66, 0x69,
0x6c, 0x74, 0x65, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x66, 0x69, 0x6c, 0x74,
0x65, 0x72, 0x22, 0xda, 0x01, 0x0a, 0x29, 0x4c, 0x69, 0x73, 0x74, 0x49, 0x6e, 0x73, 0x74, 0x61,
0x6e, 0x63, 0x65, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d,
0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x12, 0x84, 0x01, 0x0a, 0x20, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x6f, 0x73,
0x5f, 0x70, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x69,
0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x6f, 0x73, 0x63, 0x6f, 0x6e,
0x66, 0x69, 0x67, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x2e, 0x49, 0x6e, 0x73, 0x74,
0x61, 0x6e, 0x63, 0x65, 0x4f, 0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f,
0x6d, 0x70, 0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x1d, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6e,
0x63, 0x65, 0x4f, 0x73, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70,
0x6c, 0x69, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x12, 0x26, 0x0a, 0x0f, 0x6e, 0x65, 0x78, 0x74, 0x5f,
0x70, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
0x52, 0x0d, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x42,
0xf2, 0x01, 0x0a, 0x21, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63,
0x6c, 0x6f, 0x75, 0x64, 0x2e, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x76, 0x31,
0x61, 0x6c, 0x70, 0x68, 0x61, 0x42, 0x21, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x4f,
0x53, 0x50, 0x6f, 0x6c, 0x69, 0x63, 0x69, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x69, 0x61,
0x6e, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x45, 0x67, 0x6f, 0x6f, 0x67,
0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x67, 0x65,
0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69,
0x73, 0x2f, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x2f, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x3b, 0x6f, 0x73, 0x63, 0x6f, 0x6e, 0x66, 0x69,
0x67, 0xaa, 0x02, 0x1d, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64,
0x2e, 0x4f, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x56, 0x31, 0x41, 0x6c, 0x70, 0x68,
0x61, 0xca, 0x02, 0x1d, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x5c, 0x43, 0x6c, 0x6f, 0x75, 0x64,
0x5c, 0x4f, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5c, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68,
0x61, 0xea, 0x02, 0x20, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x3a, 0x3a, 0x43, 0x6c, 0x6f, 0x75,
0x64, 0x3a, 0x3a, 0x4f, 0x73, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x3a, 0x3a, 0x56, 0x31, 0x61,
0x6c, 0x70, 0x68, 0x61, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescOnce sync.Once
file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescData = file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDesc
)
// rawDescGZIP returns the GZIP-compressed wire form of this file's
// descriptor, compressing it lazily exactly once (guarded by rawDescOnce).
func file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescGZIP() []byte {
	file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescOnce.Do(func() {
		file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescData)
	})
	return file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDescData
}
var file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
// goTypes maps descriptor type indexes (referenced by depIdxs below) to the
// concrete Go types generated for this file and its imports.
var file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_goTypes = []interface{}{
	(*InstanceOSPoliciesCompliance)(nil),                     // 0: google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance
	(*GetInstanceOSPoliciesComplianceRequest)(nil),           // 1: google.cloud.osconfig.v1alpha.GetInstanceOSPoliciesComplianceRequest
	(*ListInstanceOSPoliciesCompliancesRequest)(nil),         // 2: google.cloud.osconfig.v1alpha.ListInstanceOSPoliciesCompliancesRequest
	(*ListInstanceOSPoliciesCompliancesResponse)(nil),        // 3: google.cloud.osconfig.v1alpha.ListInstanceOSPoliciesCompliancesResponse
	(*InstanceOSPoliciesCompliance_OSPolicyCompliance)(nil),  // 4: google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance.OSPolicyCompliance
	(OSPolicyComplianceState)(0),                             // 5: google.cloud.osconfig.v1alpha.OSPolicyComplianceState
	(*timestamppb.Timestamp)(nil),                            // 6: google.protobuf.Timestamp
	(*OSPolicyResourceCompliance)(nil),                       // 7: google.cloud.osconfig.v1alpha.OSPolicyResourceCompliance
}
// depIdxs records, for every field/method cross-reference in this file, the
// index of the referenced type inside goTypes above. The trailing bracketed
// entries delimit the sub-lists consumed by the proto runtime.
var file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_depIdxs = []int32{
	5, // 0: google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance.state:type_name -> google.cloud.osconfig.v1alpha.OSPolicyComplianceState
	4, // 1: google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance.os_policy_compliances:type_name -> google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance.OSPolicyCompliance
	6, // 2: google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance.last_compliance_check_time:type_name -> google.protobuf.Timestamp
	0, // 3: google.cloud.osconfig.v1alpha.ListInstanceOSPoliciesCompliancesResponse.instance_os_policies_compliances:type_name -> google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance
	5, // 4: google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance.OSPolicyCompliance.state:type_name -> google.cloud.osconfig.v1alpha.OSPolicyComplianceState
	7, // 5: google.cloud.osconfig.v1alpha.InstanceOSPoliciesCompliance.OSPolicyCompliance.os_policy_resource_compliances:type_name -> google.cloud.osconfig.v1alpha.OSPolicyResourceCompliance
	6, // [6:6] is the sub-list for method output_type
	6, // [6:6] is the sub-list for method input_type
	6, // [6:6] is the sub-list for extension type_name
	6, // [6:6] is the sub-list for extension extendee
	0, // [0:6] is the sub-list for field type_name
}
func init() { file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_init() }
// proto_init builds and registers the protoreflect file descriptor for this
// generated file, wiring up message exporters, Go types, and dependency
// indexes. It is idempotent: later calls return immediately.
func file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_init() {
	if File_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto != nil {
		// Already initialized by an earlier caller.
		return
	}
	// Dependency files must be initialized first so their types resolve.
	file_google_cloud_osconfig_v1alpha_config_common_proto_init()
	if !protoimpl.UnsafeEnabled {
		// Without unsafe access, install exporter functions that hand the
		// runtime pointers to each message's internal bookkeeping fields
		// (state, sizeCache, unknownFields), selected by index i.
		file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*InstanceOSPoliciesCompliance); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*GetInstanceOSPoliciesComplianceRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*ListInstanceOSPoliciesCompliancesRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*ListInstanceOSPoliciesCompliancesResponse); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*InstanceOSPoliciesCompliance_OSPolicyCompliance); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	// Assemble the runtime file descriptor from the raw bytes plus the
	// bookkeeping slices declared above.
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDesc,
			NumEnums: 0,
			NumMessages: 5,
			NumExtensions: 0,
			NumServices: 0,
		},
		GoTypes: file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_goTypes,
		DependencyIndexes: file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_depIdxs,
		MessageInfos: file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_msgTypes,
	}.Build()
	File_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto = out.File
	// Release the construction-time data so it can be garbage collected.
	file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_rawDesc = nil
	file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_goTypes = nil
	file_google_cloud_osconfig_v1alpha_instance_os_policies_compliance_proto_depIdxs = nil
}
| google/go-genproto | googleapis/cloud/osconfig/v1alpha/instance_os_policies_compliance.pb.go | GO | apache-2.0 | 36,598 |
/*
* Copyright (C) 2014 The Android Open Source Project
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.io;
import com.google.j2objc.WeakProxy;
import java.util.Formatter;
import java.util.Locale;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.UnsupportedCharsetException;
/**
* A <code>PrintStream</code> adds functionality to another output stream,
* namely the ability to print representations of various data values
* conveniently. Two other features are provided as well. Unlike other output
* streams, a <code>PrintStream</code> never throws an
* <code>IOException</code>; instead, exceptional situations merely set an
* internal flag that can be tested via the <code>checkError</code> method.
* Optionally, a <code>PrintStream</code> can be created so as to flush
* automatically; this means that the <code>flush</code> method is
* automatically invoked after a byte array is written, one of the
* <code>println</code> methods is invoked, or a newline character or byte
* (<code>'\n'</code>) is written.
*
* <p> All characters printed by a <code>PrintStream</code> are converted into
* bytes using the platform's default character encoding. The <code>{@link
* PrintWriter}</code> class should be used in situations that require writing
* characters rather than bytes.
*
* @author Frank Yellin
* @author Mark Reinhold
* @since JDK1.0
*/
public class PrintStream extends FilterOutputStream
implements Appendable, Closeable
{
    // When true, flush() runs automatically after byte-array writes, println
    // calls, and newline bytes/chars (enforced in the write/newLine methods).
    private final boolean autoFlush;
    // Sticky error flag reported by checkError(); set instead of throwing.
    private boolean trouble = false;
    // NOTE(review): presumably cached by format(); not used in this chunk.
    private Formatter formatter;
    /**
     * Track both the text- and character-output streams, so that their buffers
     * can be flushed without flushing the entire stream.
     */
    private BufferedWriter textOut;
    private OutputStreamWriter charOut;
    // Charset for the lazily-created writer chain (see getTextOut());
    // null means the platform default charset.
    private Charset charset;
/**
* requireNonNull is explicitly declared here so as not to create an extra
* dependency on java.util.Objects.requireNonNull. PrintStream is loaded
* early during system initialization.
*/
private static <T> T requireNonNull(T obj, String message) {
if (obj == null)
throw new NullPointerException(message);
return obj;
}
/**
* Returns a charset object for the given charset name.
* @throws NullPointerException is csn is null
* @throws UnsupportedEncodingException if the charset is not supported
*/
private static Charset toCharset(String csn)
throws UnsupportedEncodingException
{
requireNonNull(csn, "charsetName");
try {
return Charset.forName(csn);
} catch (IllegalCharsetNameException|UnsupportedCharsetException unused) {
// UnsupportedEncodingException should be thrown
throw new UnsupportedEncodingException(csn);
}
}
    /* Private constructors */
    // Core constructor: wraps the target stream and records the auto-flush
    // policy. The character-encoding layer is created lazily (see getTextOut).
    private PrintStream(boolean autoFlush, OutputStream out) {
        super(out);
        this.autoFlush = autoFlush;
    }
private PrintStream(boolean autoFlush, OutputStream out, Charset charset) {
super(out);
this.autoFlush = autoFlush;
}
    /* Variant of the private constructor so that the given charset name
     * can be verified before evaluating the OutputStream argument. Used
     * by constructors creating a FileOutputStream that also take a
     * charset name.
     */
    // Putting the charset parameter before the stream means a bad charset
    // name fails (in the caller's toCharset(...) argument) before the file
    // is opened/truncated.
    private PrintStream(boolean autoFlush, Charset charset, OutputStream out)
        throws UnsupportedEncodingException
    {
        this(autoFlush, out, charset);
    }
    /**
     * Creates a new print stream. This stream will not flush automatically.
     *
     * @param out The output stream to which values and objects will be
     * printed
     *
     * @see java.io.PrintWriter#PrintWriter(java.io.OutputStream)
     */
    public PrintStream(OutputStream out) {
        // Delegates with autoFlush disabled; no charset recorded.
        this(out, false);
    }
    /**
     * Creates a new print stream.
     *
     * @param out The output stream to which values and objects will be
     * printed
     * @param autoFlush A boolean; if true, the output buffer will be flushed
     * whenever a byte array is written, one of the
     * <code>println</code> methods is invoked, or a newline
     * character or byte (<code>'\n'</code>) is written
     *
     * @see java.io.PrintWriter#PrintWriter(java.io.OutputStream, boolean)
     */
    public PrintStream(OutputStream out, boolean autoFlush) {
        // Null check happens before delegation so the message is consistent
        // across all public constructors.
        this(autoFlush, requireNonNull(out, "Null output stream"));
    }
    /**
     * Creates a new print stream.
     *
     * @param out The output stream to which values and objects will be
     * printed
     * @param autoFlush A boolean; if true, the output buffer will be flushed
     * whenever a byte array is written, one of the
     * <code>println</code> methods is invoked, or a newline
     * character or byte (<code>'\n'</code>) is written
     * @param encoding The name of a supported
     * <a href="../lang/package-summary.html#charenc">
     * character encoding</a>
     *
     * @throws UnsupportedEncodingException
     * If the named encoding is not supported
     *
     * @since 1.4
     */
    public PrintStream(OutputStream out, boolean autoFlush, String encoding)
        throws UnsupportedEncodingException
    {
        // toCharset validates the encoding name eagerly, so an invalid name
        // fails here rather than on first write.
        this(autoFlush,
             requireNonNull(out, "Null output stream"),
             toCharset(encoding));
    }
    /**
     * Creates a new print stream, without automatic line flushing, with the
     * specified file name. This convenience constructor creates
     * the necessary intermediate {@link java.io.OutputStreamWriter
     * OutputStreamWriter}, which will encode characters using the
     * {@linkplain java.nio.charset.Charset#defaultCharset() default charset}
     * for this instance of the Java virtual machine.
     *
     * @param fileName
     * The name of the file to use as the destination of this print
     * stream. If the file exists, then it will be truncated to
     * zero size; otherwise, a new file will be created. The output
     * will be written to the file and is buffered.
     *
     * @throws FileNotFoundException
     * If the given file object does not denote an existing, writable
     * regular file and a new regular file of that name cannot be
     * created, or if some other error occurs while opening or
     * creating the file
     *
     * @throws SecurityException
     * If a security manager is present and {@link
     * SecurityManager#checkWrite checkWrite(fileName)} denies write
     * access to the file
     *
     * @since 1.5
     */
    public PrintStream(String fileName) throws FileNotFoundException {
        // Uses the platform default charset; no explicit charset recorded.
        this(false, new FileOutputStream(fileName));
    }
    /**
     * Creates a new print stream, without automatic line flushing, with the
     * specified file name and charset. This convenience constructor creates
     * the necessary intermediate {@link java.io.OutputStreamWriter
     * OutputStreamWriter}, which will encode characters using the provided
     * charset.
     *
     * @param fileName
     * The name of the file to use as the destination of this print
     * stream. If the file exists, then it will be truncated to
     * zero size; otherwise, a new file will be created. The output
     * will be written to the file and is buffered.
     *
     * @param csn
     * The name of a supported {@linkplain java.nio.charset.Charset
     * charset}
     *
     * @throws FileNotFoundException
     * If the given file object does not denote an existing, writable
     * regular file and a new regular file of that name cannot be
     * created, or if some other error occurs while opening or
     * creating the file
     *
     * @throws SecurityException
     * If a security manager is present and {@link
     * SecurityManager#checkWrite checkWrite(fileName)} denies write
     * access to the file
     *
     * @throws UnsupportedEncodingException
     * If the named charset is not supported
     *
     * @since 1.5
     */
    public PrintStream(String fileName, String csn)
        throws FileNotFoundException, UnsupportedEncodingException
    {
        // ensure charset is checked before the file is opened
        // (argument evaluation order: toCharset(csn) runs before the
        // FileOutputStream constructor can truncate the file).
        this(false, toCharset(csn), new FileOutputStream(fileName));
    }
    /**
     * Creates a new print stream, without automatic line flushing, with the
     * specified file. This convenience constructor creates the necessary
     * intermediate {@link java.io.OutputStreamWriter OutputStreamWriter},
     * which will encode characters using the {@linkplain
     * java.nio.charset.Charset#defaultCharset() default charset} for this
     * instance of the Java virtual machine.
     *
     * @param file
     * The file to use as the destination of this print stream. If the
     * file exists, then it will be truncated to zero size; otherwise,
     * a new file will be created. The output will be written to the
     * file and is buffered.
     *
     * @throws FileNotFoundException
     * If the given file object does not denote an existing, writable
     * regular file and a new regular file of that name cannot be
     * created, or if some other error occurs while opening or
     * creating the file
     *
     * @throws SecurityException
     * If a security manager is present and {@link
     * SecurityManager#checkWrite checkWrite(file.getPath())}
     * denies write access to the file
     *
     * @since 1.5
     */
    public PrintStream(File file) throws FileNotFoundException {
        // Uses the platform default charset; no explicit charset recorded.
        this(false, new FileOutputStream(file));
    }
    /**
     * Creates a new print stream, without automatic line flushing, with the
     * specified file and charset. This convenience constructor creates
     * the necessary intermediate {@link java.io.OutputStreamWriter
     * OutputStreamWriter}, which will encode characters using the provided
     * charset.
     *
     * @param file
     * The file to use as the destination of this print stream. If the
     * file exists, then it will be truncated to zero size; otherwise,
     * a new file will be created. The output will be written to the
     * file and is buffered.
     *
     * @param csn
     * The name of a supported {@linkplain java.nio.charset.Charset
     * charset}
     *
     * @throws FileNotFoundException
     * If the given file object does not denote an existing, writable
     * regular file and a new regular file of that name cannot be
     * created, or if some other error occurs while opening or
     * creating the file
     *
     * @throws SecurityException
     * If a security manager is present and {@link
     * SecurityManager#checkWrite checkWrite(file.getPath())}
     * denies write access to the file
     *
     * @throws UnsupportedEncodingException
     * If the named charset is not supported
     *
     * @since 1.5
     */
    public PrintStream(File file, String csn)
        throws FileNotFoundException, UnsupportedEncodingException
    {
        // ensure charset is checked before the file is opened
        // (argument evaluation order: toCharset(csn) runs before the
        // FileOutputStream constructor can truncate the file).
        this(false, toCharset(csn), new FileOutputStream(file));
    }
/** Check to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (out == null)
throw new IOException("Stream closed");
}
/**
* Flushes the stream. This is done by writing any buffered output bytes to
* the underlying output stream and then flushing that stream.
*
* @see java.io.OutputStream#flush()
*/
public void flush() {
synchronized (this) {
try {
ensureOpen();
out.flush();
}
catch (IOException x) {
trouble = true;
}
}
}
    private boolean closing = false; /* To avoid recursive closing */
    // Android-changed: Lazily initialize textOut.
    // Builds the buffered character layer on first use, honoring the charset
    // recorded at construction (null -> platform default).
    // NOTE(review): WeakProxy presumably avoids a strong reference cycle from
    // the writer chain back to this stream under j2objc — confirm.
    private BufferedWriter getTextOut() {
        if (textOut == null) {
            PrintStream proxy = WeakProxy.forObject(this);
            charOut = charset != null ? new OutputStreamWriter(proxy, charset) :
                    new OutputStreamWriter(proxy);
            textOut = new BufferedWriter(charOut);
        }
        return textOut;
    }
    /**
     * Closes the stream. This is done by flushing the stream and then closing
     * the underlying output stream.
     *
     * @see java.io.OutputStream#close()
     */
    public void close() {
        synchronized (this) {
            // 'closing' guards against recursive close(): textOut wraps a
            // proxy of this stream, so closing it can re-enter here.
            if (! closing) {
                closing = true;
                try {
                    // Android-changed: Lazily initialized.
                    // out.close() is called explicitly to cover the case
                    // where textOut was never created.
                    if (textOut != null) {
                        textOut.close();
                    }
                    out.close();
                }
                catch (IOException x) {
                    trouble = true;
                }
                // Drop references so later writes fail ensureOpen() cleanly.
                textOut = null;
                charOut = null;
                out = null;
            }
        }
    }
    /**
     * Flushes the stream and checks its error state. The internal error state
     * is set to <code>true</code> when the underlying output stream throws an
     * <code>IOException</code> other than <code>InterruptedIOException</code>,
     * and when the <code>setError</code> method is invoked. If an operation
     * on the underlying output stream throws an
     * <code>InterruptedIOException</code>, then the <code>PrintStream</code>
     * converts the exception back into an interrupt by doing:
     * <pre>
     * Thread.currentThread().interrupt();
     * </pre>
     * or the equivalent.
     *
     * @return <code>true</code> if and only if this stream has encountered an
     * <code>IOException</code> other than
     * <code>InterruptedIOException</code>, or the
     * <code>setError</code> method has been invoked
     */
    public boolean checkError() {
        if (out != null)
            flush();
        if (out instanceof java.io.PrintStream) {
            // When chained to another PrintStream, surface its error state
            // instead of the local flag.
            PrintStream ps = (PrintStream) out;
            return ps.checkError();
        }
        return trouble;
    }
    /**
     * Sets the error state of the stream to <code>true</code>.
     *
     * <p> This method will cause subsequent invocations of {@link
     * #checkError()} to return <tt>true</tt> until {@link
     * #clearError()} is invoked.
     *
     * @since JDK1.1
     */
    protected void setError() {
        trouble = true;
    }
    /**
     * Clears the internal error state of this stream.
     *
     * <p> This method will cause subsequent invocations of {@link
     * #checkError()} to return <tt>false</tt> until another write
     * operation fails and invokes {@link #setError()}.
     *
     * @since 1.6
     */
    protected void clearError() {
        trouble = false;
    }
/*
* Exception-catching, synchronized output operations,
* which also implement the write() methods of OutputStream
*/
/**
* Writes the specified byte to this stream. If the byte is a newline and
* automatic flushing is enabled then the <code>flush</code> method will be
* invoked.
*
* <p> Note that the byte is written as given; to write a character that
* will be translated according to the platform's default character
* encoding, use the <code>print(char)</code> or <code>println(char)</code>
* methods.
*
* @param b The byte to be written
* @see #print(char)
* @see #println(char)
*/
public void write(int b) {
try {
synchronized (this) {
ensureOpen();
out.write(b);
if ((b == '\n') && autoFlush)
out.flush();
}
}
catch (InterruptedIOException x) {
Thread.currentThread().interrupt();
}
catch (IOException x) {
trouble = true;
}
}
/**
* Writes <code>len</code> bytes from the specified byte array starting at
* offset <code>off</code> to this stream. If automatic flushing is
* enabled then the <code>flush</code> method will be invoked.
*
* <p> Note that the bytes will be written as given; to write characters
* that will be translated according to the platform's default character
* encoding, use the <code>print(char)</code> or <code>println(char)</code>
* methods.
*
* @param buf A byte array
* @param off Offset from which to start taking bytes
* @param len Number of bytes to write
*/
public void write(byte buf[], int off, int len) {
try {
synchronized (this) {
ensureOpen();
out.write(buf, off, len);
if (autoFlush)
out.flush();
}
}
catch (InterruptedIOException x) {
Thread.currentThread().interrupt();
}
catch (IOException x) {
trouble = true;
}
}
/*
* The following private methods on the text- and character-output streams
* always flush the stream buffers, so that writes to the underlying byte
* stream occur as promptly as with the original PrintStream.
*/
private void write(char buf[]) {
try {
synchronized (this) {
ensureOpen();
// Android-changed: Lazily initialized.
BufferedWriter textOut = getTextOut();
textOut.write(buf);
textOut.flushBuffer();
charOut.flushBuffer();
if (autoFlush) {
for (int i = 0; i < buf.length; i++)
if (buf[i] == '\n')
out.flush();
}
}
}
catch (InterruptedIOException x) {
Thread.currentThread().interrupt();
}
catch (IOException x) {
trouble = true;
}
}
private void write(String s) {
try {
synchronized (this) {
ensureOpen();
// Android-changed: Lazily initialized.
BufferedWriter textOut = getTextOut();
textOut.write(s);
textOut.flushBuffer();
charOut.flushBuffer();
if (autoFlush && (s.indexOf('\n') >= 0))
out.flush();
}
}
catch (InterruptedIOException x) {
Thread.currentThread().interrupt();
}
catch (IOException x) {
trouble = true;
}
}
private void newLine() {
try {
synchronized (this) {
ensureOpen();
// Android-changed: Lazily initialized.
BufferedWriter textOut = getTextOut();
textOut.newLine();
textOut.flushBuffer();
charOut.flushBuffer();
if (autoFlush)
out.flush();
}
}
catch (InterruptedIOException x) {
Thread.currentThread().interrupt();
}
catch (IOException x) {
trouble = true;
}
}
/* Methods that do not terminate lines */
/**
* Prints a boolean value. The string produced by <code>{@link
* java.lang.String#valueOf(boolean)}</code> is translated into bytes
* according to the platform's default character encoding, and these bytes
* are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param b The <code>boolean</code> to be printed
*/
public void print(boolean b) {
write(b ? "true" : "false");
}
/**
* Prints a character. The character is translated into one or more bytes
* according to the platform's default character encoding, and these bytes
* are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param c The <code>char</code> to be printed
*/
public void print(char c) {
write(String.valueOf(c));
}
/**
* Prints an integer. The string produced by <code>{@link
* java.lang.String#valueOf(int)}</code> is translated into bytes
* according to the platform's default character encoding, and these bytes
* are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param i The <code>int</code> to be printed
* @see java.lang.Integer#toString(int)
*/
public void print(int i) {
write(String.valueOf(i));
}
/**
* Prints a long integer. The string produced by <code>{@link
* java.lang.String#valueOf(long)}</code> is translated into bytes
* according to the platform's default character encoding, and these bytes
* are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param l The <code>long</code> to be printed
* @see java.lang.Long#toString(long)
*/
public void print(long l) {
write(String.valueOf(l));
}
/**
* Prints a floating-point number. The string produced by <code>{@link
* java.lang.String#valueOf(float)}</code> is translated into bytes
* according to the platform's default character encoding, and these bytes
* are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param f The <code>float</code> to be printed
* @see java.lang.Float#toString(float)
*/
public void print(float f) {
write(String.valueOf(f));
}
/**
* Prints a double-precision floating-point number. The string produced by
* <code>{@link java.lang.String#valueOf(double)}</code> is translated into
* bytes according to the platform's default character encoding, and these
* bytes are written in exactly the manner of the <code>{@link
* #write(int)}</code> method.
*
* @param d The <code>double</code> to be printed
* @see java.lang.Double#toString(double)
*/
public void print(double d) {
write(String.valueOf(d));
}
/**
* Prints an array of characters. The characters are converted into bytes
* according to the platform's default character encoding, and these bytes
* are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param s The array of chars to be printed
*
* @throws NullPointerException If <code>s</code> is <code>null</code>
*/
public void print(char s[]) {
write(s);
}
/**
* Prints a string. If the argument is <code>null</code> then the string
* <code>"null"</code> is printed. Otherwise, the string's characters are
* converted into bytes according to the platform's default character
* encoding, and these bytes are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param s The <code>String</code> to be printed
*/
public void print(String s) {
if (s == null) {
s = "null";
}
write(s);
}
/**
* Prints an object. The string produced by the <code>{@link
* java.lang.String#valueOf(Object)}</code> method is translated into bytes
* according to the platform's default character encoding, and these bytes
* are written in exactly the manner of the
* <code>{@link #write(int)}</code> method.
*
* @param obj The <code>Object</code> to be printed
* @see java.lang.Object#toString()
*/
public void print(Object obj) {
write(String.valueOf(obj));
}
/* Methods that do terminate lines */
/**
* Terminates the current line by writing the line separator string. The
* line separator string is defined by the system property
* <code>line.separator</code>, and is not necessarily a single newline
* character (<code>'\n'</code>).
*/
public void println() {
newLine();
}
/**
* Prints a boolean and then terminate the line. This method behaves as
* though it invokes <code>{@link #print(boolean)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x The <code>boolean</code> to be printed
*/
public void println(boolean x) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints a character and then terminate the line. This method behaves as
* though it invokes <code>{@link #print(char)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x The <code>char</code> to be printed.
*/
public void println(char x) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints an integer and then terminate the line. This method behaves as
* though it invokes <code>{@link #print(int)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x The <code>int</code> to be printed.
*/
public void println(int x) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints a long and then terminate the line. This method behaves as
* though it invokes <code>{@link #print(long)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x a The <code>long</code> to be printed.
*/
public void println(long x) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints a float and then terminate the line. This method behaves as
* though it invokes <code>{@link #print(float)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x The <code>float</code> to be printed.
*/
public void println(float x) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints a double and then terminate the line. This method behaves as
* though it invokes <code>{@link #print(double)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x The <code>double</code> to be printed.
*/
public void println(double x) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints an array of characters and then terminate the line. This method
* behaves as though it invokes <code>{@link #print(char[])}</code> and
* then <code>{@link #println()}</code>.
*
* @param x an array of chars to print.
*/
public void println(char x[]) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints a String and then terminate the line. This method behaves as
* though it invokes <code>{@link #print(String)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x The <code>String</code> to be printed.
*/
public void println(String x) {
synchronized (this) {
print(x);
newLine();
}
}
/**
* Prints an Object and then terminate the line. This method calls
* at first String.valueOf(x) to get the printed object's string value,
* then behaves as
* though it invokes <code>{@link #print(String)}</code> and then
* <code>{@link #println()}</code>.
*
* @param x The <code>Object</code> to be printed.
*/
public void println(Object x) {
String s = String.valueOf(x);
synchronized (this) {
print(s);
newLine();
}
}
/**
* A convenience method to write a formatted string to this output stream
* using the specified format string and arguments.
*
* <p> An invocation of this method of the form <tt>out.printf(format,
* args)</tt> behaves in exactly the same way as the invocation
*
* <pre>
* out.format(format, args) </pre>
*
* @param format
* A format string as described in <a
* href="../util/Formatter.html#syntax">Format string syntax</a>
*
* @param args
* Arguments referenced by the format specifiers in the format
* string. If there are more arguments than format specifiers, the
* extra arguments are ignored. The number of arguments is
* variable and may be zero. The maximum number of arguments is
* limited by the maximum dimension of a Java array as defined by
* <cite>The Java™ Virtual Machine Specification</cite>.
* The behaviour on a
* <tt>null</tt> argument depends on the <a
* href="../util/Formatter.html#syntax">conversion</a>.
*
* @throws java.util.IllegalFormatException
* If a format string contains an illegal syntax, a format
* specifier that is incompatible with the given arguments,
* insufficient arguments given the format string, or other
* illegal conditions. For specification of all possible
* formatting errors, see the <a
* href="../util/Formatter.html#detail">Details</a> section of the
* formatter class specification.
*
* @throws NullPointerException
* If the <tt>format</tt> is <tt>null</tt>
*
* @return This output stream
*
* @since 1.5
*/
public PrintStream printf(String format, Object ... args) {
return format(format, args);
}
/**
* A convenience method to write a formatted string to this output stream
* using the specified format string and arguments.
*
* <p> An invocation of this method of the form <tt>out.printf(l, format,
* args)</tt> behaves in exactly the same way as the invocation
*
* <pre>
* out.format(l, format, args) </pre>
*
* @param l
* The {@linkplain java.util.Locale locale} to apply during
* formatting. If <tt>l</tt> is <tt>null</tt> then no localization
* is applied.
*
* @param format
* A format string as described in <a
* href="../util/Formatter.html#syntax">Format string syntax</a>
*
* @param args
* Arguments referenced by the format specifiers in the format
* string. If there are more arguments than format specifiers, the
* extra arguments are ignored. The number of arguments is
* variable and may be zero. The maximum number of arguments is
* limited by the maximum dimension of a Java array as defined by
* <cite>The Java™ Virtual Machine Specification</cite>.
* The behaviour on a
* <tt>null</tt> argument depends on the <a
* href="../util/Formatter.html#syntax">conversion</a>.
*
* @throws java.util.IllegalFormatException
* If a format string contains an illegal syntax, a format
* specifier that is incompatible with the given arguments,
* insufficient arguments given the format string, or other
* illegal conditions. For specification of all possible
* formatting errors, see the <a
* href="../util/Formatter.html#detail">Details</a> section of the
* formatter class specification.
*
* @throws NullPointerException
* If the <tt>format</tt> is <tt>null</tt>
*
* @return This output stream
*
* @since 1.5
*/
public PrintStream printf(Locale l, String format, Object ... args) {
return format(l, format, args);
}
/**
* Writes a formatted string to this output stream using the specified
* format string and arguments.
*
* <p> The locale always used is the one returned by {@link
* java.util.Locale#getDefault() Locale.getDefault()}, regardless of any
* previous invocations of other formatting methods on this object.
*
* @param format
* A format string as described in <a
* href="../util/Formatter.html#syntax">Format string syntax</a>
*
* @param args
* Arguments referenced by the format specifiers in the format
* string. If there are more arguments than format specifiers, the
* extra arguments are ignored. The number of arguments is
* variable and may be zero. The maximum number of arguments is
* limited by the maximum dimension of a Java array as defined by
* <cite>The Java™ Virtual Machine Specification</cite>.
* The behaviour on a
* <tt>null</tt> argument depends on the <a
* href="../util/Formatter.html#syntax">conversion</a>.
*
* @throws java.util.IllegalFormatException
* If a format string contains an illegal syntax, a format
* specifier that is incompatible with the given arguments,
* insufficient arguments given the format string, or other
* illegal conditions. For specification of all possible
* formatting errors, see the <a
* href="../util/Formatter.html#detail">Details</a> section of the
* formatter class specification.
*
* @throws NullPointerException
* If the <tt>format</tt> is <tt>null</tt>
*
* @return This output stream
*
* @since 1.5
*/
    public PrintStream format(String format, Object ... args) {
        try {
            synchronized (this) {
                ensureOpen();
                // Reuse the cached Formatter unless the default-locale object
                // changed.  Note the reference comparison (!=): an equal but
                // distinct Locale instance also triggers re-creation, which is
                // slower but still correct.
                // NOTE(review): WeakProxy presumably breaks a retain cycle
                // between this stream and its Formatter under j2objc — confirm.
                if ((formatter == null)
                    || (formatter.locale() != Locale.getDefault()))
                    formatter = new Formatter((Appendable) WeakProxy.forObject(this));
                formatter.format(Locale.getDefault(), format, args);
            }
        } catch (InterruptedIOException x) {
            // PrintStream contract: never throw; re-assert the interrupt flag.
            Thread.currentThread().interrupt();
        } catch (IOException x) {
            // PrintStream never propagates IOException; record for checkError().
            trouble = true;
        }
        return this;
    }
/**
* Writes a formatted string to this output stream using the specified
* format string and arguments.
*
* @param l
* The {@linkplain java.util.Locale locale} to apply during
* formatting. If <tt>l</tt> is <tt>null</tt> then no localization
* is applied.
*
* @param format
* A format string as described in <a
* href="../util/Formatter.html#syntax">Format string syntax</a>
*
* @param args
* Arguments referenced by the format specifiers in the format
* string. If there are more arguments than format specifiers, the
* extra arguments are ignored. The number of arguments is
* variable and may be zero. The maximum number of arguments is
* limited by the maximum dimension of a Java array as defined by
* <cite>The Java™ Virtual Machine Specification</cite>.
* The behaviour on a
* <tt>null</tt> argument depends on the <a
* href="../util/Formatter.html#syntax">conversion</a>.
*
* @throws java.util.IllegalFormatException
* If a format string contains an illegal syntax, a format
* specifier that is incompatible with the given arguments,
* insufficient arguments given the format string, or other
* illegal conditions. For specification of all possible
* formatting errors, see the <a
* href="../util/Formatter.html#detail">Details</a> section of the
* formatter class specification.
*
* @throws NullPointerException
* If the <tt>format</tt> is <tt>null</tt>
*
* @return This output stream
*
* @since 1.5
*/
    public PrintStream format(Locale l, String format, Object ... args) {
        try {
            synchronized (this) {
                ensureOpen();
                // Formatter is cached keyed by locale identity (reference
                // comparison, not equals); an equal-but-distinct Locale simply
                // rebuilds the Formatter — correct, just slower.
                if ((formatter == null)
                    || (formatter.locale() != l))
                    formatter = new Formatter(WeakProxy.forObject(this), l);
                formatter.format(l, format, args);
            }
        } catch (InterruptedIOException x) {
            // PrintStream contract: never throw; re-assert the interrupt flag.
            Thread.currentThread().interrupt();
        } catch (IOException x) {
            // PrintStream never propagates IOException; record for checkError().
            trouble = true;
        }
        return this;
    }
/**
* Appends the specified character sequence to this output stream.
*
* <p> An invocation of this method of the form <tt>out.append(csq)</tt>
* behaves in exactly the same way as the invocation
*
* <pre>
* out.print(csq.toString()) </pre>
*
* <p> Depending on the specification of <tt>toString</tt> for the
* character sequence <tt>csq</tt>, the entire sequence may not be
* appended. For instance, invoking then <tt>toString</tt> method of a
* character buffer will return a subsequence whose content depends upon
* the buffer's position and limit.
*
* @param csq
* The character sequence to append. If <tt>csq</tt> is
* <tt>null</tt>, then the four characters <tt>"null"</tt> are
* appended to this output stream.
*
* @return This output stream
*
* @since 1.5
*/
public PrintStream append(CharSequence csq) {
if (csq == null)
print("null");
else
print(csq.toString());
return this;
}
/**
* Appends a subsequence of the specified character sequence to this output
* stream.
*
* <p> An invocation of this method of the form <tt>out.append(csq, start,
* end)</tt> when <tt>csq</tt> is not <tt>null</tt>, behaves in
* exactly the same way as the invocation
*
* <pre>
* out.print(csq.subSequence(start, end).toString()) </pre>
*
* @param csq
* The character sequence from which a subsequence will be
* appended. If <tt>csq</tt> is <tt>null</tt>, then characters
* will be appended as if <tt>csq</tt> contained the four
* characters <tt>"null"</tt>.
*
* @param start
* The index of the first character in the subsequence
*
* @param end
* The index of the character following the last character in the
* subsequence
*
* @return This output stream
*
* @throws IndexOutOfBoundsException
* If <tt>start</tt> or <tt>end</tt> are negative, <tt>start</tt>
* is greater than <tt>end</tt>, or <tt>end</tt> is greater than
* <tt>csq.length()</tt>
*
* @since 1.5
*/
public PrintStream append(CharSequence csq, int start, int end) {
CharSequence cs = (csq == null ? "null" : csq);
write(cs.subSequence(start, end).toString());
return this;
}
/**
* Appends the specified character to this output stream.
*
* <p> An invocation of this method of the form <tt>out.append(c)</tt>
* behaves in exactly the same way as the invocation
*
* <pre>
* out.print(c) </pre>
*
* @param c
* The 16-bit character to append
*
* @return This output stream
*
* @since 1.5
*/
    public PrintStream append(char c) {
        print(c);
        return this;   // returns this to allow chained appends, per Appendable
    }
}
| life-beam/j2objc | jre_emul/android/platform/libcore/ojluni/src/main/java/java/io/PrintStream.java | Java | apache-2.0 | 41,821 |
/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package e2e
import (
"bytes"
"fmt"
"math"
"sort"
"strconv"
"strings"
"sync"
"text/tabwriter"
"time"
. "github.com/onsi/gomega"
"k8s.io/kubernetes/pkg/api"
client "k8s.io/kubernetes/pkg/client/unversioned"
utilruntime "k8s.io/kubernetes/pkg/util/runtime"
)
const (
    // Default sampling interval, in seconds.
    // NOTE(review): appears unused in the visible code — startGatheringData
    // takes an explicit period from the caller; confirm before removing.
    resourceDataGatheringPeriodSeconds = 60
)

// resourceConstraint is a per-container limit that stopAndSummarize checks
// the 99th-percentile usage against.
type resourceConstraint struct {
    cpuConstraint    float64 // cores
    memoryConstraint uint64  // bytes (working set)
}

// containerResourceGatherer periodically samples resource usage of the
// kube-system containers in a background goroutine.
type containerResourceGatherer struct {
    usageTimeseries map[time.Time]resourceUsagePerContainer // sample time -> per-container usage
    stopCh          chan struct{}                           // closed to stop the gathering goroutine
    wg              sync.WaitGroup                          // waits for the goroutine to exit
}

// SingleContainerSummary is one row of the final usage report.
type SingleContainerSummary struct {
    Name string
    Cpu  float64
    Mem  uint64
}

// we can't have int here, as JSON does not accept integer keys.
type ResourceUsageSummary map[string][]SingleContainerSummary
// PrintHumanReadable renders the summary as one tab-aligned table per
// percentile.  Sections come out in map-iteration order, which Go leaves
// unspecified, so section order varies between runs.
func (s *ResourceUsageSummary) PrintHumanReadable() string {
    buf := &bytes.Buffer{}
    w := tabwriter.NewWriter(buf, 1, 0, 1, ' ', 0)
    for perc, summaries := range *s {
        // The heading goes straight to buf; the tabwriter buffers its rows
        // until Flush, so flushing inside the loop keeps each table directly
        // under its own heading.
        buf.WriteString(fmt.Sprintf("%v percentile:\n", perc))
        fmt.Fprintf(w, "container\tcpu(cores)\tmemory(MB)\n")
        for _, summary := range summaries {
            fmt.Fprintf(w, "%q\t%.3f\t%.2f\n", summary.Name, summary.Cpu, float64(summary.Mem)/(1024*1024))
        }
        w.Flush()
    }
    return buf.String()
}
// PrintJSON renders the summary via the file-local prettyPrintJSON helper.
func (s *ResourceUsageSummary) PrintJSON() string {
    return prettyPrintJSON(*s)
}
// startGatheringData launches a goroutine that samples kube-system container
// resource usage every `period` until stopCh is closed.  Samples are keyed by
// collection time in usageTimeseries; only the goroutine writes the map, and
// readers are expected to call stopAndSummarize first (which waits for exit).
func (g *containerResourceGatherer) startGatheringData(c *client.Client, period time.Duration) {
    g.usageTimeseries = make(map[time.Time]resourceUsagePerContainer)
    g.wg.Add(1)
    g.stopCh = make(chan struct{})
    // NOTE(review): the goroutine's error return value is never observed.
    go func() error {
        defer utilruntime.HandleCrash()
        defer g.wg.Done()
        for {
            select {
            case <-time.After(period):
                now := time.Now()
                data, err := g.getKubeSystemContainersResourceUsage(c)
                if err != nil {
                    // Log and keep sampling; one failed scrape is not fatal.
                    Logf("Error while getting resource usage: %v", err)
                    continue
                }
                g.usageTimeseries[now] = data
            case <-g.stopCh:
                Logf("Stop channel is closed. Stopping gatherer.")
                return nil
            }
        }
    }()
}
// stopAndSummarize terminates the background gatherer, computes the requested
// usage percentiles over all collected samples, and returns them as a
// ResourceUsageSummary.  For the 99th percentile it additionally checks the
// supplied per-container constraints and fails the test (gomega Expect) if
// any container exceeded its CPU or memory limit.
func (g *containerResourceGatherer) stopAndSummarize(percentiles []int, constraints map[string]resourceConstraint) *ResourceUsageSummary {
    close(g.stopCh)
    Logf("Closed stop channel.")
    // After Wait returns, the gathering goroutine has exited and
    // usageTimeseries can be read without synchronization.
    g.wg.Wait()
    Logf("Waitgroup finished.")
    if len(percentiles) == 0 {
        Logf("Warning! Empty percentile list for stopAndPrintData.")
        return &ResourceUsageSummary{}
    }
    stats := g.computePercentiles(g.usageTimeseries, percentiles)
    // Sort container names so the summary is deterministically ordered.
    sortedKeys := []string{}
    for name := range stats[percentiles[0]] {
        sortedKeys = append(sortedKeys, name)
    }
    sort.Strings(sortedKeys)
    violatedConstraints := make([]string, 0)
    summary := make(ResourceUsageSummary)
    for _, perc := range percentiles {
        for _, name := range sortedKeys {
            usage := stats[perc][name]
            summary[strconv.Itoa(perc)] = append(summary[strconv.Itoa(perc)], SingleContainerSummary{
                Name: name,
                Cpu:  usage.CPUUsageInCores,
                Mem:  usage.MemoryWorkingSetInBytes,
            })
            // Verifying 99th percentile of resource usage
            if perc == 99 {
                // Name has a form: <pod_name>/<container_name>
                containerName := strings.Split(name, "/")[1]
                if constraint, ok := constraints[containerName]; ok {
                    if usage.CPUUsageInCores > constraint.cpuConstraint {
                        violatedConstraints = append(
                            violatedConstraints,
                            fmt.Sprintf("Container %v is using %v/%v CPU",
                                name,
                                usage.CPUUsageInCores,
                                constraint.cpuConstraint,
                            ),
                        )
                    }
                    if usage.MemoryWorkingSetInBytes > constraint.memoryConstraint {
                        violatedConstraints = append(
                            violatedConstraints,
                            fmt.Sprintf("Container %v is using %v/%v MB of memory",
                                name,
                                float64(usage.MemoryWorkingSetInBytes)/(1024*1024),
                                float64(constraint.memoryConstraint)/(1024*1024),
                            ),
                        )
                    }
                }
            }
        }
    }
    Expect(violatedConstraints).To(BeEmpty())
    return &summary
}
// computePercentiles reduces the collected time series to per-container usage
// values at each requested percentile (nearest-rank method).  The returned map
// is keyed by percentile, then by "<pod>/<container>" name.
func (g *containerResourceGatherer) computePercentiles(timeSeries map[time.Time]resourceUsagePerContainer, percentilesToCompute []int) map[int]resourceUsagePerContainer {
    if len(timeSeries) == 0 {
        return make(map[int]resourceUsagePerContainer)
    }
    // Pivot the data: build per-container slices of samples across all times.
    dataMap := make(map[string]*usageDataPerContainer)
    for _, singleStatistic := range timeSeries {
        for name, data := range singleStatistic {
            if dataMap[name] == nil {
                // Allocate with length 0 and capacity len(timeSeries).
                // Previously make([]T, n) was used, which pre-filled n zero
                // values that the appends below kept, doubling each slice
                // and skewing every percentile toward zero.
                dataMap[name] = &usageDataPerContainer{
                    cpuData:        make([]float64, 0, len(timeSeries)),
                    memUseData:     make([]uint64, 0, len(timeSeries)),
                    memWorkSetData: make([]uint64, 0, len(timeSeries)),
                }
            }
            dataMap[name].cpuData = append(dataMap[name].cpuData, data.CPUUsageInCores)
            dataMap[name].memUseData = append(dataMap[name].memUseData, data.MemoryUsageInBytes)
            dataMap[name].memWorkSetData = append(dataMap[name].memWorkSetData, data.MemoryWorkingSetInBytes)
        }
    }
    // Sort each sample slice so index k holds the k-th smallest observation.
    for _, v := range dataMap {
        sort.Float64s(v.cpuData)
        sort.Sort(uint64arr(v.memUseData))
        sort.Sort(uint64arr(v.memWorkSetData))
    }
    result := make(map[int]resourceUsagePerContainer)
    for _, perc := range percentilesToCompute {
        data := make(resourceUsagePerContainer)
        for k, v := range dataMap {
            // Nearest-rank index; clamp at 0 so a percentile of 0 (or a tiny
            // sample count) cannot produce a negative index and panic.
            percentileIndex := int(math.Ceil(float64(len(v.cpuData)*perc)/100)) - 1
            if percentileIndex < 0 {
                percentileIndex = 0
            }
            data[k] = &containerResourceUsage{
                Name:                    k,
                CPUUsageInCores:         v.cpuData[percentileIndex],
                MemoryUsageInBytes:      v.memUseData[percentileIndex],
                MemoryWorkingSetInBytes: v.memWorkSetData[percentileIndex],
            }
        }
        result[perc] = data
    }
    return result
}
// getKubeSystemContainersResourceUsage takes one usage snapshot for every
// container currently running in the kube-system namespace.  It fans out one
// goroutine per node, merges the per-node results under a mutex, and returns
// an error if any node could not be queried.
func (g *containerResourceGatherer) getKubeSystemContainersResourceUsage(c *client.Client) (resourceUsagePerContainer, error) {
    pods, err := c.Pods("kube-system").List(api.ListOptions{})
    if err != nil {
        return resourceUsagePerContainer{}, err
    }
    nodes, err := c.Nodes().List(api.ListOptions{})
    if err != nil {
        return resourceUsagePerContainer{}, err
    }
    containerIDToNameMap := make(map[string]string)
    containerIDs := make([]string, 0)
    for _, pod := range pods.Items {
        for _, container := range pod.Status.ContainerStatuses {
            // NOTE(review): runtime container IDs normally look like
            // "docker://<id>"; trimming "docker:/" leaves a leading "/" in
            // the key.  Confirm this matches the IDs returned by
            // getOneTimeResourceUsageOnNode, otherwise the lookup below
            // silently maps usage to the empty name.
            containerID := strings.TrimPrefix(container.ContainerID, "docker:/")
            containerIDToNameMap[containerID] = pod.Name + "/" + container.Name
            containerIDs = append(containerIDs, containerID)
        }
    }
    // mutex guards both the errors slice and nameToUsageMap below.
    mutex := sync.Mutex{}
    wg := sync.WaitGroup{}
    wg.Add(len(nodes.Items))
    errors := make([]error, 0)
    nameToUsageMap := make(resourceUsagePerContainer, len(containerIDToNameMap))
    for _, node := range nodes.Items {
        go func(nodeName string) {
            defer utilruntime.HandleCrash()
            defer wg.Done()
            nodeUsage, err := getOneTimeResourceUsageOnNode(c, nodeName, 15*time.Second, func() []string { return containerIDs }, true)
            mutex.Lock()
            defer mutex.Unlock()
            if err != nil {
                errors = append(errors, err)
                return
            }
            for k, v := range nodeUsage {
                nameToUsageMap[containerIDToNameMap[k]] = v
            }
        }(node.Name)
    }
    wg.Wait()
    if len(errors) != 0 {
        return resourceUsagePerContainer{}, fmt.Errorf("Errors while gathering usage data: %v", errors)
    }
    return nameToUsageMap, nil
}
| swagiaal/kubernetes | test/e2e/resource_usage_gatherer.go | GO | apache-2.0 | 7,581 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.test.cmmn.converter;
import static org.assertj.core.api.Assertions.assertThat;
import org.flowable.cmmn.model.CasePageTask;
import org.flowable.cmmn.model.CmmnModel;
import org.flowable.cmmn.model.PlanItemDefinition;
import org.flowable.test.cmmn.converter.util.CmmnXmlConverterTest;
/**
* @author Filip Hrisafov
*/
public class CasePageTaskSameDeploymentCmmnXmlConverterTest {

    @CmmnXmlConverterTest("org/flowable/test/cmmn/converter/casePageTaskSameDeployment.cmmn")
    public void validateModel(CmmnModel cmmnModel) {
        assertThat(cmmnModel).isNotNull();

        // sameDeployment explicitly true in the model
        assertCasePageTask(cmmnModel, "casePageTaskSameDeployment", "testKey", true);
        // sameDeployment explicitly false in the model
        assertCasePageTask(cmmnModel, "casePageTaskSameDeploymentFalse", "testKey2", false);
        // "Global" variant — resolves to true (presumably via the global
        // setting in the test CMMN resource)
        assertCasePageTask(cmmnModel, "casePageTaskSameDeploymentGlobal", "testKey3", true);
    }

    /**
     * Asserts that the plan item definition with the given id is a {@link CasePageTask}
     * carrying the expected form key and same-deployment flag, and that no extra
     * attributes were parsed onto it.
     */
    private static void assertCasePageTask(CmmnModel cmmnModel, String planItemDefinitionId,
            String expectedFormKey, boolean expectedSameDeployment) {
        PlanItemDefinition planItemDefinition = cmmnModel.findPlanItemDefinition(planItemDefinitionId);
        assertThat(planItemDefinition)
                .isInstanceOfSatisfying(CasePageTask.class, task -> {
                    assertThat(task.getFormKey()).isEqualTo(expectedFormKey);
                    assertThat(task.isSameDeployment()).isEqualTo(expectedSameDeployment);
                });
        assertThat(planItemDefinition.getAttributes()).isEmpty();
    }
}
| dbmalkovsky/flowable-engine | modules/flowable-cmmn-converter/src/test/java/org/flowable/test/cmmn/converter/CasePageTaskSameDeploymentCmmnXmlConverterTest.java | Java | apache-2.0 | 2,479 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/clouddirectory/model/BatchUpdateObjectAttributesResponse.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace CloudDirectory
{
namespace Model
{

// Default instance: the "has been set" flag stays false, so Jsonize()
// emits no ObjectIdentifier key.
BatchUpdateObjectAttributesResponse::BatchUpdateObjectAttributesResponse() :
    m_objectIdentifierHasBeenSet(false)
{
}

// Deserializing constructor: delegates the actual parsing to operator=.
BatchUpdateObjectAttributesResponse::BatchUpdateObjectAttributesResponse(JsonView jsonValue) :
    m_objectIdentifierHasBeenSet(false)
{
  *this = jsonValue;
}

// Populates this object from JSON.  Only the optional "ObjectIdentifier"
// member is read; an absent key leaves the current value untouched.
BatchUpdateObjectAttributesResponse& BatchUpdateObjectAttributesResponse::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("ObjectIdentifier"))
  {
    m_objectIdentifier = jsonValue.GetString("ObjectIdentifier");
    m_objectIdentifierHasBeenSet = true;
  }
  return *this;
}

// Serializes back to JSON, emitting only members that were explicitly set.
JsonValue BatchUpdateObjectAttributesResponse::Jsonize() const
{
  JsonValue payload;
  if(m_objectIdentifierHasBeenSet)
  {
   payload.WithString("ObjectIdentifier", m_objectIdentifier);
  }
  return payload;
}

} // namespace Model
} // namespace CloudDirectory
} // namespace Aws
| cedral/aws-sdk-cpp | aws-cpp-sdk-clouddirectory/source/model/BatchUpdateObjectAttributesResponse.cpp | C++ | apache-2.0 | 1,706 |
package com.ctrip.zeus.task.clean;
import com.ctrip.zeus.clean.CleanDbManager;
import com.ctrip.zeus.task.AbstractTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
/**
* Created by fanqq on 2016/1/5.
*/
@Component("cleanDbTask")
public class CleanDbTask extends AbstractTask {

    /**
     * Static final logger with an explicit class literal: one instance serves
     * all task objects and the logger name cannot drift under subclassing.
     */
    private static final Logger logger = LoggerFactory.getLogger(CleanDbTask.class);

    /** Scheduling interval: 30 minutes, in milliseconds. */
    private static final long INTERVAL_MILLIS = 60000L * 30;

    @Resource
    private CleanDbManager cleanDbManager;

    @Override
    public void start() {
        // Nothing to initialize; all work happens in run().
    }

    @Override
    public void run() throws Exception {
        try {
            logger.info("[CleanDbTask] clean db task started.");
            cleanDbManager.run();
            logger.info("[CleanDbTask] clean db task finished.");
        } catch (Exception e) {
            // Swallow after logging so the scheduler keeps invoking the task.
            logger.warn("[clean db job] clean db exception." + e.getMessage(), e);
        }
    }

    @Override
    public void stop() {
        // No resources to release.
    }

    @Override
    public long getInterval() {
        return INTERVAL_MILLIS;
    }
}
| sdgdsffdsfff/zeus | slb/src/main/java/com/ctrip/zeus/task/clean/CleanDbTask.java | Java | apache-2.0 | 1,085 |
/*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.domain.builders;
import org.mifos.dto.screen.ClientPersonalDetailDto;
/**
 * Fluent builder for {@link ClientPersonalDetailDto}.  Any field left unset is
 * passed to the DTO constructor as {@code null}.  (Before the {@code with*}
 * methods were added the builder could only produce an all-null DTO.)
 */
public class ClientPersonalDetailDtoBuilder {

    private Integer ethinicity;
    private Integer citizenship;
    private Integer handicapped;
    private Integer businessActivities;
    private Integer maritalStatus;
    private Integer educationLevel;
    private Short numChildren;
    private Short gender;
    private Short povertyStatus;

    public ClientPersonalDetailDtoBuilder withEthinicity(Integer ethinicity) {
        this.ethinicity = ethinicity;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withCitizenship(Integer citizenship) {
        this.citizenship = citizenship;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withHandicapped(Integer handicapped) {
        this.handicapped = handicapped;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withBusinessActivities(Integer businessActivities) {
        this.businessActivities = businessActivities;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withMaritalStatus(Integer maritalStatus) {
        this.maritalStatus = maritalStatus;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withEducationLevel(Integer educationLevel) {
        this.educationLevel = educationLevel;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withNumChildren(Short numChildren) {
        this.numChildren = numChildren;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withGender(Short gender) {
        this.gender = gender;
        return this;
    }

    public ClientPersonalDetailDtoBuilder withPovertyStatus(Short povertyStatus) {
        this.povertyStatus = povertyStatus;
        return this;
    }

    /** Builds the DTO from the currently configured (possibly null) fields. */
    public ClientPersonalDetailDto build() {
        return new ClientPersonalDetailDto(ethinicity, citizenship, handicapped, businessActivities, maritalStatus, educationLevel, numChildren, gender, povertyStatus);
    }
} | vorburger/mifos-head | application/src/test/java/org/mifos/domain/builders/ClientPersonalDetailDtoBuilder.java | Java | apache-2.0 | 1,421 |
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.fixture;
import com.thoughtworks.go.util.GoConstants;
import com.thoughtworks.go.util.SystemEnvironment;
public class DatabaseDiskIsLow implements PreCondition {

    @Override
    public void onSetUp() throws Exception {
        // Full-size limit far below the warning limit simulates a nearly-full DB disk.
        SystemEnvironment environment = new SystemEnvironment();
        environment.setProperty(SystemEnvironment.DATABASE_FULL_SIZE_LIMIT, "1m");
        environment.setProperty(SystemEnvironment.DATABASE_WARNING_SIZE_LIMIT, "11222334m");
    }

    @Override
    public void onTearDown() throws Exception {
        // Restore defaults by clearing both overrides.
        SystemEnvironment environment = new SystemEnvironment();
        environment.clearProperty(SystemEnvironment.DATABASE_FULL_SIZE_LIMIT);
        environment.clearProperty(SystemEnvironment.DATABASE_WARNING_SIZE_LIMIT);
    }

    /** The configured "full" threshold, in bytes (1 MB). */
    public long getLowLimit() {
        return GoConstants.MEGA_BYTE;
    }
}
| marques-work/gocd | server/src/test-integration/java/com/thoughtworks/go/fixture/DatabaseDiskIsLow.java | Java | apache-2.0 | 1,397 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#if defined(XALAN_BUILD_DEPRECATED_DOM_BRIDGE)
#include "XercesBridgeHelper.hpp"
#if XERCES_VERSION_MAJOR >= 2
#include <xercesc/dom/deprecated/DOM_CharacterData.hpp>
#else
#include <xercesc/dom/DOM_CharacterData.hpp>
#endif
#include <xalanc/XercesParserLiaison/XercesDOMException.hpp>
namespace XALAN_CPP_NAMESPACE {

// Each helper below forwards to the deprecated Xerces DOM_* API and converts
// any DOM_DOMException thrown by Xerces into Xalan's XercesDOMException so
// callers only ever see the Xalan exception hierarchy.

void
XercesBridgeHelper::setNodeValue(
            DOM_NodeType&           theXercesNode,
            const XalanDOMString&   nodeValue)
{
    try
    {
        theXercesNode.setNodeValue(XalanDOMStringToXercesDOMString(nodeValue));
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

void
XercesBridgeHelper::normalize(DOM_NodeType&     theXercesNode)
{
    try
    {
        theXercesNode.normalize();
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

// isSupported cannot throw a DOM exception, so no translation wrapper here.
bool
XercesBridgeHelper::isSupported(
            const DOM_NodeType&     theXercesNode,
            const XalanDOMString&   feature,
            const XalanDOMString&   version)
{
    return theXercesNode.isSupported(
                XalanDOMStringToXercesDOMString(feature),
                XalanDOMStringToXercesDOMString(version));
}

void
XercesBridgeHelper::setPrefix(
            DOM_NodeType&           theXercesNode,
            const XalanDOMString&   prefix)
{
    try
    {
        theXercesNode.setPrefix(XalanDOMStringToXercesDOMString(prefix));
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

// Copies the Xerces substring into a Xalan string (rawBuffer + length avoids
// an intermediate null-terminated copy).
const XalanDOMString
XercesBridgeHelper::substringData(
            const DOM_CharacterDataType&    theXercesNode,
            unsigned int                    offset,
            unsigned int                    count)
{
    try
    {
        const DOMStringType     theString(theXercesNode.substringData(offset, count));

        return XalanDOMString(theString.rawBuffer(), theString.length());
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

void
XercesBridgeHelper::appendData(
            DOM_CharacterDataType&  theXercesNode,
            const XalanDOMString&   arg)
{
    try
    {
        theXercesNode.appendData(XalanDOMStringToXercesDOMString(arg));
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

void
XercesBridgeHelper::insertData(
            DOM_CharacterDataType&  theXercesNode,
            unsigned int            offset,
            const XalanDOMString&   arg)
{
    try
    {
        theXercesNode.insertData(offset, XalanDOMStringToXercesDOMString(arg));
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

void
XercesBridgeHelper::deleteData(
            DOM_CharacterDataType&  theXercesNode,
            unsigned int            offset,
            unsigned int            count)
{
    try
    {
        theXercesNode.deleteData(offset, count);
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

void
XercesBridgeHelper::replaceData(
            DOM_CharacterDataType&  theXercesNode,
            unsigned int            offset,
            unsigned int            count,
            const XalanDOMString&   arg)
{
    try
    {
        theXercesNode.replaceData(offset, count, XalanDOMStringToXercesDOMString(arg));
    }
    catch(const DOM_DOMExceptionType&   theException)
    {
        throw XercesDOMException(theException);
    }
}

}
#endif //XALAN_BUILD_DEPRECATED_DOM_BRIDGE
| apache/xalan-c | src/xalanc/XercesParserLiaison/Deprecated/XercesBridgeHelper.cpp | C++ | apache-2.0 | 4,549 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ds/model/UpdateConditionalForwarderRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::DirectoryService::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Default-constructed request: all "has been set" flags false, so
// SerializePayload() emits an empty JSON object until setters are called.
UpdateConditionalForwarderRequest::UpdateConditionalForwarderRequest() :
    m_directoryIdHasBeenSet(false),
    m_remoteDomainNameHasBeenSet(false),
    m_dnsIpAddrsHasBeenSet(false)
{
}

// Serializes only the members that were explicitly set into the JSON body.
Aws::String UpdateConditionalForwarderRequest::SerializePayload() const
{
  JsonValue payload;

  if(m_directoryIdHasBeenSet)
  {
   payload.WithString("DirectoryId", m_directoryId);

  }

  if(m_remoteDomainNameHasBeenSet)
  {
   payload.WithString("RemoteDomainName", m_remoteDomainName);

  }

  if(m_dnsIpAddrsHasBeenSet)
  {
   Array<JsonValue> dnsIpAddrsJsonList(m_dnsIpAddrs.size());
   for(unsigned dnsIpAddrsIndex = 0; dnsIpAddrsIndex < dnsIpAddrsJsonList.GetLength(); ++dnsIpAddrsIndex)
   {
     dnsIpAddrsJsonList[dnsIpAddrsIndex].AsString(m_dnsIpAddrs[dnsIpAddrsIndex]);
   }
   payload.WithArray("DnsIpAddrs", std::move(dnsIpAddrsJsonList));

  }

  return payload.View().WriteReadable();
}

// The X-Amz-Target header routes the request to the DirectoryService
// UpdateConditionalForwarder operation (JSON-RPC style dispatch).
Aws::Http::HeaderValueCollection UpdateConditionalForwarderRequest::GetRequestSpecificHeaders() const
{
  Aws::Http::HeaderValueCollection headers;
  headers.insert(Aws::Http::HeaderValuePair("X-Amz-Target", "DirectoryService_20150416.UpdateConditionalForwarder"));
  return headers;

}
| cedral/aws-sdk-cpp | aws-cpp-sdk-ds/source/model/UpdateConditionalForwarderRequest.cpp | C++ | apache-2.0 | 2,001 |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.utils.logging;
import org.slf4j.Logger;
import org.slf4j.Marker;
import static com.google.common.base.MoreObjects.toStringHelper;
/**
* Delegating logger.
*/
public class DelegatingLogger implements Logger {
private final Logger delegate;
public DelegatingLogger(Logger delegate) {
this.delegate = delegate;
}
@Override
public String getName() {
return delegate.getName();
}
@Override
public boolean isTraceEnabled() {
return delegate.isTraceEnabled();
}
@Override
public void trace(String msg) {
delegate.trace(msg);
}
@Override
public void trace(String format, Object arg) {
delegate.trace(format, arg);
}
@Override
public void trace(String format, Object arg1, Object arg2) {
delegate.trace(format, arg1, arg2);
}
@Override
public void trace(String format, Object... arguments) {
delegate.trace(format, arguments);
}
@Override
public void trace(String msg, Throwable t) {
delegate.trace(msg, t);
}
@Override
public boolean isTraceEnabled(Marker marker) {
return delegate.isTraceEnabled(marker);
}
@Override
public void trace(Marker marker, String msg) {
delegate.trace(marker, msg);
}
@Override
public void trace(Marker marker, String format, Object arg) {
delegate.trace(marker, format, arg);
}
@Override
public void trace(Marker marker, String format, Object arg1, Object arg2) {
delegate.trace(marker, format, arg1, arg2);
}
@Override
public void trace(Marker marker, String format, Object... argArray) {
delegate.trace(marker, format, argArray);
}
@Override
public void trace(Marker marker, String msg, Throwable t) {
delegate.trace(marker, msg, t);
}
  // --------------------------------------------------------------------
  // DEBUG level: straight pass-through to the wrapped Logger.
  // --------------------------------------------------------------------

  @Override
  public boolean isDebugEnabled() {
    return delegate.isDebugEnabled();
  }

  @Override
  public void debug(String msg) {
    delegate.debug(msg);
  }

  @Override
  public void debug(String format, Object arg) {
    delegate.debug(format, arg);
  }

  @Override
  public void debug(String format, Object arg1, Object arg2) {
    delegate.debug(format, arg1, arg2);
  }

  @Override
  public void debug(String format, Object... arguments) {
    delegate.debug(format, arguments);
  }

  @Override
  public void debug(String msg, Throwable t) {
    delegate.debug(msg, t);
  }

  @Override
  public boolean isDebugEnabled(Marker marker) {
    return delegate.isDebugEnabled(marker);
  }

  @Override
  public void debug(Marker marker, String msg) {
    delegate.debug(marker, msg);
  }

  @Override
  public void debug(Marker marker, String format, Object arg) {
    delegate.debug(marker, format, arg);
  }

  @Override
  public void debug(Marker marker, String format, Object arg1, Object arg2) {
    delegate.debug(marker, format, arg1, arg2);
  }

  @Override
  public void debug(Marker marker, String format, Object... arguments) {
    delegate.debug(marker, format, arguments);
  }

  @Override
  public void debug(Marker marker, String msg, Throwable t) {
    delegate.debug(marker, msg, t);
  }
  // --------------------------------------------------------------------
  // INFO level: straight pass-through to the wrapped Logger.
  // --------------------------------------------------------------------

  @Override
  public boolean isInfoEnabled() {
    return delegate.isInfoEnabled();
  }

  @Override
  public void info(String msg) {
    delegate.info(msg);
  }

  @Override
  public void info(String format, Object arg) {
    delegate.info(format, arg);
  }

  @Override
  public void info(String format, Object arg1, Object arg2) {
    delegate.info(format, arg1, arg2);
  }

  @Override
  public void info(String format, Object... arguments) {
    delegate.info(format, arguments);
  }

  @Override
  public void info(String msg, Throwable t) {
    delegate.info(msg, t);
  }

  @Override
  public boolean isInfoEnabled(Marker marker) {
    return delegate.isInfoEnabled(marker);
  }

  @Override
  public void info(Marker marker, String msg) {
    delegate.info(marker, msg);
  }

  @Override
  public void info(Marker marker, String format, Object arg) {
    delegate.info(marker, format, arg);
  }

  @Override
  public void info(Marker marker, String format, Object arg1, Object arg2) {
    delegate.info(marker, format, arg1, arg2);
  }

  @Override
  public void info(Marker marker, String format, Object... arguments) {
    delegate.info(marker, format, arguments);
  }

  @Override
  public void info(Marker marker, String msg, Throwable t) {
    delegate.info(marker, msg, t);
  }
  // --------------------------------------------------------------------
  // WARN level: straight pass-through to the wrapped Logger.
  // NOTE: the (format, Object...) overload appears before the
  // (format, arg1, arg2) overload here, unlike the other levels — the
  // declaration order mirrors org.slf4j.Logger and is intentional.
  // --------------------------------------------------------------------

  @Override
  public boolean isWarnEnabled() {
    return delegate.isWarnEnabled();
  }

  @Override
  public void warn(String msg) {
    delegate.warn(msg);
  }

  @Override
  public void warn(String format, Object arg) {
    delegate.warn(format, arg);
  }

  @Override
  public void warn(String format, Object... arguments) {
    delegate.warn(format, arguments);
  }

  @Override
  public void warn(String format, Object arg1, Object arg2) {
    delegate.warn(format, arg1, arg2);
  }

  @Override
  public void warn(String msg, Throwable t) {
    delegate.warn(msg, t);
  }

  @Override
  public boolean isWarnEnabled(Marker marker) {
    return delegate.isWarnEnabled(marker);
  }

  @Override
  public void warn(Marker marker, String msg) {
    delegate.warn(marker, msg);
  }

  @Override
  public void warn(Marker marker, String format, Object arg) {
    delegate.warn(marker, format, arg);
  }

  @Override
  public void warn(Marker marker, String format, Object arg1, Object arg2) {
    delegate.warn(marker, format, arg1, arg2);
  }

  @Override
  public void warn(Marker marker, String format, Object... arguments) {
    delegate.warn(marker, format, arguments);
  }

  @Override
  public void warn(Marker marker, String msg, Throwable t) {
    delegate.warn(marker, msg, t);
  }
  // --------------------------------------------------------------------
  // ERROR level: straight pass-through to the wrapped Logger.
  // --------------------------------------------------------------------

  @Override
  public boolean isErrorEnabled() {
    return delegate.isErrorEnabled();
  }

  @Override
  public void error(String msg) {
    delegate.error(msg);
  }

  @Override
  public void error(String format, Object arg) {
    delegate.error(format, arg);
  }

  @Override
  public void error(String format, Object arg1, Object arg2) {
    delegate.error(format, arg1, arg2);
  }

  @Override
  public void error(String format, Object... arguments) {
    delegate.error(format, arguments);
  }

  @Override
  public void error(String msg, Throwable t) {
    delegate.error(msg, t);
  }

  @Override
  public boolean isErrorEnabled(Marker marker) {
    return delegate.isErrorEnabled(marker);
  }

  @Override
  public void error(Marker marker, String msg) {
    delegate.error(marker, msg);
  }

  @Override
  public void error(Marker marker, String format, Object arg) {
    delegate.error(marker, format, arg);
  }

  @Override
  public void error(Marker marker, String format, Object arg1, Object arg2) {
    delegate.error(marker, format, arg1, arg2);
  }

  @Override
  public void error(Marker marker, String format, Object... arguments) {
    delegate.error(marker, format, arguments);
  }

  @Override
  public void error(Marker marker, String msg, Throwable t) {
    delegate.error(marker, msg, t);
  }
  @Override
  public String toString() {
    // Renders as "DelegatingLogger{<delegate>}" via the shared toStringHelper
    // utility (presumably a Guava-style MoreObjects.toStringHelper static
    // import — confirm at the top of the file).
    return toStringHelper(this)
        .addValue(delegate)
        .toString();
  }
}
| atomix/atomix | utils/src/main/java/io/atomix/utils/logging/DelegatingLogger.java | Java | apache-2.0 | 7,703 |
// Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc
package com.xiaomi.infra.galaxy.common.model;
import com.xiaomi.infra.galaxy.common.GalaxyClientException;
import com.xiaomi.infra.galaxy.common.constants.ReturnCode;
import java.util.ArrayList;
import java.util.List;
// Referenced classes of package com.xiaomi.infra.galaxy.common.model:
// AttributeValue, Comparator, AttributeType, Verifier
public class Condition
{
private String attributeName;
private List attributeValues;
private String comparator;
public Condition()
{
}
public Condition addAttributeValue(Object obj)
{
if (attributeValues == null)
{
attributeValues = new ArrayList();
}
AttributeValue.putAttributeValueInList(attributeValues, obj);
return this;
}
public boolean equals(Object obj)
{
if (this != obj) goto _L2; else goto _L1
_L1:
boolean flag = true;
_L4:
return flag;
_L2:
flag = false;
if (obj == null) goto _L4; else goto _L3
_L3:
boolean flag1;
flag1 = obj instanceof Condition;
flag = false;
if (!flag1) goto _L4; else goto _L5
_L5:
Condition condition = (Condition)obj;
boolean flag2;
boolean flag3;
boolean flag4;
boolean flag7;
boolean flag8;
boolean flag9;
if (condition.getComparator() == null)
{
flag2 = true;
} else
{
flag2 = false;
}
if (getComparator() == null)
{
flag3 = true;
} else
{
flag3 = false;
}
flag4 = flag2 ^ flag3;
flag = false;
if (flag4) goto _L4; else goto _L6
_L6:
if (condition.getComparator() == null)
{
break; /* Loop/switch isn't completed */
}
flag9 = condition.getComparator().equals(getComparator());
flag = false;
if (!flag9) goto _L4; else goto _L7
_L7:
boolean flag5;
boolean flag6;
if (condition.getAttributeValues() == null)
{
flag5 = true;
} else
{
flag5 = false;
}
if (getAttributeValues() == null)
{
flag6 = true;
} else
{
flag6 = false;
}
flag7 = flag5 ^ flag6;
flag = false;
if (flag7) goto _L4; else goto _L8
_L8:
if (condition.getAttributeValues() == null)
{
break; /* Loop/switch isn't completed */
}
flag8 = condition.getAttributeValues().equals(getAttributeValues());
flag = false;
if (!flag8) goto _L4; else goto _L9
_L9:
return true;
}
public String getAttributeName()
{
return attributeName;
}
public List getAttributeValues()
{
return attributeValues;
}
public String getComparator()
{
return comparator;
}
public int hashCode()
{
int i;
int j;
List list;
int k;
if (getComparator() == null)
{
i = 0;
} else
{
i = getComparator().hashCode();
}
j = 31 * (i + 31);
list = getAttributeValues();
k = 0;
if (list != null)
{
k = getAttributeValues().hashCode();
}
return j + k;
}
public void setAttributeName(String s)
{
attributeName = s;
}
public void setAttributeValues(List list)
{
attributeValues = list;
}
public void setComparator(String s)
{
comparator = s;
}
public void validate()
{
Comparator comparator1;
AttributeType attributetype;
comparator1 = Comparator.fromValue(comparator);
if (attributeValues == null || attributeValues.size() != 1)
{
throw new GalaxyClientException(ReturnCode.UNEXPECTED_NUMBER_OF_OPERANDS, "comparator can only use one attribute value");
}
attributetype = AttributeType.fromValue(((AttributeValue)attributeValues.get(0)).getType());
Verifier.validateAttributeValue(attributeName, (AttributeValue)attributeValues.get(0), false);
_cls1..SwitchMap.com.xiaomi.infra.galaxy.common.model.Comparator[comparator1.ordinal()];
JVM INSTR tableswitch 1 6: default 128
// 1 221
// 2 221
// 3 179
// 4 222
// 5 265
// 6 308;
goto _L1 _L2 _L2 _L3 _L4 _L5 _L6
_L1:
throw new GalaxyClientException(ReturnCode.UNEXPECTED_OPERAND_TYPE, (new StringBuilder()).append(attributetype.name()).append("(comparator:").append(comparator1).append(" ,attributeType").append(attributetype).append(")").toString());
_L3:
if (AttributeType.BINARY != attributetype && AttributeType.STRING != attributetype && AttributeType.INT8 != attributetype && AttributeType.INT16 != attributetype && AttributeType.INT32 != attributetype && AttributeType.INT64 != attributetype)
{
break; /* Loop/switch isn't completed */
}
_L2:
return;
_L4:
if (AttributeType.BINARY == attributetype || AttributeType.STRING == attributetype || AttributeType.INT8 == attributetype || AttributeType.INT16 == attributetype || AttributeType.INT32 == attributetype || AttributeType.INT64 == attributetype)
{
return;
}
break; /* Loop/switch isn't completed */
_L5:
if (AttributeType.BINARY == attributetype || AttributeType.STRING == attributetype || AttributeType.INT8 == attributetype || AttributeType.INT16 == attributetype || AttributeType.INT32 == attributetype || AttributeType.INT64 == attributetype)
{
return;
}
break; /* Loop/switch isn't completed */
_L6:
if (AttributeType.BINARY == attributetype || AttributeType.STRING == attributetype || AttributeType.INT8 == attributetype || AttributeType.INT16 == attributetype || AttributeType.INT32 == attributetype || AttributeType.INT64 == attributetype)
{
return;
}
if (true) goto _L1; else goto _L7
_L7:
}
public Condition withAttributeValues(List list)
{
setAttributeValues(list);
return this;
}
public Condition withComparator(Comparator comparator1)
{
setComparator(comparator1.name());
return this;
}
private class _cls1
{
static final int $SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator[];
static
{
$SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator = new int[Comparator.values().length];
try
{
$SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator[Comparator.EQ.ordinal()] = 1;
}
catch (NoSuchFieldError nosuchfielderror) { }
try
{
$SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator[Comparator.NE.ordinal()] = 2;
}
catch (NoSuchFieldError nosuchfielderror1) { }
try
{
$SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator[Comparator.GT.ordinal()] = 3;
}
catch (NoSuchFieldError nosuchfielderror2) { }
try
{
$SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator[Comparator.GE.ordinal()] = 4;
}
catch (NoSuchFieldError nosuchfielderror3) { }
try
{
$SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator[Comparator.LT.ordinal()] = 5;
}
catch (NoSuchFieldError nosuchfielderror4) { }
try
{
$SwitchMap$com$xiaomi$infra$galaxy$common$model$Comparator[Comparator.LE.ordinal()] = 6;
}
catch (NoSuchFieldError nosuchfielderror5)
{
return;
}
}
}
}
| vishnudevk/MiBandDecompiled | Original Files/source/src/com/xiaomi/infra/galaxy/common/model/Condition.java | Java | apache-2.0 | 8,239 |
package com.marvinlabs.widget.progresspanel.demo;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import com.marvinlabs.widget.progresspanel.demo.R;
/**
 * Phone-style entry point for the demo list: hosts {@link DemoListFragment}
 * and, when a demo is selected, launches {@link DemoActivity} to display the
 * corresponding fragment.
 */
public class DemoListActivity extends FragmentActivity implements DemoListFragment.OnDemoSelectedListener {

    private static final String TAG_LIST_FRAGMENT = "ListFragment";
    // NOTE(review): unused in this class — possibly a leftover from a
    // dual-pane variant; confirm before removing.
    private static final String TAG_DEMO_FRAGMENT = "DemoFragment";

    private DemoListFragment listFragment;
    // NOTE(review): never assigned, so the list is always non-selectable in
    // onStart(); confirm whether dual-pane detection was meant to set this.
    private boolean isDualPane;
    private int currentDemo;

    /**
     * Creates a fragment given a fully qualified class name and some arguments.
     *
     * @param className fully qualified name of a Fragment subclass with a
     *        public no-argument constructor
     * @param args arguments bundle passed to {@link Fragment#setArguments}
     * @return the instantiated fragment
     * @throws RuntimeException if the class cannot be found or instantiated
     */
    public static Fragment newFragmentInstance(String className, Bundle args) {
        try {
            Class<?> c = Class.forName(className);
            Fragment f = (Fragment) c.newInstance();
            f.setArguments(args);
            return f;
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            // Multi-catch replaces three identical catch blocks (Java 7+).
            throw new RuntimeException("Cannot create fragment", e);
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_demolist);

        // Reattach the existing fragment after a configuration change; only
        // create and attach a fresh one on first launch.
        listFragment = (DemoListFragment) getSupportFragmentManager().findFragmentByTag(TAG_LIST_FRAGMENT);
        if (listFragment == null) {
            listFragment = DemoListFragment.newInstance();
            getSupportFragmentManager().beginTransaction().replace(R.id.main_container, listFragment,
                    TAG_LIST_FRAGMENT).commitAllowingStateLoss();
        }
    }

    @Override
    protected void onStart() {
        super.onStart();
        listFragment.setSelectable(isDualPane);
    }

    @Override
    public void onDemoSelected(String fragmentClass, Bundle args) {
        // Single-pane behaviour: hand the selected demo off to a dedicated
        // activity that will instantiate the fragment.
        Intent i = new Intent(this, DemoActivity.class);
        i.putExtra(DemoActivity.EXTRA_FRAGMENT_CLASS, fragmentClass);
        i.putExtra(DemoActivity.EXTRA_FRAGMENT_ARGS, args);
        startActivity(i);
    }
}
| 0359xiaodong/android-progress-panel | demo/src/main/java/com/marvinlabs/widget/progresspanel/demo/DemoListActivity.java | Java | apache-2.0 | 2,294 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.containeranalysis.v1alpha1.model;
/**
* Basis describes the base image portion (Note) of the DockerImage relationship. Linked occurrences
* are derived from this or an equivalent image via: FROM Or an equivalent reference, e.g. a tag of
* the resource_url.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Container Analysis API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Basis extends com.google.api.client.json.GenericJson {

  // NOTE(review): generated code (see the file header) — prefer regenerating
  // from the API discovery document over hand-editing so upstream changes are
  // not lost.

  /**
   * The fingerprint of the base image.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Fingerprint fingerprint;

  /**
   * The resource_url for the resource representing the basis of associated occurrence images.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String resourceUrl;

  /**
   * The fingerprint of the base image.
   * @return value or {@code null} for none
   */
  public Fingerprint getFingerprint() {
    return fingerprint;
  }

  /**
   * The fingerprint of the base image.
   * @param fingerprint fingerprint or {@code null} for none
   */
  public Basis setFingerprint(Fingerprint fingerprint) {
    this.fingerprint = fingerprint;
    return this;
  }

  /**
   * The resource_url for the resource representing the basis of associated occurrence images.
   * @return value or {@code null} for none
   */
  public java.lang.String getResourceUrl() {
    return resourceUrl;
  }

  /**
   * The resource_url for the resource representing the basis of associated occurrence images.
   * @param resourceUrl resourceUrl or {@code null} for none
   */
  public Basis setResourceUrl(java.lang.String resourceUrl) {
    this.resourceUrl = resourceUrl;
    return this;
  }

  // Covariant overrides keep the fluent API returning Basis rather than the
  // GenericJson base type.
  @Override
  public Basis set(String fieldName, Object value) {
    return (Basis) super.set(fieldName, value);
  }

  @Override
  public Basis clone() {
    return (Basis) super.clone();
  }

}
| googleapis/google-api-java-client-services | clients/google-api-services-containeranalysis/v1alpha1/1.31.0/com/google/api/services/containeranalysis/v1alpha1/model/Basis.java | Java | apache-2.0 | 2,988 |
package com.modesteam.urutau.model.system;
/**
 * Simple value holder describing a pagination request for member listings:
 * which page to fetch and how many entries to return per page.
 */
public class MemberPager {

    // Page index requested by the caller (null when unspecified).
    private Integer page;

    // Maximum number of entries per page (null when unspecified).
    private Integer limit;

    /** @return the requested page index, or {@code null} if none was set */
    public Integer getPage() {
        return this.page;
    }

    /** @param page the page index to request */
    public void setPage(Integer page) {
        this.page = page;
    }

    /** @return the page size limit, or {@code null} if none was set */
    public Integer getLimit() {
        return this.limit;
    }

    /** @param limit the page size limit to request */
    public void setLimit(Integer limit) {
        this.limit = limit;
    }
}
| Modesteam/Urutau | src/main/java/com/modesteam/urutau/model/system/MemberPager.java | Java | apache-2.0 | 342 |
#if UNITY_ANDROID
using UnityEngine;
using System.Collections;
namespace GB {
/// <summary>
/// Android implementation of <c>ICommonHelper</c>. Bridges common SDK calls
/// from Unity C# to the native Java plugin classes via JNI
/// (<c>AndroidJavaClass</c>). Calls that touch the UI are marshalled onto the
/// Android UI thread with <c>runOnUiThread</c>.
/// </summary>
public class GBCommonAndroidHelper : GBAndroidHelper, ICommonHelper {

    // Fully-qualified Java classes that expose the native plugin entry points.
    private static readonly string ANDROID_PLUGIN_CLASS_PACKAGE = "com.gebros.platform.unity.GBUnityPlugin";
    private static readonly string ANDROID_UTILITY_PLUGIN_CLASS_PACKAGE = "com.gebros.platform.unity.ApplicationPlugin";

    // Java-side static method names invoked through CallStatic.
    private static readonly string SET_ACTIVE_MARKET = "setActiveMarket";
    private static readonly string GB_SET_CONFIGURE = "configureWithGameInfo";
    private static readonly string GB_GLOBAL_SERVER_INFO = "requestGlobalServerInfo";
    private static readonly string GB_GET_MCC = "getMCC";
    private static readonly string GB_GET_LANGUAGE = "getCurrentLanguage";
    private static readonly string GB_GET_DEVICE_ID = "getDeviceId";
    private static readonly string GB_GET_DEVICE_MODEL = "getDeviceModel";
    private static readonly string GB_SHOW_TOAST = "ShowToast";
    private static readonly string GB_SHOW_ALERT = "ShowAlert";
    private static readonly string GB_GET_RUNTIME_PERMISSION = "GetRuntimePermission";
    private static readonly string GB_CHECK_RUNTIME_PERMISSION = "CheckRuntimePermission";

    public GBCommonAndroidHelper () {}

    /// <summary>
    /// Selects the active app market on the native side. The result is
    /// delivered asynchronously to the callback GameObject named by
    /// <paramref name="funcCall"/>.
    /// </summary>
    public void SetActiveMarket(GBSettings.Market market, GBRequest funcCall) {
        UnityActivity.Call("runOnUiThread", new AndroidJavaRunnable(() => {
            using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_PLUGIN_CLASS_PACKAGE)) {
                jc.CallStatic(SET_ACTIVE_MARKET, GBSettings.GetPlatformInfo(), funcCall.GetCallbackGameObjectName());
            }
        }));
    }

    /// <summary>Configures the native SDK, passing the market as its int value.</summary>
    public void ConfigureSDKWithGameInfo(string clientSecretKey, int gameCode, GBSettings.Market market, int logLevel) {
        using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_PLUGIN_CLASS_PACKAGE)) {
            jc.CallStatic(GB_SET_CONFIGURE, clientSecretKey, gameCode, (int)market, logLevel);
        }
    }

    /// <summary>
    /// Configures the native SDK with a free-form platform info string.
    /// (A stale commented-out runOnUiThread variant of this call was removed.)
    /// </summary>
    public void ConfigureSDKWithGameInfo(string clientSecretKey, int gameCode, string platformInfo, int logLevel) {
        using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_PLUGIN_CLASS_PACKAGE)) {
            jc.CallStatic(GB_SET_CONFIGURE, clientSecretKey, gameCode, platformInfo, logLevel);
        }
    }

    /// <summary>
    /// Requests global server information for the configured game code; the
    /// response arrives asynchronously on the callback GameObject.
    /// </summary>
    public void RequestGlobalServerInfo(string branchURL, GBRequest funcCall) {
        UnityActivity.Call("runOnUiThread", new AndroidJavaRunnable(() => {
            using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_PLUGIN_CLASS_PACKAGE)) {
                jc.CallStatic(GB_GLOBAL_SERVER_INFO, branchURL, GBSettings.GameCode, funcCall.GetCallbackGameObjectName());
            }
        }));
    }

    /// <summary>Mobile country code reported by the device, via the utility plugin.</summary>
    public string GetMCC() {
        using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_UTILITY_PLUGIN_CLASS_PACKAGE)) {
            return jc.CallStatic<string>(GB_GET_MCC);
        }
    }

    /// <summary>Current device language, via the utility plugin.</summary>
    public string GetCurrentLanguage() {
        using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_UTILITY_PLUGIN_CLASS_PACKAGE)) {
            return jc.CallStatic<string>(GB_GET_LANGUAGE);
        }
    }

    /// <summary>Device identifier, via the utility plugin.</summary>
    public string GetDeviceId() {
        using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_UTILITY_PLUGIN_CLASS_PACKAGE)) {
            return jc.CallStatic<string>(GB_GET_DEVICE_ID);
        }
    }

    /// <summary>Device model string, via the utility plugin.</summary>
    public string GetDeviceModel() {
        using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_UTILITY_PLUGIN_CLASS_PACKAGE)) {
            return jc.CallStatic<string>(GB_GET_DEVICE_MODEL);
        }
    }

    /// <summary>Shows a native Android toast with the given message.</summary>
    public void ShowToast(string message) {
        UnityActivity.Call("runOnUiThread", new AndroidJavaRunnable(() => {
            using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_UTILITY_PLUGIN_CLASS_PACKAGE)) {
                jc.CallStatic(GB_SHOW_TOAST, message);
            }
        }));
    }

    /// <summary>Shows the plugin's native alert dialog.</summary>
    public void ShowAlert() {
        UnityActivity.Call("runOnUiThread", new AndroidJavaRunnable(() => {
            using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_UTILITY_PLUGIN_CLASS_PACKAGE)) {
                jc.CallStatic(GB_SHOW_ALERT);
            }
        }));
    }

    /// <summary>
    /// Requests an Android runtime permission; the grant/deny result is
    /// delivered asynchronously to the callback GameObject.
    /// </summary>
    public void GetRuntimePermission(string permission, bool isNecessary, GBRequest funcCall) {
        UnityActivity.Call("runOnUiThread", new AndroidJavaRunnable(() => {
            using (AndroidJavaClass jc = new AndroidJavaClass (ANDROID_PLUGIN_CLASS_PACKAGE)) {
                jc.CallStatic (GB_GET_RUNTIME_PERMISSION, permission, isNecessary, funcCall.GetCallbackGameObjectName());
            }
        }));
    }

    /// <summary>Synchronously checks whether a runtime permission is already granted.</summary>
    public bool CheckRuntimePermission(string permission) {
        using (AndroidJavaClass jc = new AndroidJavaClass(ANDROID_PLUGIN_CLASS_PACKAGE)) {
            return jc.CallStatic<bool>(GB_CHECK_RUNTIME_PERMISSION, permission);
        }
    }

    /// <summary>Intentional no-op on Android — kept to satisfy ICommonHelper.</summary>
    public void SendPushMessage(string userKey, string title, string message) {
        /* Dummy */
    }
}
}
#endif
| nairs77/GB-Unity-Plugin | Assets/GB/Script/Common/GBCommonAndroidHelper.cs | C# | apache-2.0 | 4,838 |
#pragma warning disable 109, 114, 219, 429, 168, 162
namespace haxe.lang{
// Generated by the Haxe C# target (std/cs/internal/Exceptions.hx): holds the
// exception currently propagating through Haxe-generated code. Do not
// hand-edit — regenerate from the Haxe standard library instead.
public class Exceptions {

    public Exceptions(){
        unchecked {
            #line 25 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
            {
            }
        }
        #line default
    }

    // One slot per thread: the exception being handled on that thread.
    [System.ThreadStaticAttribute]
    public static global::System.Exception exception;
}
}
#pragma warning disable 109, 114, 219, 429, 168, 162
namespace haxe.lang{
// Generated by the Haxe C# target: wraps an arbitrary Haxe-thrown value in a
// .NET exception so it can travel through the CLR unwind machinery. Do not
// hand-edit — regenerate from the Haxe standard library instead.
public class HaxeException : global::System.Exception {

    // Unwraps nested HaxeExceptions so obj always holds the raw thrown value.
    public HaxeException(object obj) : base(){
        unchecked {
            #line 41 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
            if (( obj is global::haxe.lang.HaxeException )) {
                #line 43 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
                global::haxe.lang.HaxeException _obj = ((global::haxe.lang.HaxeException) (obj) );
                obj = _obj.getObject();
            }
            #line 46 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
            this.obj = obj;
        }
        #line default
    }

    // Returns obj unchanged when it is already a .NET Exception; otherwise
    // wraps it in a HaxeException.
    public static global::System.Exception wrap(object obj){
        unchecked {
            #line 61 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
            if (( obj is global::System.Exception )) {
                #line 61 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
                return ((global::System.Exception) (obj) );
            }
            #line 63 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
            return new global::haxe.lang.HaxeException(((object) (obj) ));
        }
        #line default
    }

    // The raw value originally thrown from Haxe code.
    public object obj;

    public virtual object getObject(){
        unchecked {
            #line 51 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
            return this.obj;
        }
        #line default
    }

    public virtual string toString(){
        unchecked {
            #line 56 "C:\\HaxeToolkit\\haxe\\std\\cs\\internal\\Exceptions.hx"
            return global::haxe.lang.Runtime.concat("Haxe Exception: ", global::Std.@string(this.obj));
        }
        #line default
    }

    // .NET-side ToString delegates to the Haxe-style toString above.
    public override string ToString(){
        return this.toString();
    }
}
}
| Espigah/HaxeRepo | Learning/HaxeClient/unity/out/src/cs/internal/Exceptions.cs | C# | apache-2.0 | 2,036 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using NUnit.Framework;
namespace DesignScript.Editor.Core.UnitTest
{
class TextBufferMultilineTests
{
    // Buffer under test; rebuilt before every test by Setup().
    TextBuffer textBuffer;

    [SetUp]
    public void Setup()
    {
        // Five newline-terminated declaration lines; GetText coordinates in
        // the tests below are (line, column) pairs into this fixed content.
        textBuffer = new TextBuffer(
            " var a = 12.34;\n" +
            " var b = 56.78;\n" +
            " var c = 90.11;\n" +
            " var d = 12345;\n" +
            " var e = 67890;\n");
    }

    [Test]
    public void TestGetTextInvalidRange01()
    {
        // NOTE(review): despite the "InvalidRange" name, the range here looks
        // valid and the call is expected to succeed — confirm intent.
        string textInRegion = textBuffer.GetText(1, 8, 3, 25);
        Assert.AreEqual("b = 56.78;\n var c = 90.11;\n var d = 12345;\n", textInRegion);
    }
}
}
| DynamoDS/designscript-archive | UIs/Editor/DesignScriptEditor/DesignScriptEditorCore/UnitTests/TextBufferMultilineTests.cs | C# | apache-2.0 | 835 |
/**
* Identify the name of all functions invoked within a given Expression
*
* Descend through the expression, identify every instance of a Function
* expression, and record the name of every function encountered.
*
* General usage is of the form
* ScanForFunctions.scan(myExpression)
*/
package mimir.context;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;

import net.sf.jsqlparser.expression.*;
public class ScanForFunctions extends ExpressionScan {
ArrayList<String> functions = new ArrayList<String>();
public ScanForFunctions(){ super(false); }
public void visit(Function fn) {
functions.add(fn.getName());
super.visit(fn);
}
public List<String> getFunctions(){ return functions; }
/**
* Compute a list of all function names in the given expression
*
* @param e An arbitrary expression
* @returns A list of all function names in e
*/
public static List<String> scan(Expression e){
ScanForFunctions scan = new ScanForFunctions();
e.accept(scan);
return scan.getFunctions();
}
/**
* Determine if a given expression contains any aggregate function calls
*
* @param e An arbitrary expression
* @returns true if e contains any aggregate functions as determined by
* the isAggregate method.
*/
public static boolean hasAggregate(Expression e){
for(String fn : scan(e)){
if(isAggregate(fn)){ return true; }
}
return false;
}
/**
* Determine if the given function name corresponds to a standard aggregate
* function.
*
* @param fn The name of a function
* @returns true if fn corresponds to the name of an aggregate function.
*/
public static boolean isAggregate(String fn)
{
fn = fn.toUpperCase();
return "SUM".equals(fn)
|| "COUNT".equals(fn)
|| "AVG".equals(fn)
|| "STDDEV".equals(fn)
|| "MAX".equals(fn)
|| "MIN".equals(fn);
}
} | sophieyoung717/mimir | src/main/java/mimir/context/ScanForFunctions.java | Java | apache-2.0 | 1,951 |
//// [invalidTaggedTemplateEscapeSequences.ts]
function tag (str: any, ...args: any[]): any {
return str
}
const a = tag`123`
const b = tag`123 ${100}`
const x = tag`\u{hello} ${ 100 } \xtraordinary ${ 200 } wonderful ${ 300 } \uworld`;
const y = `\u{hello} ${ 100 } \xtraordinary ${ 200 } wonderful ${ 300 } \uworld`; // should error with NoSubstitutionTemplate
const z = tag`\u{hello} \xtraordinary wonderful \uworld` // should work with Tagged NoSubstitutionTemplate
const a1 = tag`${ 100 }\0` // \0
const a2 = tag`${ 100 }\00` // \\00
const a3 = tag`${ 100 }\u` // \\u
const a4 = tag`${ 100 }\u0` // \\u0
const a5 = tag`${ 100 }\u00` // \\u00
const a6 = tag`${ 100 }\u000` // \\u000
const a7 = tag`${ 100 }\u0000` // \u0000
const a8 = tag`${ 100 }\u{` // \\u{
const a9 = tag`${ 100 }\u{10FFFF}` // \\u{10FFFF
const a10 = tag`${ 100 }\u{1f622` // \\u{1f622
const a11 = tag`${ 100 }\u{1f622}` // \u{1f622}
const a12 = tag`${ 100 }\x` // \\x
const a13 = tag`${ 100 }\x0` // \\x0
const a14 = tag`${ 100 }\x00` // \x00
//// [invalidTaggedTemplateEscapeSequences.js]
var __makeTemplateObject = (this && this.__makeTemplateObject) || function (cooked, raw) {
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
return cooked;
};
function tag(str) {
var args = [];
for (var _i = 1; _i < arguments.length; _i++) {
args[_i - 1] = arguments[_i];
}
return str;
}
var a = tag(__makeTemplateObject(["123"], ["123"]));
var b = tag(__makeTemplateObject(["123 ", ""], ["123 ", ""]), 100);
var x = tag(__makeTemplateObject([void 0, void 0, " wonderful ", void 0], ["\\u{hello} ", " \\xtraordinary ", " wonderful ", " \\uworld"]), 100, 200, 300);
var y = "hello} ".concat(100, " traordinary ").concat(200, " wonderful ").concat(300, " world"); // should error with NoSubstitutionTemplate
var z = tag(__makeTemplateObject([void 0], ["\\u{hello} \\xtraordinary wonderful \\uworld"])); // should work with Tagged NoSubstitutionTemplate
var a1 = tag(__makeTemplateObject(["", "\0"], ["", "\\0"]), 100); // \0
var a2 = tag(__makeTemplateObject(["", void 0], ["", "\\00"]), 100); // \\00
var a3 = tag(__makeTemplateObject(["", void 0], ["", "\\u"]), 100); // \\u
var a4 = tag(__makeTemplateObject(["", void 0], ["", "\\u0"]), 100); // \\u0
var a5 = tag(__makeTemplateObject(["", void 0], ["", "\\u00"]), 100); // \\u00
var a6 = tag(__makeTemplateObject(["", void 0], ["", "\\u000"]), 100); // \\u000
var a7 = tag(__makeTemplateObject(["", "\0"], ["", "\\u0000"]), 100); // \u0000
var a8 = tag(__makeTemplateObject(["", void 0], ["", "\\u{"]), 100); // \\u{
var a9 = tag(__makeTemplateObject(["", "\uDBFF\uDFFF"], ["", "\\u{10FFFF}"]), 100); // \\u{10FFFF
var a10 = tag(__makeTemplateObject(["", void 0], ["", "\\u{1f622"]), 100); // \\u{1f622
var a11 = tag(__makeTemplateObject(["", "\uD83D\uDE22"], ["", "\\u{1f622}"]), 100); // \u{1f622}
var a12 = tag(__makeTemplateObject(["", void 0], ["", "\\x"]), 100); // \\x
var a13 = tag(__makeTemplateObject(["", void 0], ["", "\\x0"]), 100); // \\x0
var a14 = tag(__makeTemplateObject(["", "\0"], ["", "\\x00"]), 100); // \x00
| microsoft/TypeScript | tests/baselines/reference/invalidTaggedTemplateEscapeSequences(target=es5).js | JavaScript | apache-2.0 | 3,182 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from datetime import datetime
from typing import Any, Dict, Optional, TYPE_CHECKING
from superset import is_feature_enabled
from superset.db_engine_specs.base import BaseEngineSpec
from superset.exceptions import SupersetException
from superset.utils import core as utils
if TYPE_CHECKING:
from superset.connectors.sqla.models import TableColumn
from superset.models.core import Database
logger = logging.getLogger()
class DruidEngineSpec(BaseEngineSpec):
"""Engine spec for Druid.io"""
engine = "druid"
engine_name = "Apache Druid"
allows_joins = is_feature_enabled("DRUID_JOINS")
allows_subqueries = True
_time_grain_expressions = {
None: "{col}",
"PT1S": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT1S')",
"PT5S": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT5S')",
"PT30S": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT30S')",
"PT1M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT1M')",
"PT5M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT5M')",
"PT10M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT10M')",
"PT15M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT15M')",
"PT30M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT30M')",
"PT1H": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT1H')",
"PT6H": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'PT6H')",
"P1D": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'P1D')",
"P1W": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'P1W')",
"P1M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'P1M')",
"P3M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'P3M')",
"P1Y": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'P1Y')",
"P1W/1970-01-03T00:00:00Z": (
"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), "
"'P1D', 1), 'P1W'), 'P1D', 5)"
),
"1969-12-28T00:00:00Z/P1W": (
"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), "
"'P1D', 1), 'P1W'), 'P1D', -1)"
),
}
@classmethod
def alter_new_orm_column(cls, orm_col: "TableColumn") -> None:
if orm_col.column_name == "__time":
orm_col.is_dttm = True
@staticmethod
def get_extra_params(database: "Database") -> Dict[str, Any]:
"""
For Druid, the path to a SSL certificate is placed in `connect_args`.
:param database: database instance from which to extract extras
:raises CertificateException: If certificate is not valid/unparseable
:raises SupersetException: If database extra json payload is unparseable
"""
try:
extra = json.loads(database.extra or "{}")
except json.JSONDecodeError as ex:
raise SupersetException("Unable to parse database extras") from ex
if database.server_cert:
engine_params = extra.get("engine_params", {})
connect_args = engine_params.get("connect_args", {})
connect_args["scheme"] = "https"
path = utils.create_ssl_cert_file(database.server_cert)
connect_args["ssl_verify_cert"] = path
engine_params["connect_args"] = connect_args
extra["engine_params"] = engine_params
return extra
@classmethod
def convert_dttm(
cls, target_type: str, dttm: datetime, db_extra: Optional[Dict[str, Any]] = None
) -> Optional[str]:
tt = target_type.upper()
if tt == utils.TemporalType.DATE:
return f"CAST(TIME_PARSE('{dttm.date().isoformat()}') AS DATE)"
if tt in (utils.TemporalType.DATETIME, utils.TemporalType.TIMESTAMP):
return f"""TIME_PARSE('{dttm.isoformat(timespec="seconds")}')"""
return None
    @classmethod
    def epoch_to_dttm(cls) -> str:
        """
        Convert from number of seconds since the epoch to a timestamp.

        Returns a Druid SQL expression template; the ``{col}`` placeholder is
        presumably substituted with the epoch column name by the caller.
        """
        return "MILLIS_TO_TIMESTAMP({col} * 1000)"
    @classmethod
    def epoch_ms_to_dttm(cls) -> str:
        """
        Convert from number of milliseconds since the epoch to a timestamp.

        Returns a Druid SQL expression template; the ``{col}`` placeholder is
        presumably substituted with the epoch column name by the caller.
        """
        return "MILLIS_TO_TIMESTAMP({col})"
| apache/incubator-superset | superset/db_engine_specs/druid.py | Python | apache-2.0 | 4,908 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.lucene;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.camel.Exchange;
import org.apache.camel.converter.IOConverter;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.util.Version;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LuceneIndexer {
    // NOTE: 'transient' was removed from this declaration -- it only affects
    // serialization of instance fields and is meaningless on a static constant.
    private static final Logger LOG = LoggerFactory.getLogger(LuceneIndexer.class);
    private File sourceDirectory;
    private Analyzer analyzer;
    private NIOFSDirectory niofsDirectory;
    private IndexWriter indexWriter;
    private boolean sourceDirectoryIndexed;
    private boolean indexCreated;

    /**
     * Creates an indexer writing into {@code indexDirectory} (created on demand;
     * falls back to {@code ./indexDirectory} when null) and, if
     * {@code sourceDirectory} is given, indexes its contents recursively.
     *
     * @param sourceDirectory optional directory whose files are indexed up front
     * @param indexDirectory  optional target directory for the Lucene index
     * @param analyzer        the Lucene analyzer used for analyzed fields
     * @throws Exception if the index directory or writer cannot be set up
     */
    public LuceneIndexer(File sourceDirectory, File indexDirectory, Analyzer analyzer) throws Exception {
        if (indexDirectory != null) {
            if (!indexDirectory.exists()) {
                indexDirectory.mkdir();
            }
            this.setNiofsDirectory(new NIOFSDirectory(indexDirectory));
        } else {
            // Default index location relative to the current working directory.
            this.setNiofsDirectory(new NIOFSDirectory(new File("./indexDirectory")));
        }
        this.setAnalyzer(analyzer);
        // sourceDirectoryIndexed is always false on a fresh instance; the flag only
        // guards against double indexing if this constructor is ever refactored.
        if ((sourceDirectory != null) && (!sourceDirectoryIndexed)) {
            this.setSourceDirectory(sourceDirectory);
            add(getSourceDirectory());
            sourceDirectoryIndexed = true;
        }
    }

    /**
     * Indexes a single exchange: its id, every header (converted to String)
     * and its mandatory String body, then commits and closes the writer.
     *
     * @param exchange the Camel exchange to index
     * @throws Exception if type conversion or index writing fails
     */
    public void index(Exchange exchange) throws Exception {
        LOG.debug("Indexing {}", exchange);
        openIndexWriter();
        // NOTE(review): if one of the add() calls below throws, the writer is left
        // open (holding the Lucene write lock). Kept as-is to preserve the existing
        // commit-on-success semantics; consider try/finally with a rollback.
        Map<String, Object> headers = exchange.getIn().getHeaders();
        add("exchangeId", exchange.getExchangeId(), true);
        for (Entry<String, Object> entry : headers.entrySet()) {
            String field = entry.getKey();
            String value = exchange.getContext().getTypeConverter().convertTo(String.class, entry.getValue());
            add(field, value, true);
        }
        add("contents", exchange.getIn().getMandatoryBody(String.class), true);
        closeIndexWriter();
    }

    public NIOFSDirectory getNiofsDirectory() {
        return niofsDirectory;
    }

    public void setNiofsDirectory(NIOFSDirectory niofsDirectory) {
        this.niofsDirectory = niofsDirectory;
    }

    public File getSourceDirectory() {
        return sourceDirectory;
    }

    public void setSourceDirectory(File sourceDirectory) {
        this.sourceDirectory = sourceDirectory;
    }

    public Analyzer getAnalyzer() {
        return analyzer;
    }

    public void setAnalyzer(Analyzer analyzer) {
        this.analyzer = analyzer;
    }

    /**
     * Adds a single stored field to a new document in the open index writer.
     *
     * @param field    the field name
     * @param value    the field value
     * @param analyzed whether the value should be tokenized by the analyzer
     * @throws IOException if the document cannot be added
     */
    private void add(String field, String value, boolean analyzed) throws IOException {
        if (LOG.isTraceEnabled()) {
            LOG.trace("Adding field: {}", field);
            LOG.trace("       value: {}", value);
        }
        Document doc = new Document();
        if (!analyzed) {
            doc.add(new Field(field, value, Field.Store.YES, Field.Index.NOT_ANALYZED));
        } else {
            doc.add(new Field(field, value, Field.Store.YES, Field.Index.ANALYZED));
        }
        indexWriter.addDocument(doc);
    }

    /**
     * Recursively indexes a file or directory. Each regular file is stored with
     * its path (not analyzed) and its contents (analyzed). Unreadable entries
     * are logged and skipped.
     *
     * @param file the file or directory to index
     * @throws IOException if reading or index writing fails
     */
    private void add(File file) throws IOException {
        if (file.canRead()) {
            if (file.isDirectory()) {
                String[] files = file.list();
                if (files != null) {
                    for (String child : files) {
                        add(new File(file.getAbsolutePath() + "/" + child));
                    }
                }
            } else {
                LOG.trace("Adding {}", file);
                openIndexWriter();
                add("path", file.getPath(), false);
                // NOTE(review): this decodes bytes with the platform default charset
                // -- confirm whether UTF-8 should be forced here.
                add("contents", new String(IOConverter.toByteArray(file)), true);
                closeIndexWriter();
                LOG.trace("Added {} successfully", file);
            }
        } else {
            // Use parameterized logging instead of string concatenation.
            LOG.warn("Directory/File {} could not be read."
                + " This directory will not be indexed. Please check permissions and rebuild indexes.",
                file.getAbsolutePath());
        }
    }

    /**
     * Opens the index writer: CREATE mode on first use (building a fresh index),
     * APPEND mode afterwards.
     *
     * @throws IOException if the writer cannot be opened
     */
    private void openIndexWriter() throws IOException {
        IndexWriterConfig indexWriterConfig;
        if (!indexCreated) {
            indexWriterConfig = new IndexWriterConfig(Version.LUCENE_35, getAnalyzer()).setOpenMode(OpenMode.CREATE);
            indexWriter = new IndexWriter(niofsDirectory, indexWriterConfig);
            indexCreated = true;
            return;
        }
        indexWriterConfig = new IndexWriterConfig(Version.LUCENE_35, getAnalyzer()).setOpenMode(OpenMode.APPEND);
        indexWriter = new IndexWriter(niofsDirectory, indexWriterConfig);
    }

    /**
     * Commits pending documents and closes the writer, releasing the write lock.
     *
     * @throws IOException if the commit or close fails
     */
    private void closeIndexWriter() throws IOException {
        indexWriter.commit();
        indexWriter.close();
    }
}
| aaronwalker/camel | components/camel-lucene/src/main/java/org/apache/camel/component/lucene/LuceneIndexer.java | Java | apache-2.0 | 5,933 |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace ZipCodeFinal.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
    public class ObjectGenerator
    {
        // Number of sample elements produced for arrays, collections and dictionaries.
        internal const int DefaultCollectionSize = 2;
        // Delegate used for simple/primitive types (int, string, DateTime, ...).
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }
        // Core dispatcher. createdObjectReferences remembers instances already built
        // within this call graph so circular object graphs terminate.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }
                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }
                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }
            return null;
        }
        // Handles generic types: Nullable<T>, KeyValuePair, Tuple, the common
        // collection/dictionary/queryable interfaces, and finally generic POCOs.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }
            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }
            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }
            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }
                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }
                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }
            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }
                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }
            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
            return null;
        }
        // Builds a Tuple<...> by generating each type argument; null when no
        // component could be generated.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }
        // True for any of the Tuple<> arities (1..8).
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }
        // Builds a KeyValuePair<K,V> from generated key and value samples.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }
        // Builds a sample array; null when every element came back null.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }
        // Builds a sample dictionary via reflection; Add/Contains are looked up by
        // name so both generic and non-generic dictionary shapes are supported.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }
            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }
            return result;
        }
        // Picks the first declared enum member as the sample value.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }
        // Builds an IQueryable (generic or not) by wrapping a generated list/array.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }
            return Queryable.AsQueryable((IEnumerable)list);
        }
        // Builds a sample collection via its Add method; null when every element
        // came back null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }
        // Nullable<T> samples are just samples of the underlying T.
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }
        // Builds a POCO: default-constructs it, registers it (for circular refs),
        // then populates public properties and fields.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;
            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }
            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }
                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }
        // Fills every writable public instance property with a generated sample.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }
        // Fills every public instance field with a generated sample.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }
        // Factory table for simple types. _index increments per generated value so
        // successive samples of the same type are distinguishable.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }
            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }
            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
} | awslabs/aws-sdk-net-samples | Talks/vslive-2015/ZipCodes/ZipCodeFinal/Areas/HelpPage/SampleGeneration/ObjectGenerator.cs | C# | apache-2.0 | 19,494 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.api.bigdecimal;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.MockitoAnnotations.initMocks;
import java.math.BigDecimal;
import java.util.Comparator;
import org.assertj.core.api.BigDecimalAssert;
import org.assertj.core.api.BigDecimalAssertBaseTest;
import org.assertj.core.internal.BigDecimals;
import org.assertj.core.internal.Objects;
import org.junit.jupiter.api.BeforeEach;
import org.mockito.Mock;
/**
* Tests for <code>{@link BigDecimalAssert#usingDefaultComparator()}</code>.
*
* @author Joel Costigliola
* @author Mikhail Mazursky
*/
public class BigDecimalAssert_usingDefaultComparator_Test extends BigDecimalAssertBaseTest {
  // Custom comparator installed up front so usingDefaultComparator() has something to reset.
  @Mock
  private Comparator<BigDecimal> comparator;
  @BeforeEach
  public void before() {
    // Populate the @Mock field, then switch the assertion onto the custom comparator;
    // the method under test must undo this.
    initMocks(this);
    assertions.usingComparator(comparator);
  }
  @Override
  protected BigDecimalAssert invoke_api_method() {
    // Template method called by the base test: invoke the API under test.
    return assertions.usingDefaultComparator();
  }
  @Override
  protected void verify_internal_effects() {
    // After the reset, the assertion must be back on the shared default singletons.
    assertThat(Objects.instance()).isSameAs(getObjects(assertions));
    assertThat(BigDecimals.instance()).isSameAs(getComparables(assertions));
  }
}
| xasx/assertj-core | src/test/java/org/assertj/core/api/bigdecimal/BigDecimalAssert_usingDefaultComparator_Test.java | Java | apache-2.0 | 1,806 |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Compute.V1.Snippets
{
// [START compute_v1_generated_InstanceGroupManagers_Insert_async]
using Google.Cloud.Compute.V1;
using System.Threading.Tasks;
using lro = Google.LongRunning;
    public sealed partial class GeneratedInstanceGroupManagersClientSnippets
    {
        /// <summary>Snippet for InsertAsync</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public async Task InsertRequestObjectAsync()
        {
            // Create client
            InstanceGroupManagersClient instanceGroupManagersClient = await InstanceGroupManagersClient.CreateAsync();
            // Initialize request argument(s)
            // NOTE(review): the empty strings are placeholders -- substitute real
            // zone/project/request-id values before running this snippet.
            InsertInstanceGroupManagerRequest request = new InsertInstanceGroupManagerRequest
            {
                Zone = "",
                RequestId = "",
                Project = "",
                InstanceGroupManagerResource = new InstanceGroupManager(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = await instanceGroupManagersClient.InsertAsync(request);
            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;
            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await instanceGroupManagersClient.PollOnceInsertAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
        }
    }
// [END compute_v1_generated_InstanceGroupManagers_Insert_async]
}
| googleapis/google-cloud-dotnet | apis/Google.Cloud.Compute.V1/Google.Cloud.Compute.V1.GeneratedSnippets/InstanceGroupManagersClient.InsertRequestObjectAsyncSnippet.g.cs | C# | apache-2.0 | 2,871 |
<?php
// German ("de") translations for the admin module's module-info view.
// Keys are the original English source strings; values are the translations.
return array (
    '<strong>Module</strong> details' => '<strong>Modul</strong> Informationen ',
    'This module doesn\'t provide further informations.' => 'Dieses Modul stellt keine weiteren Informationen zur Verfügung.',
);
| calonso-conabio/intranet | protected/humhub/modules/admin/messages/de/views_module_info.php | PHP | apache-2.0 | 230 |
package de.terrestris.shoguncore.dao;
import de.terrestris.shoguncore.model.layer.source.LayerDataSource;
import org.springframework.stereotype.Repository;
@Repository("layerDataSourceDao")
public class LayerDataSourceDao<E extends LayerDataSource> extends
        GenericHibernateDao<E, Integer> {
    /**
     * Public default constructor for this DAO. Binds the DAO to the
     * {@link LayerDataSource} entity class.
     */
    @SuppressWarnings("unchecked")
    public LayerDataSourceDao() {
        super((Class<E>) LayerDataSource.class);
    }
    /**
     * Constructor that has to be called by subclasses.
     *
     * @param clazz The concrete entity class managed by the subclass DAO.
     */
    protected LayerDataSourceDao(Class<E> clazz) {
        super(clazz);
    }
}
| ahennr/shogun2 | src/shogun-core-main/src/main/java/de/terrestris/shoguncore/dao/LayerDataSourceDao.java | Java | apache-2.0 | 668 |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package handlers
import (
"context"
"encoding/hex"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/url"
goruntime "runtime"
"strings"
"time"
"k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
metav1beta1 "k8s.io/apimachinery/pkg/apis/meta/v1beta1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apiserver/pkg/admission"
"k8s.io/apiserver/pkg/authorization/authorizer"
"k8s.io/apiserver/pkg/endpoints/handlers/fieldmanager"
"k8s.io/apiserver/pkg/endpoints/handlers/responsewriters"
"k8s.io/apiserver/pkg/endpoints/metrics"
"k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/rest"
"k8s.io/klog"
utiltrace "k8s.io/utils/trace"
)
// RequestScope encapsulates common fields across all RESTful handler methods.
type RequestScope struct {
	// Object naming, serialization and conversion machinery shared by the handlers.
	Namer ScopeNamer
	Serializer runtime.NegotiatedSerializer
	runtime.ParameterCodec
	Creater runtime.ObjectCreater
	Convertor runtime.ObjectConvertor
	Defaulter runtime.ObjectDefaulter
	Typer runtime.ObjectTyper
	UnsafeConvertor runtime.ObjectConvertor
	Authorizer authorizer.Authorizer
	Trace *utiltrace.Trace
	// TableConvertor, when non-nil, enables Table output (see AllowsConversion).
	TableConvertor rest.TableConvertor
	FieldManager *fieldmanager.FieldManager
	// Identity of the resource/kind being served, including subresource if any.
	Resource schema.GroupVersionResource
	Kind schema.GroupVersionKind
	Subresource string
	MetaGroupVersion schema.GroupVersion
	// HubGroupVersion indicates what version objects read from etcd or incoming requests should be converted to for in-memory handling.
	HubGroupVersion schema.GroupVersion
	MaxRequestBodyBytes int64
}
// err negotiates a serialization format and writes the given error as the
// response for this scope's group/version.
func (scope *RequestScope) err(err error, w http.ResponseWriter, req *http.Request) {
	responsewriters.ErrorNegotiated(err, scope.Serializer, scope.Kind.GroupVersion(), w, req)
}
// AllowsConversion reports whether this scope can serve responses converted to
// the given meta.k8s.io/v1beta1 kind.
func (scope *RequestScope) AllowsConversion(gvk schema.GroupVersionKind) bool {
	// TODO: this is temporary, replace with an abstraction calculated at endpoint installation time
	if gvk.GroupVersion() != metav1beta1.SchemeGroupVersion {
		return false
	}
	switch gvk.Kind {
	case "Table":
		// Table output is only possible when a converter was installed.
		return scope.TableConvertor != nil
	case "PartialObjectMetadata", "PartialObjectMetadataList":
		// TODO: should delineate between lists and non-list endpoints
		return true
	}
	return false
}
// AllowsServerVersion reports whether the requested server-side conversion
// version matches this scope's meta group version.
func (scope *RequestScope) AllowsServerVersion(version string) bool {
	return version == scope.MetaGroupVersion.Version
}
// AllowsStreamSchema reports whether the named streaming schema is supported;
// only "watch" is accepted.
func (scope *RequestScope) AllowsStreamSchema(s string) bool {
	return s == "watch"
}
// ConnectResource returns a function that handles a connect request on a rest.Storage object.
// NOTE(review): restPath is currently unused in this body -- confirm whether it can be dropped.
func ConnectResource(connecter rest.Connecter, scope RequestScope, admit admission.Interface, restPath string, isSubresource bool) http.HandlerFunc {
	return func(w http.ResponseWriter, req *http.Request) {
		// CONNECT has no dry-run semantics; reject the query parameter outright.
		if isDryRun(req.URL) {
			scope.err(errors.NewBadRequest("dryRun is not supported"), w, req)
			return
		}
		namespace, name, err := scope.Namer.Name(req)
		if err != nil {
			scope.err(err, w, req)
			return
		}
		ctx := req.Context()
		ctx = request.WithNamespace(ctx, namespace)
		// Wrap admission so its decisions are recorded in the audit event.
		ae := request.AuditEventFrom(ctx)
		admit = admission.WithAudit(admit, ae)
		// Decode per-connector options from the request (path/query).
		opts, subpath, subpathKey := connecter.NewConnectOptions()
		if err := getRequestOptions(req, scope, opts, subpath, subpathKey, isSubresource); err != nil {
			err = errors.NewBadRequest(err.Error())
			scope.err(err, w, req)
			return
		}
		if admit != nil && admit.Handles(admission.Connect) {
			userInfo, _ := request.UserFrom(ctx)
			// TODO: remove the mutating admission here as soon as we have ported all plugin that handle CONNECT
			// Mutating admission runs first, then validating admission.
			if mutatingAdmission, ok := admit.(admission.MutationInterface); ok {
				err = mutatingAdmission.Admit(admission.NewAttributesRecord(opts, nil, scope.Kind, namespace, name, scope.Resource, scope.Subresource, admission.Connect, false, userInfo))
				if err != nil {
					scope.err(err, w, req)
					return
				}
			}
			if validatingAdmission, ok := admit.(admission.ValidationInterface); ok {
				err = validatingAdmission.Validate(admission.NewAttributesRecord(opts, nil, scope.Kind, namespace, name, scope.Resource, scope.Subresource, admission.Connect, false, userInfo))
				if err != nil {
					scope.err(err, w, req)
					return
				}
			}
		}
		requestInfo, _ := request.RequestInfoFrom(ctx)
		// CONNECT requests are long-running; record them as such in metrics while serving.
		metrics.RecordLongRunning(req, requestInfo, metrics.APIServerComponent, func() {
			handler, err := connecter.Connect(ctx, name, opts, &responder{scope: scope, req: req, w: w})
			if err != nil {
				scope.err(err, w, req)
				return
			}
			handler.ServeHTTP(w, req)
		})
	}
}
// responder implements rest.Responder for assisting a connector in writing objects or errors.
// It captures the request/response pair plus the scope needed for serialization.
type responder struct {
	scope RequestScope
	req *http.Request
	w http.ResponseWriter
}
// Object serializes obj with the scope's negotiated serializer and writes it
// with the given HTTP status code.
func (r *responder) Object(statusCode int, obj runtime.Object) {
	responsewriters.WriteObject(statusCode, r.scope.Kind.GroupVersion(), r.scope.Serializer, obj, r.w, r.req)
}
// Error writes err as a negotiated error response via the scope.
func (r *responder) Error(err error) {
	r.scope.err(err, r.w, r.req)
}
// resultFunc is a function that returns a rest result and can be run in a goroutine
type resultFunc func() (runtime.Object, error)
// finishRequest makes a given resultFunc asynchronous and handles errors returned by the response.
// An api.Status object with status != success is considered an "error", which interrupts the normal response flow.
// It returns a timeout error if fn does not complete within the given duration,
// and re-raises any panic from fn on the calling goroutine.
func finishRequest(timeout time.Duration, fn resultFunc) (result runtime.Object, err error) {
	// these channels need to be buffered to prevent the goroutine below from hanging indefinitely
	// when the select statement reads something other than the one the goroutine sends on.
	ch := make(chan runtime.Object, 1)
	errCh := make(chan error, 1)
	panicCh := make(chan interface{}, 1)
	go func() {
		// panics don't cross goroutine boundaries, so we have to handle ourselves
		defer func() {
			panicReason := recover()
			if panicReason != nil {
				// Capture the worker's stack trace and attach it to the panic value.
				const size = 64 << 10
				buf := make([]byte, size)
				buf = buf[:goruntime.Stack(buf, false)]
				panicReason = strings.TrimSuffix(fmt.Sprintf("%v\n%s", panicReason, string(buf)), "\n")
				// Propagate to parent goroutine
				panicCh <- panicReason
			}
		}()
		if result, err := fn(); err != nil {
			errCh <- err
		} else {
			ch <- result
		}
	}()
	select {
	case result = <-ch:
		// A non-success Status result is treated as an error.
		if status, ok := result.(*metav1.Status); ok {
			if status.Status != metav1.StatusSuccess {
				return nil, errors.FromObject(status)
			}
		}
		return result, nil
	case err = <-errCh:
		return nil, err
	case p := <-panicCh:
		panic(p)
	case <-time.After(timeout):
		return nil, errors.NewTimeoutError(fmt.Sprintf("request did not complete within requested timeout %s", timeout), 0)
	}
}
// transformDecodeError adds additional information when a decode fails, returning a
// BadRequest error that names the kind the body failed to decode into.
func transformDecodeError(typer runtime.ObjectTyper, baseErr error, into runtime.Object, gvk *schema.GroupVersionKind, body []byte) error {
	objGVKs, _, err := typer.ObjectKinds(into)
	if err != nil {
		// Wrap typer failures as internal errors rather than leaking a raw error
		// that carries no API status information.
		return errors.NewInternalError(err)
	}
	if len(objGVKs) == 0 {
		// Defensive: ObjectKinds should return at least one kind when err is nil,
		// but indexing an empty slice would panic and kill the request handler.
		return errors.NewInternalError(fmt.Errorf("no kind found for target object: %v", baseErr))
	}
	objGVK := objGVKs[0]
	if gvk != nil && len(gvk.Kind) > 0 {
		return errors.NewBadRequest(fmt.Sprintf("%s in version %q cannot be handled as a %s: %v", gvk.Kind, gvk.Version, objGVK.Kind, baseErr))
	}
	summary := summarizeData(body, 30)
	return errors.NewBadRequest(fmt.Sprintf("the object provided is unrecognized (must be of type %s): %v (%s)", objGVK.Kind, baseErr, summary))
}
// setSelfLink sets the self link of an object (or the child items in a list) to the base URL of the request
// plus the path and query generated by the provided linkFunc
func setSelfLink(obj runtime.Object, requestInfo *request.RequestInfo, namer ScopeNamer) error {
	// TODO: SelfLink generation should return a full URL?
	uri, err := namer.GenerateLink(requestInfo, obj)
	if err != nil {
		// NOTE(review): link-generation failures are deliberately swallowed (nil is
		// returned, not err), making self links best-effort — confirm this is intended.
		return nil
	}
	return namer.SetSelfLink(obj, uri)
}
// hasUID reports whether obj carries a non-empty metadata UID. A nil object
// reports false without error; an object without accessible metadata yields an
// internal error.
func hasUID(obj runtime.Object) (bool, error) {
	if obj == nil {
		return false, nil
	}
	accessor, err := meta.Accessor(obj)
	if err != nil {
		return false, errors.NewInternalError(err)
	}
	return len(accessor.GetUID()) != 0, nil
}
// checkName checks the provided name against the request
func checkName(obj runtime.Object, name, namespace string, namer ScopeNamer) error {
	objNamespace, objName, err := namer.ObjectName(obj)
	switch {
	case err != nil:
		return errors.NewBadRequest(fmt.Sprintf(
			"the name of the object (%s based on URL) was undeterminable: %v", name, err))
	case objName != name:
		return errors.NewBadRequest(fmt.Sprintf(
			"the name of the object (%s) does not match the name on the URL (%s)", objName, name))
	case len(namespace) > 0 && len(objNamespace) > 0 && objNamespace != namespace:
		// Namespace mismatch is only an error when both sides actually declare one.
		return errors.NewBadRequest(fmt.Sprintf(
			"the namespace of the object (%s) does not match the namespace on the request (%s)", objNamespace, namespace))
	}
	return nil
}
// setObjectSelfLink sets the self link of an object as needed.
// For list objects it sets the list's own link and then the link of every item.
func setObjectSelfLink(ctx context.Context, obj runtime.Object, req *http.Request, namer ScopeNamer) error {
	// Non-list objects: derive a single self link from the request info.
	if !meta.IsListType(obj) {
		requestInfo, ok := request.RequestInfoFrom(ctx)
		if !ok {
			return fmt.Errorf("missing requestInfo")
		}
		return setSelfLink(obj, requestInfo, namer)
	}
	uri, err := namer.GenerateListLink(req)
	if err != nil {
		return err
	}
	// Failure to set the list's own self link is logged but not fatal.
	if err := namer.SetSelfLink(obj, uri); err != nil {
		klog.V(4).Infof("Unable to set self link on object: %v", err)
	}
	requestInfo, ok := request.RequestInfoFrom(ctx)
	if !ok {
		return fmt.Errorf("missing requestInfo")
	}
	count := 0
	err = meta.EachListItem(obj, func(obj runtime.Object) error {
		count++
		return setSelfLink(obj, requestInfo, namer)
	})
	// An empty list is normalized to a non-nil empty slice — presumably so it
	// serializes as [] rather than null; confirm against the serializer behavior.
	if count == 0 {
		if err := meta.SetList(obj, []runtime.Object{}); err != nil {
			return err
		}
	}
	return err
}
// summarizeData renders a short human-readable preview of data for error messages.
// JSON-looking payloads (first byte '{') are shown as text; anything else is
// hex-encoded. Previews longer than maxLength are truncated with " ...".
func summarizeData(data []byte, maxLength int) string {
	if len(data) == 0 {
		return "<empty>"
	}
	if data[0] == '{' {
		if len(data) > maxLength {
			return string(data[:maxLength]) + " ..."
		}
		return string(data)
	}
	if len(data) > maxLength {
		return hex.EncodeToString(data[:maxLength]) + " ..."
	}
	return hex.EncodeToString(data)
}
// limitedReadBody reads and returns the request body, enforcing an optional size
// limit. A limit of zero or less means unlimited; a body larger than the limit
// yields a RequestEntityTooLarge error. The body is always closed.
func limitedReadBody(req *http.Request, limit int64) ([]byte, error) {
	defer req.Body.Close()
	if limit <= 0 {
		return ioutil.ReadAll(req.Body)
	}
	// Read one byte beyond the limit so an over-long body is detectable.
	limited := &io.LimitedReader{R: req.Body, N: limit + 1}
	data, err := ioutil.ReadAll(limited)
	switch {
	case err != nil:
		return nil, err
	case limited.N <= 0:
		return nil, errors.NewRequestEntityTooLargeError(fmt.Sprintf("limit is %d", limit))
	default:
		return data, nil
	}
}
// parseTimeout converts a duration string into a time.Duration, falling back to
// 30 seconds when the string is empty or unparsable (logging the parse failure).
func parseTimeout(str string) time.Duration {
	const defaultTimeout = 30 * time.Second
	if str == "" {
		return defaultTimeout
	}
	timeout, err := time.ParseDuration(str)
	if err != nil {
		klog.Errorf("Failed to parse %q: %v", str, err)
		return defaultTimeout
	}
	return timeout
}
func isDryRun(url *url.URL) bool {
return len(url.Query()["dryRun"]) != 0
}
| vmware/kubernetes | staging/src/k8s.io/apiserver/pkg/endpoints/handlers/rest.go | GO | apache-2.0 | 11,462 |
/*
* Copyright (C) 2017 Ignite Realtime Foundation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.jivesoftware.openfire.pep;
import org.jivesoftware.openfire.pubsub.PubSubServiceInfo;
import org.xmpp.packet.JID;
/**
 * A PubSubService manager that is specific to the implementation of XEP-0163: Personal Eventing Protocol.
 *
 * @author Guus der Kinderen, guus.der.kinderen@gmail.com
 */
public class PEPServiceInfo extends PubSubServiceInfo
{
    /**
     * Creates service info for the PEP service owned by the given user.
     *
     * @param owner the JID of the PEP service owner; only its bare-JID form is used for the lookup.
     */
    public PEPServiceInfo( JID owner )
    {
        // A fresh PEPServiceManager is instantiated solely to look up the owner's service.
        super( new PEPServiceManager().getPEPService( owner.toBareJID() ) );
    }
}
| speedy01/Openfire | xmppserver/src/main/java/org/jivesoftware/openfire/pep/PEPServiceInfo.java | Java | apache-2.0 | 1,128 |
package pl.temomuko.autostoprace.ui.teamslocationsmap.adapter.map;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.model.Marker;
import javax.inject.Inject;
import butterknife.BindView;
import butterknife.ButterKnife;
import pl.temomuko.autostoprace.R;
import pl.temomuko.autostoprace.injection.AppContext;
/**
 * Created by Rafał Naniewicz on 02.04.2016.
 *
 * Supplies the contents of the info window shown for a team-location map marker:
 * the marker's title is rendered as the record message and its snippet as the date.
 */
public class TeamLocationInfoWindowAdapter implements GoogleMap.InfoWindowAdapter {

    // Views bound (via ButterKnife) from the inflated info-window layout.
    @BindView(R.id.tv_location_record_date) TextView mLocationRecordDateTextView;
    @BindView(R.id.tv_location_record_message) TextView mLocationRecordMessageTextView;

    // Inflated once in the constructor and reused for every marker; its text views
    // are overwritten on each getInfoContents call.
    private final View mContentsView;

    @Inject
    public TeamLocationInfoWindowAdapter(@AppContext Context context) {
        mContentsView = LayoutInflater.from(context).inflate(R.layout.adapter_team_location_info_window, null, false);
        ButterKnife.bind(this, mContentsView);
    }

    @Override
    public View getInfoWindow(Marker marker) {
        // Returning null keeps the map's default window frame; only the contents are customized.
        return null;
    }

    @Override
    public View getInfoContents(Marker marker) {
        mLocationRecordMessageTextView.setText(marker.getTitle());
        mLocationRecordDateTextView.setText(marker.getSnippet());
        return mContentsView;
    }
}
| TeMoMuKo/AutoStopRace | app/src/main/java/pl/temomuko/autostoprace/ui/teamslocationsmap/adapter/map/TeamLocationInfoWindowAdapter.java | Java | apache-2.0 | 1,400 |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.core.framework.persistence.jpa.metadata;
import javax.persistence.CascadeType;
/**
 * JPA relationship metadata for a many-to-one mapping. Carries the target entity,
 * cascade types, fetch mode, optionality and join-column descriptors inherited from
 * {@code ObjectDescriptor}, and renders them for debugging via {@link #toString()}.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public class ManyToOneDescriptor extends ObjectDescriptor implements java.io.Serializable {

    private static final long serialVersionUID = -1277621663465909764L;

    /**
     * Render the descriptor (target entity, cascade types, fetch mode, optionality
     * and any join columns) on a single line for logging/debugging.
     */
    @Override
    public String toString() {
        // StringBuilder instead of StringBuffer: the buffer never escapes this
        // method, so the synchronized StringBuffer originally used is unnecessary.
        StringBuilder sb = new StringBuilder();
        sb.append("ManyToOneDescriptor = [ ");
        sb.append("targetEntity:").append(targetEntity.getName()).append(", ");
        sb.append("cascade = { ");
        for (CascadeType ct : cascade) {
            sb.append(ct).append(" ");
        }
        sb.append("}, ");
        sb.append("fetch:").append(fetch).append(", ");
        sb.append("optional:").append(optional);
        if (!joinColumnDescriptors.isEmpty()) {
            sb.append(", join columns = { ");
            for (JoinColumnDescriptor joinColumnDescriptor : joinColumnDescriptors) {
                sb.append(" jc = { ");
                sb.append("name:").append(joinColumnDescriptor.getName()).append(", ");
                sb.append("insertable:").append(joinColumnDescriptor.isInsertable()).append(", ");
                sb.append("nullable:").append(joinColumnDescriptor.isNullable()).append(", ");
                sb.append("unique:").append(joinColumnDescriptor.isUnique()).append(", ");
                sb.append("updateable:").append(joinColumnDescriptor.isUpdateable());
                sb.append(" }");
            }
            sb.append(" } ");
        }
        sb.append(" ]");
        return sb.toString();
    }
}
| ua-eas/ksd-kc5.2.1-rice2.3.6-ua | rice-middleware/core/framework/src/main/java/org/kuali/rice/core/framework/persistence/jpa/metadata/ManyToOneDescriptor.java | Java | apache-2.0 | 2,029 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.obrplugin;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.regex.Pattern;
import org.apache.felix.bundlerepository.Resource;
import org.apache.felix.bundlerepository.impl.DataModelHelperImpl;
import org.apache.felix.bundlerepository.impl.PullParser;
import org.apache.felix.bundlerepository.impl.RepositoryImpl;
import org.apache.felix.bundlerepository.impl.RepositoryParser;
import org.apache.felix.bundlerepository.impl.ResourceImpl;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.logging.Log;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.util.FileUtils;
import org.kxml2.io.KXmlParser;
import org.xmlpull.v1.XmlPullParser;
/**
 * This class parses the old repository.xml file, builds the bundle resource description
 * and updates the repository.
 *
 * @author <a href="mailto:dev@felix.apache.org">Felix Project Team</a>
 */
public class ObrUpdate
{
    /** Matches the timestamped part of a unique (non-SNAPSHOT) deployed version. */
    private static final Pattern TIMESTAMP = Pattern.compile( "-[0-9]{8}\\.[0-9]{6}-[0-9]+" );

    /** Reflective handle to the non-public RepositoryImpl.setURI method; null when unavailable. */
    private static Method setURI;

    static
    {
        try
        {
            setURI = RepositoryImpl.class.getDeclaredMethod( "setURI", String.class );
            setURI.setAccessible( true );
        }
        catch ( Exception e )
        {
            // Running against a bundlerepository version without setURI: degrade gracefully.
            setURI = null;
        }
    }

    /**
     * logger for this plugin.
     */
    private Log m_logger;

    /**
     * name and path to the repository descriptor file.
     */
    private URI m_repositoryXml;

    /**
     * name and path to the obr.xml file.
     */
    private URI m_obrXml;

    /**
     * maven project description.
     */
    private MavenProject m_project;

    /**
     * user configuration information.
     */
    private Config m_userConfig;

    /**
     * root on parent document.
     */
    private RepositoryImpl m_repository;

    /**
     * used to store bundle information.
     */
    private ResourceImpl m_resourceBundle;

    /**
     * base URI used to relativize bundle URIs.
     */
    private URI m_baseURI;

    /**
     * initialize information.
     * @param repositoryXml path to the repository descriptor file
     * @param obrXml path and filename to the obr.xml file
     * @param project maven project description
     * @param mavenRepositoryPath path to the local maven repository
     * @param userConfig user information
     * @param logger plugin logger
     */
    public ObrUpdate( URI repositoryXml, URI obrXml, MavenProject project, String mavenRepositoryPath,
        Config userConfig, Log logger )
    {
        m_repositoryXml = repositoryXml;
        m_obrXml = obrXml;
        m_project = project;
        m_logger = logger;
        m_userConfig = userConfig;
        // Remote deployments relativize bundle URIs against the local maven repository,
        // local installs against the repository descriptor itself.
        if ( userConfig.isRemoteFile() )
        {
            m_baseURI = ObrUtils.toFileURI( mavenRepositoryPath );
        }
        else
        {
            m_baseURI = m_repositoryXml;
        }
    }

    /**
     * update the repository descriptor file. parse the old repository descriptor file,
     * get the old reference of the bundle or determine the id for a new bundle, extract
     * information from bindex set the new information in descriptor file and save it.
     *
     * @param bundleJar path to the bundle jar file
     * @param sourceJar path to the source jar file
     * @param docJar path to the docs jar file
     *
     * @throws MojoExecutionException if the plugin failed
     */
    public void updateRepository( URI bundleJar, URI sourceJar, URI docJar ) throws MojoExecutionException
    {
        m_logger.debug( " (f) repositoryXml = " + m_repositoryXml );
        m_logger.debug( " (f) bundleJar = " + bundleJar );
        m_logger.debug( " (f) sourceJar = " + sourceJar );
        m_logger.debug( " (f) docJar = " + docJar );
        m_logger.debug( " (f) obrXml = " + m_obrXml );

        // parseRepositoryXml() must have been called first; bail out quietly otherwise.
        if ( m_repository == null )
        {
            return;
        }

        // get the file size
        File bundleFile = new File( bundleJar );
        if ( !bundleFile.exists() )
        {
            // Unique snapshot deployments have a timestamped name on disk; retry with
            // the -SNAPSHOT form before giving up.
            String snapshot = TIMESTAMP.matcher( bundleFile.getName() ).replaceFirst( "-SNAPSHOT" );
            bundleFile = new File( bundleFile.getParentFile(), snapshot );
        }
        if ( bundleFile.exists() )
        {
            URI resourceURI = m_userConfig.getRemoteBundle();
            if ( null == resourceURI )
            {
                resourceURI = bundleJar;
                if ( m_userConfig.isPathRelative() )
                {
                    resourceURI = ObrUtils.getRelativeURI( m_baseURI, resourceURI );
                }
            }

            if ( m_userConfig.isRemoteFile() )
            {
                m_logger.info( "Deploying " + resourceURI );
            }
            else
            {
                m_logger.info( "Installing " + resourceURI );
            }

            try
            {
                m_resourceBundle = ( ResourceImpl ) new DataModelHelperImpl().createResource( bundleFile.toURI().toURL() );
                if ( m_resourceBundle == null )
                {
                    return;
                }
            }
            catch ( IOException e )
            {
                throw new MojoExecutionException( "Unable to load resource information", e );
            }

            m_resourceBundle.put( Resource.SIZE, String.valueOf( bundleFile.length() ) );
            m_resourceBundle.put( Resource.URI, resourceURI.toASCIIString() );
        }
        else
        {
            m_logger.error( "file doesn't exist: " + bundleJar );
            return;
        }

        // parse the obr.xml file
        if ( m_obrXml != null )
        {
            m_logger.info( "Adding " + m_obrXml );
            // URL url = getClass().getResource("/SchemaObr.xsd");
            // TODO validate obr.xml file

            // add contents to resource bundle
            parseObrXml();
        }

        String sourcePath = relativisePath( sourceJar );
        String docPath = relativisePath( docJar );

        // m_resourceBundle.construct( m_project, bindexExtractor, sourcePath, docPath );
        // TODO: rebuild wrt m_project
        m_repository.addResource( m_resourceBundle );
        m_repository.setLastModified( System.currentTimeMillis() );
    }

    /**
     * Relativize the given URI against the base URI when the user asked for relative
     * paths; returns null for a null input.
     */
    private String relativisePath( URI uri )
    {
        if ( null != uri )
        {
            if ( m_userConfig.isPathRelative() )
            {
                return ObrUtils.getRelativeURI( m_baseURI, uri ).toASCIIString();
            }
            return uri.toASCIIString();
        }
        return null;
    }

    /**
     * Serialize the in-memory repository to a temporary file and atomically move it
     * over the repository descriptor. The writer is always closed, and every failure
     * is rethrown with its cause attached.
     *
     * @throws MojoExecutionException if the metadata cannot be written or installed
     */
    public void writeRepositoryXml() throws MojoExecutionException
    {
        m_logger.info( "Writing OBR metadata" );

        File file;
        Writer writer;
        try
        {
            file = File.createTempFile( "repository", ".xml" );
            writer = new OutputStreamWriter( new FileOutputStream( file ) );
        }
        catch ( IOException e )
        {
            // NOTE: the previous code logged file.getName() here, which threw a
            // NullPointerException when createTempFile itself failed (file was null).
            m_logger.error( "Unable to create temporary repository file", e );
            throw new MojoExecutionException( "Unable to create temporary repository file", e );
        }

        try
        {
            new DataModelHelperImpl().writeRepository( m_repository, writer );
            writer.flush();
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Unable to write repository xml", e );
        }
        finally
        {
            try
            {
                writer.close();
            }
            catch ( IOException ignored )
            {
                // best effort: content already flushed, or writing failed above
            }
        }

        try
        {
            File outputFile = new File( m_repositoryXml );
            outputFile.getParentFile().mkdirs();
            FileUtils.rename( file, outputFile );
        }
        catch ( IOException e )
        {
            // Preserve the cause (the previous code threw a bare "IOException" message).
            throw new MojoExecutionException( "Unable to install repository xml: " + m_repositoryXml, e );
        }
    }

    /**
     * Parse the repository descriptor file.
     *
     * @throws MojoExecutionException if the plugin failed
     */
    public void parseRepositoryXml() throws MojoExecutionException
    {
        File fout = new File( m_repositoryXml );
        if ( !fout.exists() )
        {
            // No existing descriptor: start from an empty repository and persist it.
            m_repository = new RepositoryImpl();
            writeRepositoryXml();
        }
        else
        {
            try
            {
                m_repository = ( RepositoryImpl ) new DataModelHelperImpl().repository( m_repositoryXml.toURL() );
                // Clear the parsed URI (when reflection is available) so resource URIs
                // are not resolved against the descriptor's own location.
                if ( setURI != null )
                {
                    setURI.invoke( m_repository, ( String ) null );
                }
            }
            catch ( Exception e )
            {
                throw new MojoExecutionException( "Unable to read repository xml: " + m_repositoryXml, e );
            }
        }
    }

    /**
     * put the information from obr.xml into ressourceBundle object.
     */
    private void parseObrXml() throws MojoExecutionException
    {
        try
        {
            InputStream is = new FileInputStream( new File( m_obrXml ) );
            try
            {
                KXmlParser kxp = new KXmlParser();
                kxp.setInput( is, null );
                kxp.nextTag(); // skip top level element
                kxp.nextTag(); // go to first child element
                parseObrXml( kxp );
            }
            finally
            {
                is.close();
            }
        }
        catch ( Exception e )
        {
            throw new MojoExecutionException( "Unable to parse obr xml: " + m_obrXml, e );
        }
    }

    /**
     * Recursively copy category/require/capability elements from the parser into the
     * resource bundle; unknown elements are descended into.
     */
    private void parseObrXml( KXmlParser kxp ) throws Exception
    {
        PullParser parser = new PullParser();
        while ( kxp.getEventType() == XmlPullParser.START_TAG )
        {
            if ( RepositoryParser.CATEGORY.equals( kxp.getName() ) )
            {
                m_resourceBundle.addCategory( parser.parseCategory( kxp ) );
            }
            else if ( RepositoryParser.REQUIRE.equals( kxp.getName() ) )
            {
                m_resourceBundle.addRequire( parser.parseRequire( kxp ) );
            }
            else if ( RepositoryParser.CAPABILITY.equals( kxp.getName() ) )
            {
                m_resourceBundle.addCapability( parser.parseCapability( kxp ) );
            }
            else
            {
                kxp.nextTag();
                parseObrXml( kxp );
            }
            kxp.nextTag();
        }
    }
}
| boneman1231/org.apache.felix | trunk/bundleplugin/src/main/java/org/apache/felix/obrplugin/ObrUpdate.java | Java | apache-2.0 | 11,514 |
//-----------------------------------------------------------------------
// <copyright file="IJournalInterceptor.cs" company="Akka.NET Project">
// Copyright (C) 2009-2020 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2020 .NET Foundation <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
namespace Akka.Persistence.TestKit
{
using System.Threading.Tasks;
    /// <summary>
    ///     Interface to object which will intercept written and recovered messages in <see cref="TestJournal"/>.
    /// </summary>
    public interface IJournalInterceptor
    {
        /// <summary>
        ///     Method will be called for each individual message before it is written or recovered.
        /// </summary>
        /// <param name="message">Written or recovered message.</param>
        /// <returns>A task that completes once the interception for this message is finished.</returns>
        Task InterceptAsync(IPersistentRepresentation message);
    }
| simonlaroche/akka.net | src/core/Akka.Persistence.TestKit/Journal/IJournalInterceptor.cs | C# | apache-2.0 | 961 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tachyon.worker;
/**
 * The worker space counter, in charge of counting and granting spaces in a worker daemon.
 * All accounting state is guarded by the instance monitor.
 */
public class WorkerSpaceCounter {
  /** The maximum memory space the TachyonWorker can use, in bytes. */
  private final long CAPACITY_BYTES;
  /** The number of bytes currently granted. Guarded by {@code this}. */
  private long mUsedBytes;

  /**
   * @param capacityBytes
   *          The maximum memory space the TachyonWorker can use, in bytes
   */
  public WorkerSpaceCounter(long capacityBytes) {
    CAPACITY_BYTES = capacityBytes;
    mUsedBytes = 0;
  }

  /**
   * @return The available space size, in bytes
   */
  public synchronized long getAvailableBytes() {
    return CAPACITY_BYTES - mUsedBytes;
  }

  /**
   * @return The maximum memory space the TachyonWorker can use, in bytes
   */
  public long getCapacityBytes() {
    return CAPACITY_BYTES;
  }

  /**
   * @return The bytes that have been used
   */
  public synchronized long getUsedBytes() {
    return mUsedBytes;
  }

  /**
   * Attempt to reserve the requested amount of space.
   *
   * @param requestSpaceBytes
   *          The requested space size, in bytes
   * @return true when the space was granted, false when not enough space remains
   */
  public synchronized boolean requestSpaceBytes(long requestSpaceBytes) {
    boolean granted = getAvailableBytes() >= requestSpaceBytes;
    if (granted) {
      mUsedBytes += requestSpaceBytes;
    }
    return granted;
  }

  /**
   * Return used space size
   *
   * @param returnUsedBytes
   *          The returned space size, in bytes
   */
  public synchronized void returnUsedBytes(long returnUsedBytes) {
    mUsedBytes -= returnUsedBytes;
  }

  @Override
  public synchronized String toString() {
    long availableBytes = CAPACITY_BYTES - mUsedBytes;
    return "WorkerSpaceCounter(" + " TOTAL_BYTES: " + CAPACITY_BYTES + ", mUsedBytes: "
        + mUsedBytes + ", mAvailableBytes: " + availableBytes + " )";
  }

  /**
   * Update the used bytes
   *
   * @param usedBytes
   *          The new used bytes
   */
  public synchronized void updateUsedBytes(long usedBytes) {
    mUsedBytes = usedBytes;
  }
}
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.context;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.context.properties.bind.Bindable;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.boot.context.properties.source.ConfigurationPropertySource;
import org.springframework.boot.context.properties.source.MapConfigurationPropertySource;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.core.annotation.AnnotatedElementUtils;
import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.core.env.Environment;
import org.springframework.core.io.support.SpringFactoriesLoader;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.ContextConfigurationAttributes;
import org.springframework.test.context.ContextHierarchy;
import org.springframework.test.context.ContextLoader;
import org.springframework.test.context.MergedContextConfiguration;
import org.springframework.test.context.TestContext;
import org.springframework.test.context.TestContextBootstrapper;
import org.springframework.test.context.TestExecutionListener;
import org.springframework.test.context.support.DefaultTestContextBootstrapper;
import org.springframework.test.context.support.TestPropertySourceUtils;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.context.web.WebMergedContextConfiguration;
import org.springframework.util.Assert;
import org.springframework.util.ClassUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.StringUtils;
/**
* {@link TestContextBootstrapper} for Spring Boot. Provides support for
* {@link SpringBootTest @SpringBootTest} and may also be used directly or subclassed.
* Provides the following features over and above {@link DefaultTestContextBootstrapper}:
* <ul>
* <li>Uses {@link SpringBootContextLoader} as the
* {@link #getDefaultContextLoaderClass(Class) default context loader}.</li>
* <li>Automatically searches for a
* {@link SpringBootConfiguration @SpringBootConfiguration} when required.</li>
* <li>Allows custom {@link Environment} {@link #getProperties(Class)} to be defined.</li>
* <li>Provides support for different {@link WebEnvironment webEnvironment} modes.</li>
* </ul>
*
* @author Phillip Webb
* @author Andy Wilkinson
* @author Brian Clozel
* @author Madhura Bhave
* @since 1.4.0
* @see SpringBootTest
* @see TestConfiguration
*/
public class SpringBootTestContextBootstrapper extends DefaultTestContextBootstrapper {
private static final String[] WEB_ENVIRONMENT_CLASSES = { "javax.servlet.Servlet",
"org.springframework.web.context.ConfigurableWebApplicationContext" };
private static final String REACTIVE_WEB_ENVIRONMENT_CLASS = "org.springframework."
+ "web.reactive.DispatcherHandler";
private static final String MVC_WEB_ENVIRONMENT_CLASS = "org.springframework."
+ "web.servlet.DispatcherServlet";
private static final String ACTIVATE_SERVLET_LISTENER = "org.springframework.test."
+ "context.web.ServletTestExecutionListener.activateListener";
private static final Log logger = LogFactory
.getLog(SpringBootTestContextBootstrapper.class);
	@Override
	public TestContext buildTestContext() {
		TestContext context = super.buildTestContext();
		// Validate the test class configuration before any context is created
		// (see verifyConfiguration, declared elsewhere in this class).
		verifyConfiguration(context.getTestClass());
		WebEnvironment webEnvironment = getWebEnvironment(context.getTestClass());
		if (webEnvironment == WebEnvironment.MOCK
				&& deduceWebApplicationType() == WebApplicationType.SERVLET) {
			// Mock servlet environment: enable the servlet test-execution listener.
			context.setAttribute(ACTIVATE_SERVLET_LISTENER, true);
		}
		else if (webEnvironment != null && webEnvironment.isEmbedded()) {
			// A real embedded server will be started, so keep the mock servlet
			// listener out of the way.
			context.setAttribute(ACTIVATE_SERVLET_LISTENER, false);
		}
		return context;
	}
	@Override
	protected Set<Class<? extends TestExecutionListener>> getDefaultTestExecutionListenerClasses() {
		Set<Class<? extends TestExecutionListener>> listeners = super.getDefaultTestExecutionListenerClasses();
		// Give spring.factories-registered post-processors a chance to adjust the
		// default listener set.
		List<DefaultTestExecutionListenersPostProcessor> postProcessors = SpringFactoriesLoader
				.loadFactories(DefaultTestExecutionListenersPostProcessor.class,
						getClass().getClassLoader());
		for (DefaultTestExecutionListenersPostProcessor postProcessor : postProcessors) {
			listeners = postProcessor.postProcessDefaultTestExecutionListeners(listeners);
		}
		return listeners;
	}
	@Override
	protected ContextLoader resolveContextLoader(Class<?> testClass,
			List<ContextConfigurationAttributes> configAttributesList) {
		// Classes resolved from the test class (see getClasses) are pushed into every
		// configuration-attributes level before the standard resolution runs.
		Class<?>[] classes = getClasses(testClass);
		if (!ObjectUtils.isEmpty(classes)) {
			for (ContextConfigurationAttributes configAttributes : configAttributesList) {
				addConfigAttributesClasses(configAttributes, classes);
			}
		}
		return super.resolveContextLoader(testClass, configAttributesList);
	}
private void addConfigAttributesClasses(
ContextConfigurationAttributes configAttributes, Class<?>[] classes) {
List<Class<?>> combined = new ArrayList<>();
combined.addAll(Arrays.asList(classes));
if (configAttributes.getClasses() != null) {
combined.addAll(Arrays.asList(configAttributes.getClasses()));
}
configAttributes.setClasses(ClassUtils.toClassArray(combined));
}
	/**
	 * Use {@link SpringBootContextLoader} as the default loader instead of the
	 * standard Spring TestContext loader.
	 */
	@Override
	protected Class<? extends ContextLoader> getDefaultContextLoaderClass(
			Class<?> testClass) {
		return SpringBootContextLoader.class;
	}
	@Override
	protected MergedContextConfiguration processMergedContextConfiguration(
			MergedContextConfiguration mergedConfig) {
		// Resolve configuration classes (user-declared or discovered) and the final
		// property-source properties, then rebuild the merged configuration.
		Class<?>[] classes = getOrFindConfigurationClasses(mergedConfig);
		List<String> propertySourceProperties = getAndProcessPropertySourceProperties(
				mergedConfig);
		mergedConfig = createModifiedConfig(mergedConfig, classes,
				StringUtils.toStringArray(propertySourceProperties));
		WebEnvironment webEnvironment = getWebEnvironment(mergedConfig.getTestClass());
		if (webEnvironment != null && isWebEnvironmentSupported(mergedConfig)) {
			WebApplicationType webApplicationType = getWebApplicationType(mergedConfig);
			if (webApplicationType == WebApplicationType.SERVLET
					&& (webEnvironment.isEmbedded()
							|| webEnvironment == WebEnvironment.MOCK)) {
				// Servlet tests get a web-flavoured merged configuration; the resource
				// base path comes from @WebAppConfiguration when present.
				WebAppConfiguration webAppConfiguration = AnnotatedElementUtils
						.findMergedAnnotation(mergedConfig.getTestClass(),
								WebAppConfiguration.class);
				String resourceBasePath = (webAppConfiguration != null
						? webAppConfiguration.value() : "src/main/webapp");
				mergedConfig = new WebMergedContextConfiguration(mergedConfig,
						resourceBasePath);
			}
			else if (webApplicationType == WebApplicationType.REACTIVE
					&& (webEnvironment.isEmbedded()
							|| webEnvironment == WebEnvironment.MOCK)) {
				return new ReactiveWebMergedContextConfiguration(mergedConfig);
			}
		}
		return mergedConfig;
	}
	/**
	 * Determine the {@link WebApplicationType}: an explicit
	 * {@code spring.main.web-application-type} inlined property wins, otherwise the
	 * type is deduced from the classpath.
	 */
	private WebApplicationType getWebApplicationType(
			MergedContextConfiguration configuration) {
		ConfigurationPropertySource source = new MapConfigurationPropertySource(
				TestPropertySourceUtils.convertInlinedPropertiesToMap(
						configuration.getPropertySourceProperties()));
		Binder binder = new Binder(source);
		return binder
				.bind("spring.main.web-application-type",
						Bindable.of(WebApplicationType.class))
				.orElseGet(this::deduceWebApplicationType);
	}
private WebApplicationType deduceWebApplicationType() {
if (ClassUtils.isPresent(REACTIVE_WEB_ENVIRONMENT_CLASS, null)
&& !ClassUtils.isPresent(MVC_WEB_ENVIRONMENT_CLASS, null)) {
return WebApplicationType.REACTIVE;
}
for (String className : WEB_ENVIRONMENT_CLASSES) {
if (!ClassUtils.isPresent(className, null)) {
return WebApplicationType.NONE;
}
}
return WebApplicationType.SERVLET;
}
	/**
	 * A web environment is honoured when the test declares no @ContextHierarchy, or
	 * when this merged configuration corresponds to the last (innermost) level of
	 * the declared hierarchy.
	 */
	private boolean isWebEnvironmentSupported(MergedContextConfiguration mergedConfig) {
		Class<?> testClass = mergedConfig.getTestClass();
		ContextHierarchy hierarchy = AnnotationUtils.getAnnotation(testClass,
				ContextHierarchy.class);
		if (hierarchy == null || hierarchy.value().length == 0) {
			return true;
		}
		ContextConfiguration[] configurations = hierarchy.value();
		return isFromConfiguration(mergedConfig,
				configurations[configurations.length - 1]);
	}
private boolean isFromConfiguration(MergedContextConfiguration candidateConfig,
ContextConfiguration configuration) {
ContextConfigurationAttributes attributes = new ContextConfigurationAttributes(
candidateConfig.getTestClass(), configuration);
Set<Class<?>> configurationClasses = new HashSet<>(
Arrays.asList(attributes.getClasses()));
for (Class<?> candidate : candidateConfig.getClasses()) {
if (configurationClasses.contains(candidate)) {
return true;
}
}
return false;
}
	/**
	 * Return the user-declared configuration classes, or — when only test components
	 * (or nothing) were declared and no locations are set — search upwards from the
	 * test class for a {@code @SpringBootConfiguration} and prepend it.
	 */
	protected Class<?>[] getOrFindConfigurationClasses(
			MergedContextConfiguration mergedConfig) {
		Class<?>[] classes = mergedConfig.getClasses();
		if (containsNonTestComponent(classes) || mergedConfig.hasLocations()) {
			return classes;
		}
		Class<?> found = new SpringBootConfigurationFinder()
				.findFromClass(mergedConfig.getTestClass());
		Assert.state(found != null,
				"Unable to find a @SpringBootConfiguration, you need to use "
						+ "@ContextConfiguration or @SpringBootTest(classes=...) "
						+ "with your test");
		logger.info("Found @SpringBootConfiguration " + found.getName() + " for test "
				+ mergedConfig.getTestClass());
		return merge(found, classes);
	}
/**
 * Return whether any of the given classes is a regular component, i.e. is not
 * annotated (directly or meta-annotated) with {@code @TestConfiguration}.
 */
private boolean containsNonTestComponent(Class<?>[] classes) {
    return Arrays.stream(classes).anyMatch(
            (candidate) -> !AnnotatedElementUtils.isAnnotated(candidate, TestConfiguration.class));
}
/**
 * Return a new array consisting of {@code head} followed by all of {@code existing}.
 */
private Class<?>[] merge(Class<?> head, Class<?>[] existing) {
    Class<?>[] merged = new Class<?>[existing.length + 1];
    merged[0] = head;
    for (int i = 0; i < existing.length; i++) {
        merged[i + 1] = existing[i];
    }
    return merged;
}
/**
 * Build the mutable list of property-source properties for the given configuration:
 * the merged configuration's own properties, plus the differentiator (when present),
 * then hand the list to {@link #processPropertySourceProperties} for post-processing.
 */
private List<String> getAndProcessPropertySourceProperties(
        MergedContextConfiguration mergedConfig) {
    List<String> properties = new ArrayList<>(
            Arrays.asList(mergedConfig.getPropertySourceProperties()));
    String differentiator = getDifferentiatorPropertySourceProperty();
    if (differentiator != null) {
        properties.add(differentiator);
    }
    processPropertySourceProperties(mergedConfig, properties);
    return properties;
}
/**
 * Return a "differentiator" property so that contexts bootstrapped by this class are
 * never satisfied from a cached context that was created by a different bootstrapper.
 * The bootstrapper class name itself is used as the property by default.
 * @return the differentiator or {@code null}
 */
protected String getDifferentiatorPropertySourceProperty() {
    return getClass().getName() + "=true";
}
/**
 * Post process the property source properties, adding or removing elements as
 * required. Adds the {@code @SpringBootTest} inlined properties and, for a
 * {@code RANDOM_PORT} web environment, {@code server.port=0}.
 * @param mergedConfig the merged context configuration
 * @param propertySourceProperties the property source properties to process
 */
protected void processPropertySourceProperties(
        MergedContextConfiguration mergedConfig,
        List<String> propertySourceProperties) {
    Class<?> testClass = mergedConfig.getTestClass();
    String[] properties = getProperties(testClass);
    if (!ObjectUtils.isEmpty(properties)) {
        // Added first so that inlined properties from @TestPropertySource take
        // precedence
        propertySourceProperties.addAll(0, Arrays.asList(properties));
    }
    if (getWebEnvironment(testClass) == WebEnvironment.RANDOM_PORT) {
        // Port 0 instructs the embedded server to pick a random free port.
        propertySourceProperties.add("server.port=0");
    }
}
/**
 * Return the {@link WebEnvironment} type for this test or null if undefined.
 * @param testClass the source test class
 * @return the {@link WebEnvironment} or {@code null}
 */
protected WebEnvironment getWebEnvironment(Class<?> testClass) {
    SpringBootTest annotation = getAnnotation(testClass);
    if (annotation == null) {
        return null;
    }
    return annotation.webEnvironment();
}
/**
 * Return the classes declared on {@code @SpringBootTest}, or {@code null} when the
 * test class is not annotated.
 */
protected Class<?>[] getClasses(Class<?> testClass) {
    SpringBootTest annotation = getAnnotation(testClass);
    if (annotation == null) {
        return null;
    }
    return annotation.classes();
}
/**
 * Return the inlined properties declared on {@code @SpringBootTest}, or {@code null}
 * when the test class is not annotated.
 */
protected String[] getProperties(Class<?> testClass) {
    SpringBootTest annotation = getAnnotation(testClass);
    if (annotation == null) {
        return null;
    }
    return annotation.properties();
}
/**
 * Return the merged {@code @SpringBootTest} annotation for the given test class, or
 * {@code null} when the class is not annotated.
 */
protected SpringBootTest getAnnotation(Class<?> testClass) {
    return AnnotatedElementUtils.getMergedAnnotation(testClass, SpringBootTest.class);
}
/**
 * Verify that the test class is not using {@code @WebAppConfiguration} together with a
 * {@code @SpringBootTest} that starts a real embedded server.
 * @throws IllegalStateException on an invalid combination
 */
protected void verifyConfiguration(Class<?> testClass) {
    SpringBootTest springBootTest = getAnnotation(testClass);
    if (springBootTest == null) {
        return;
    }
    WebEnvironment webEnvironment = springBootTest.webEnvironment();
    boolean embeddedWebEnvironment = webEnvironment == WebEnvironment.DEFINED_PORT
            || webEnvironment == WebEnvironment.RANDOM_PORT;
    if (embeddedWebEnvironment
            && getAnnotation(WebAppConfiguration.class, testClass) != null) {
        throw new IllegalStateException("@WebAppConfiguration should only be used "
                + "with @SpringBootTest when @SpringBootTest is configured with a "
                + "mock web environment. Please remove @WebAppConfiguration or "
                + "reconfigure @SpringBootTest.");
    }
}
/**
 * Return the merged annotation of the given type on the test class, or {@code null}
 * when absent.
 */
private <T extends Annotation> T getAnnotation(Class<T> annotationType,
        Class<?> testClass) {
    return AnnotatedElementUtils.getMergedAnnotation(testClass, annotationType);
}
/**
 * Create a new {@link MergedContextConfiguration} with different classes.
 * All other attributes (including the property source properties) are copied from the
 * source configuration.
 * @param mergedConfig the source config
 * @param classes the replacement classes
 * @return a new {@link MergedContextConfiguration}
 */
protected final MergedContextConfiguration createModifiedConfig(
        MergedContextConfiguration mergedConfig, Class<?>[] classes) {
    return createModifiedConfig(mergedConfig, classes,
            mergedConfig.getPropertySourceProperties());
}
/**
 * Create a new {@link MergedContextConfiguration} with different classes and
 * properties. Every other attribute is copied verbatim from the source configuration;
 * the argument order below mirrors the {@code MergedContextConfiguration} constructor.
 * @param mergedConfig the source config
 * @param classes the replacement classes
 * @param propertySourceProperties the replacement properties
 * @return a new {@link MergedContextConfiguration}
 */
protected final MergedContextConfiguration createModifiedConfig(
        MergedContextConfiguration mergedConfig, Class<?>[] classes,
        String[] propertySourceProperties) {
    return new MergedContextConfiguration(mergedConfig.getTestClass(),
            mergedConfig.getLocations(), classes,
            mergedConfig.getContextInitializerClasses(),
            mergedConfig.getActiveProfiles(),
            mergedConfig.getPropertySourceLocations(), propertySourceProperties,
            mergedConfig.getContextCustomizers(), mergedConfig.getContextLoader(),
            getCacheAwareContextLoaderDelegate(), mergedConfig.getParent());
}
}
| bclozel/spring-boot | spring-boot-project/spring-boot-test/src/main/java/org/springframework/boot/test/context/SpringBootTestContextBootstrapper.java | Java | apache-2.0 | 15,659 |
{{-- Printable sales report for a period. Cancelled forms (form_status == -1) are
     rendered struck-through and excluded from the grand total. --}}
<!DOCTYPE html>
<html>
<head>
    <title>Sales Report</title>
    <link rel="stylesheet" href="{{asset('core/themes/appui-backend/css/bootstrap.min.css')}}">
</head>
<body>
    <div class="h3 text-center">Sales Report</div>
    <div class="text-center">Period : {{ $period }}</div>
    <br>
    {{-- Fixed typo: "tabble-striped" -> "table-striped" so Bootstrap row striping applies --}}
    <table class="table table-striped table-bordered" cellpadding="0" cellspacing="0" border="0">
    <thead>
    <tr>
        <th width="30px" class="text-center">No.</th>
        <th width="100px">Form Number</th>
        <th width="125px">Form Date</th>
        <th>Customer</th>
        <th>Sales</th>
        <th class="text-right">Total</th>
    </tr>
    </thead>
    <tbody>
    <?php $total_sales = 0; $i=0?>
    @foreach($list_sales as $sales)
    <tr id="list-{{$sales->id}}" @if($sales->formulir->form_status == -1) style="text-decoration: line-through;" @endif>
        <td class="text-center">{{++$i}}</td>
        <td>{{ $sales->formulir->form_number }}</td>
        <td>{{ date_format_view($sales->formulir->form_date, true) }}</td>
        <td>{{ $sales->customer->codeName }}</td>
        <td>{{ $sales->formulir->createdBy->name }}</td>
        <td class="text-right">{{ number_format_accounting($sales->total) }}</td>
    </tr>
    {{-- Only non-cancelled forms contribute to the total --}}
    @if($sales->formulir->form_status != -1)
    <?php $total_sales += $sales->total;?>
    @endif
    @endforeach
    </tbody>
    <tfoot>
    <tr>
        <td colspan="5" class="text-right"><strong>Total</strong></td>
        <td class="text-right"><strong>{{ number_format_accounting($total_sales) }}</strong></td>
    </tr>
    </tfoot>
    </table>
</body>
</html>
| bgd-point/point-app-test | packages/point/point-sales/src/views/app/sales/point/pos/report/pdf.blade.php | PHP | apache-2.0 | 1,787 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.impl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.libraries.LibraryEx;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.openapi.vfs.newvfs.events.VFileEvent;
import com.intellij.util.CollectionQuery;
import com.intellij.util.Query;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.MultiMap;
import com.intellij.util.containers.SLRUMap;
import gnu.trove.TObjectIntHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.model.module.JpsModuleSourceRootType;
import java.util.*;
public class RootIndex {
public static final Comparator<OrderEntry> BY_OWNER_MODULE = (o1, o2) -> {
String name1 = o1.getOwnerModule().getName();
String name2 = o2.getOwnerModule().getName();
return name1.compareTo(name2);
};
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.roots.impl.RootIndex");
private static final FileTypeRegistry ourFileTypes = FileTypeRegistry.getInstance();
private final Map<VirtualFile, String> myPackagePrefixByRoot = ContainerUtil.newHashMap();
private final InfoCache myInfoCache;
private final List<JpsModuleSourceRootType<?>> myRootTypes = ContainerUtil.newArrayList();
private final TObjectIntHashMap<JpsModuleSourceRootType<?>> myRootTypeId = new TObjectIntHashMap<>();
@NotNull private final Project myProject;
private final PackageDirectoryCache myPackageDirectoryCache;
private OrderEntryGraph myOrderEntryGraph;
// made public for Upsource
public RootIndex(@NotNull Project project, @NotNull InfoCache cache) {
myProject = project;
myInfoCache = cache;
final RootInfo info = buildRootInfo(project);
MultiMap<String, VirtualFile> rootsByPackagePrefix = MultiMap.create();
Set<VirtualFile> allRoots = info.getAllRoots();
for (VirtualFile root : allRoots) {
List<VirtualFile> hierarchy = getHierarchy(root, allRoots, info);
Pair<DirectoryInfo, String> pair = hierarchy != null
? calcDirectoryInfo(root, hierarchy, info)
: new Pair<>(NonProjectDirectoryInfo.IGNORED, null);
cacheInfos(root, root, pair.first);
rootsByPackagePrefix.putValue(pair.second, root);
myPackagePrefixByRoot.put(root, pair.second);
}
myPackageDirectoryCache = new PackageDirectoryCache(rootsByPackagePrefix) {
@Override
protected boolean isPackageDirectory(@NotNull VirtualFile dir, @NotNull String packageName) {
return getInfoForFile(dir).isInProject() && packageName.equals(getPackageName(dir));
}
};
}
public void onLowMemory() {
myPackageDirectoryCache.onLowMemory();
}
@NotNull
private RootInfo buildRootInfo(@NotNull Project project) {
final RootInfo info = new RootInfo();
for (final Module module : ModuleManager.getInstance(project).getModules()) {
final ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
for (final VirtualFile contentRoot : moduleRootManager.getContentRoots()) {
if (!info.contentRootOf.containsKey(contentRoot) && ensureValid(contentRoot, module)) {
info.contentRootOf.put(contentRoot, module);
}
}
for (ContentEntry contentEntry : moduleRootManager.getContentEntries()) {
if (!(contentEntry instanceof ContentEntryImpl) || !((ContentEntryImpl)contentEntry).isDisposed()) {
for (VirtualFile excludeRoot : contentEntry.getExcludeFolderFiles()) {
if (!ensureValid(excludeRoot, contentEntry)) continue;
info.excludedFromModule.put(excludeRoot, module);
}
}
// Init module sources
for (final SourceFolder sourceFolder : contentEntry.getSourceFolders()) {
final VirtualFile sourceFolderRoot = sourceFolder.getFile();
if (sourceFolderRoot != null && ensureValid(sourceFolderRoot, sourceFolder)) {
info.rootTypeId.put(sourceFolderRoot, getRootTypeId(sourceFolder.getRootType()));
info.classAndSourceRoots.add(sourceFolderRoot);
info.sourceRootOf.putValue(sourceFolderRoot, module);
info.packagePrefix.put(sourceFolderRoot, sourceFolder.getPackagePrefix());
}
}
}
for (OrderEntry orderEntry : moduleRootManager.getOrderEntries()) {
if (orderEntry instanceof LibraryOrSdkOrderEntry) {
final LibraryOrSdkOrderEntry entry = (LibraryOrSdkOrderEntry)orderEntry;
final VirtualFile[] sourceRoots = entry.getRootFiles(OrderRootType.SOURCES);
final VirtualFile[] classRoots = entry.getRootFiles(OrderRootType.CLASSES);
// Init library sources
for (final VirtualFile sourceRoot : sourceRoots) {
if (!ensureValid(sourceRoot, entry)) continue;
info.classAndSourceRoots.add(sourceRoot);
info.libraryOrSdkSources.add(sourceRoot);
info.packagePrefix.put(sourceRoot, "");
}
// init library classes
for (final VirtualFile classRoot : classRoots) {
if (!ensureValid(classRoot, entry)) continue;
info.classAndSourceRoots.add(classRoot);
info.libraryOrSdkClasses.add(classRoot);
info.packagePrefix.put(classRoot, "");
}
if (orderEntry instanceof LibraryOrderEntry) {
Library library = ((LibraryOrderEntry)orderEntry).getLibrary();
if (library != null) {
for (VirtualFile root : ((LibraryEx)library).getExcludedRoots()) {
if (!ensureValid(root, library)) continue;
info.excludedFromLibraries.putValue(root, library);
}
for (VirtualFile root : sourceRoots) {
if (!ensureValid(root, library)) continue;
info.sourceOfLibraries.putValue(root, library);
}
for (VirtualFile root : classRoots) {
if (!ensureValid(root, library)) continue;
info.classOfLibraries.putValue(root, library);
}
}
}
}
}
}
for (AdditionalLibraryRootsProvider provider : Extensions.getExtensions(AdditionalLibraryRootsProvider.EP_NAME)) {
Collection<SyntheticLibrary> libraries = provider.getAdditionalProjectLibraries(project);
for (SyntheticLibrary descriptor : libraries) {
for (VirtualFile root : descriptor.getSourceRoots()) {
if (!ensureValid(root, project)) continue;
info.libraryOrSdkSources.add(root);
info.classAndSourceRoots.add(root);
info.sourceOfLibraries.putValue(root, descriptor);
}
for (VirtualFile file : descriptor.getExcludedRoots()) {
if (!ensureValid(file, project)) continue;
info.excludedFromLibraries.putValue(file, descriptor);
}
}
}
for (DirectoryIndexExcludePolicy policy : Extensions.getExtensions(DirectoryIndexExcludePolicy.EP_NAME, project)) {
info.excludedFromProject.addAll(ContainerUtil.filter(policy.getExcludeRootsForProject(), file -> ensureValid(file, policy)));
}
return info;
}
private static boolean ensureValid(@NotNull VirtualFile file, @NotNull Object container) {
if (!(file instanceof VirtualFileWithId)) {
//skip roots from unsupported file systems (e.g. http)
return false;
}
if (!file.isValid()) {
LOG.error("Invalid root " + file + " in " + container);
return false;
}
return true;
}
@NotNull
private synchronized OrderEntryGraph getOrderEntryGraph() {
if (myOrderEntryGraph == null) {
RootInfo rootInfo = buildRootInfo(myProject);
myOrderEntryGraph = new OrderEntryGraph(myProject, rootInfo);
}
return myOrderEntryGraph;
}
/**
* A reverse dependency graph of (library, jdk, module, module source) -> (module).
*
* <p>Each edge carries with it the associated OrderEntry that caused the dependency.
*/
private static class OrderEntryGraph {
private static class Edge {
Module myKey;
ModuleOrderEntry myOrderEntry; // Order entry from myKey -> the node containing the edge
boolean myRecursive; // Whether this edge should be descended into during graph walk
public Edge(Module key, ModuleOrderEntry orderEntry, boolean recursive) {
myKey = key;
myOrderEntry = orderEntry;
myRecursive = recursive;
}
@Override
public String toString() {
return myOrderEntry.toString();
}
}
private static class Node {
Module myKey;
List<Edge> myEdges = new ArrayList<>();
@Override
public String toString() {
return myKey.toString();
}
}
private static class Graph {
Map<Module, Node> myNodes = new HashMap<>();
}
final Project myProject;
final RootInfo myRootInfo;
final Set<VirtualFile> myAllRoots;
Graph myGraph;
MultiMap<VirtualFile, Node> myRoots; // Map of roots to their root nodes, eg. library jar -> library node
final SynchronizedSLRUCache<VirtualFile, List<OrderEntry>> myCache;
private MultiMap<VirtualFile, OrderEntry> myLibClassRootEntries;
private MultiMap<VirtualFile, OrderEntry> myLibSourceRootEntries;
public OrderEntryGraph(Project project, RootInfo rootInfo) {
myProject = project;
myRootInfo = rootInfo;
myAllRoots = myRootInfo.getAllRoots();
int cacheSize = Math.max(25, (myAllRoots.size() / 100) * 2);
myCache = new SynchronizedSLRUCache<VirtualFile, List<OrderEntry>>(cacheSize, cacheSize) {
@NotNull
@Override
public List<OrderEntry> createValue(VirtualFile key) {
return collectOrderEntries(key);
}
};
initGraph();
initLibraryRoots();
}
private void initGraph() {
Graph graph = new Graph();
MultiMap<VirtualFile, Node> roots = MultiMap.createSmart();
for (final Module module : ModuleManager.getInstance(myProject).getModules()) {
final ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
List<OrderEnumerationHandler> handlers = OrderEnumeratorBase.getCustomHandlers(module);
for (OrderEntry orderEntry : moduleRootManager.getOrderEntries()) {
if (orderEntry instanceof ModuleOrderEntry) {
ModuleOrderEntry moduleOrderEntry = (ModuleOrderEntry)orderEntry;
final Module depModule = moduleOrderEntry.getModule();
if (depModule != null) {
Node node = graph.myNodes.get(depModule);
OrderEnumerator en = OrderEnumerator.orderEntries(depModule).exportedOnly();
if (node == null) {
node = new Node();
node.myKey = depModule;
graph.myNodes.put(depModule, node);
VirtualFile[] importedClassRoots = en.classes().usingCache().getRoots();
for (VirtualFile importedClassRoot : importedClassRoots) {
roots.putValue(importedClassRoot, node);
}
VirtualFile[] importedSourceRoots = en.sources().usingCache().getRoots();
for (VirtualFile sourceRoot : importedSourceRoots) {
roots.putValue(sourceRoot, node);
}
}
boolean shouldRecurse = en.recursively().shouldRecurse(moduleOrderEntry, handlers);
node.myEdges.add(new Edge(module, moduleOrderEntry, shouldRecurse));
}
}
}
}
myGraph = graph;
myRoots = roots;
}
private void initLibraryRoots() {
MultiMap<VirtualFile, OrderEntry> libClassRootEntries = MultiMap.createSmart();
MultiMap<VirtualFile, OrderEntry> libSourceRootEntries = MultiMap.createSmart();
for (final Module module : ModuleManager.getInstance(myProject).getModules()) {
final ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module);
for (OrderEntry orderEntry : moduleRootManager.getOrderEntries()) {
if (orderEntry instanceof LibraryOrSdkOrderEntry) {
final LibraryOrSdkOrderEntry entry = (LibraryOrSdkOrderEntry)orderEntry;
for (final VirtualFile sourceRoot : entry.getRootFiles(OrderRootType.SOURCES)) {
libSourceRootEntries.putValue(sourceRoot, orderEntry);
}
for (final VirtualFile classRoot : entry.getRootFiles(OrderRootType.CLASSES)) {
libClassRootEntries.putValue(classRoot, orderEntry);
}
}
}
}
myLibClassRootEntries = libClassRootEntries;
myLibSourceRootEntries = libSourceRootEntries;
}
private List<OrderEntry> getOrderEntries(@NotNull VirtualFile file) {
return myCache.get(file);
}
/**
* Traverses the graph from the given file, collecting all encountered order entries.
*/
private List<OrderEntry> collectOrderEntries(@NotNull VirtualFile file) {
List<VirtualFile> roots = getHierarchy(file, myAllRoots, myRootInfo);
if (roots == null) {
return Collections.emptyList();
}
List<OrderEntry> result = new ArrayList<>();
Stack<Node> stack = new Stack<>();
for (VirtualFile root : roots) {
Collection<Node> nodes = myRoots.get(root);
for (Node node : nodes) {
stack.push(node);
}
}
Set<Node> seen = new HashSet<>();
while (!stack.isEmpty()) {
Node node = stack.pop();
if (seen.contains(node)) {
continue;
}
seen.add(node);
for (Edge edge : node.myEdges) {
result.add(edge.myOrderEntry);
if (edge.myRecursive) {
Node targetNode = myGraph.myNodes.get(edge.myKey);
if (targetNode != null) {
stack.push(targetNode);
}
}
}
}
@Nullable VirtualFile libraryClassRoot = myRootInfo.findLibraryRootInfo(roots, false);
@Nullable VirtualFile librarySourceRoot = myRootInfo.findLibraryRootInfo(roots, true);
result.addAll(myRootInfo.getLibraryOrderEntries(roots, libraryClassRoot, librarySourceRoot, myLibClassRootEntries, myLibSourceRootEntries));
VirtualFile moduleContentRoot = myRootInfo.findNearestContentRoot(roots);
if (moduleContentRoot != null) {
ContainerUtil.addIfNotNull(result, myRootInfo.getModuleSourceEntry(roots, moduleContentRoot, myLibClassRootEntries));
}
Collections.sort(result, BY_OWNER_MODULE);
return result;
}
}
private int getRootTypeId(@NotNull JpsModuleSourceRootType<?> rootType) {
if (myRootTypeId.containsKey(rootType)) {
return myRootTypeId.get(rootType);
}
int id = myRootTypes.size();
if (id > DirectoryInfoImpl.MAX_ROOT_TYPE_ID) {
LOG.error("Too many different types of module source roots (" + id + ") registered: " + myRootTypes);
}
myRootTypes.add(rootType);
myRootTypeId.put(rootType, id);
return id;
}
@NotNull
public DirectoryInfo getInfoForFile(@NotNull VirtualFile file) {
if (!file.isValid()) {
return NonProjectDirectoryInfo.INVALID;
}
VirtualFile dir;
if (!file.isDirectory()) {
DirectoryInfo info = myInfoCache.getCachedInfo(file);
if (info != null) {
return info;
}
if (ourFileTypes.isFileIgnored(file)) {
return NonProjectDirectoryInfo.IGNORED;
}
dir = file.getParent();
}
else {
dir = file;
}
int count = 0;
for (VirtualFile root = dir; root != null; root = root.getParent()) {
if (++count > 1000) {
throw new IllegalStateException("Possible loop in tree, started at " + dir.getName());
}
DirectoryInfo info = myInfoCache.getCachedInfo(root);
if (info != null) {
if (!dir.equals(root)) {
cacheInfos(dir, root, info);
}
return info;
}
if (ourFileTypes.isFileIgnored(root)) {
return cacheInfos(dir, root, NonProjectDirectoryInfo.IGNORED);
}
}
return cacheInfos(dir, null, NonProjectDirectoryInfo.NOT_UNDER_PROJECT_ROOTS);
}
@NotNull
private DirectoryInfo cacheInfos(VirtualFile dir, @Nullable VirtualFile stopAt, @NotNull DirectoryInfo info) {
while (dir != null) {
myInfoCache.cacheInfo(dir, info);
if (dir.equals(stopAt)) {
break;
}
dir = dir.getParent();
}
return info;
}
@NotNull
public Query<VirtualFile> getDirectoriesByPackageName(@NotNull final String packageName, final boolean includeLibrarySources) {
// Note that this method is used in upsource as well, hence, don't reduce this method's visibility.
List<VirtualFile> result = myPackageDirectoryCache.getDirectoriesByPackageName(packageName);
if (!includeLibrarySources) {
result = ContainerUtil.filter(result, file -> {
DirectoryInfo info = getInfoForFile(file);
return info.isInProject() && (!info.isInLibrarySource() || info.isInModuleSource() || info.hasLibraryClassRoot());
});
}
return new CollectionQuery<>(result);
}
@Nullable
public String getPackageName(@NotNull final VirtualFile dir) {
if (dir.isDirectory()) {
if (ourFileTypes.isFileIgnored(dir)) {
return null;
}
if (myPackagePrefixByRoot.containsKey(dir)) {
return myPackagePrefixByRoot.get(dir);
}
final VirtualFile parent = dir.getParent();
if (parent != null) {
return getPackageNameForSubdir(getPackageName(parent), dir.getName());
}
}
return null;
}
@Nullable
protected static String getPackageNameForSubdir(@Nullable String parentPackageName, @NotNull String subdirName) {
if (parentPackageName == null) return null;
return parentPackageName.isEmpty() ? subdirName : parentPackageName + "." + subdirName;
}
@Nullable
public JpsModuleSourceRootType<?> getSourceRootType(@NotNull DirectoryInfo directoryInfo) {
return myRootTypes.get(directoryInfo.getSourceRootTypeId());
}
boolean resetOnEvents(@NotNull List<? extends VFileEvent> events) {
for (VFileEvent event : events) {
VirtualFile file = event.getFile();
if (file == null || file.isDirectory()) {
return true;
}
}
return false;
}
@Nullable("returns null only if dir is under ignored folder")
private static List<VirtualFile> getHierarchy(VirtualFile dir, @NotNull Set<VirtualFile> allRoots, @NotNull RootInfo info) {
List<VirtualFile> hierarchy = ContainerUtil.newArrayList();
boolean hasContentRoots = false;
while (dir != null) {
hasContentRoots |= info.contentRootOf.get(dir) != null;
if (!hasContentRoots && ourFileTypes.isFileIgnored(dir)) {
return null;
}
if (allRoots.contains(dir)) {
hierarchy.add(dir);
}
dir = dir.getParent();
}
return hierarchy;
}
private static class RootInfo {
// getDirectoriesByPackageName used to be in this order, some clients might rely on that
@NotNull final LinkedHashSet<VirtualFile> classAndSourceRoots = ContainerUtil.newLinkedHashSet();
@NotNull final Set<VirtualFile> libraryOrSdkSources = ContainerUtil.newHashSet();
@NotNull final Set<VirtualFile> libraryOrSdkClasses = ContainerUtil.newHashSet();
@NotNull final Map<VirtualFile, Module> contentRootOf = ContainerUtil.newHashMap();
@NotNull final MultiMap<VirtualFile, Module> sourceRootOf = MultiMap.createSet();
@NotNull final TObjectIntHashMap<VirtualFile> rootTypeId = new TObjectIntHashMap<>();
@NotNull final MultiMap<VirtualFile, /*Library|SyntheticLibrary*/ Object> excludedFromLibraries = MultiMap.createSmart();
@NotNull final MultiMap<VirtualFile, Library> classOfLibraries = MultiMap.createSmart();
@NotNull final MultiMap<VirtualFile, /*Library|SyntheticLibrary*/ Object> sourceOfLibraries = MultiMap.createSmart();
@NotNull final Set<VirtualFile> excludedFromProject = ContainerUtil.newHashSet();
@NotNull final Map<VirtualFile, Module> excludedFromModule = ContainerUtil.newHashMap();
@NotNull final Map<VirtualFile, String> packagePrefix = ContainerUtil.newHashMap();
@NotNull
Set<VirtualFile> getAllRoots() {
LinkedHashSet<VirtualFile> result = ContainerUtil.newLinkedHashSet();
result.addAll(classAndSourceRoots);
result.addAll(contentRootOf.keySet());
result.addAll(excludedFromLibraries.keySet());
result.addAll(excludedFromModule.keySet());
result.addAll(excludedFromProject);
return result;
}
/**
* Returns nearest content root for a file by its parent directories hierarchy. If the file is excluded (i.e. located under an excluded
* root and there are no source roots on the path to the excluded root) returns {@code null}.
*/
@Nullable
private VirtualFile findNearestContentRoot(@NotNull List<VirtualFile> hierarchy) {
Collection<Module> sourceRootOwners = null;
boolean underExcludedSourceRoot = false;
for (VirtualFile root : hierarchy) {
Module module = contentRootOf.get(root);
Module excludedFrom = excludedFromModule.get(root);
if (module != null && (excludedFrom != module || underExcludedSourceRoot && sourceRootOwners.contains(module))) {
return root;
}
if (excludedFrom != null || excludedFromProject.contains(root)) {
if (sourceRootOwners != null) {
underExcludedSourceRoot = true;
}
else {
return null;
}
}
if (!underExcludedSourceRoot && sourceRootOf.containsKey(root)) {
Collection<Module> modulesForSourceRoot = sourceRootOf.get(root);
if (!modulesForSourceRoot.isEmpty()) {
if (sourceRootOwners == null) {
sourceRootOwners = modulesForSourceRoot;
}
else {
sourceRootOwners = ContainerUtil.union(sourceRootOwners, modulesForSourceRoot);
}
}
}
}
return null;
}
@Nullable
private VirtualFile findNearestContentRootForExcluded(@NotNull List<VirtualFile> hierarchy) {
for (VirtualFile root : hierarchy) {
if (contentRootOf.containsKey(root)) {
return root;
}
}
return null;
}
@Nullable
private VirtualFile findLibraryRootInfo(@NotNull List<VirtualFile> hierarchy, boolean source) {
Set<Object> librariesToIgnore = ContainerUtil.newHashSet();
for (VirtualFile root : hierarchy) {
librariesToIgnore.addAll(excludedFromLibraries.get(root));
if (source && libraryOrSdkSources.contains(root) &&
(!sourceOfLibraries.containsKey(root) || !librariesToIgnore.containsAll(sourceOfLibraries.get(root)))) {
return root;
}
else if (!source && libraryOrSdkClasses.contains(root) &&
(!classOfLibraries.containsKey(root) || !librariesToIgnore.containsAll(classOfLibraries.get(root)))) {
return root;
}
}
return null;
}
private String calcPackagePrefix(@NotNull VirtualFile root,
@NotNull List<VirtualFile> hierarchy,
VirtualFile moduleContentRoot,
VirtualFile libraryClassRoot,
VirtualFile librarySourceRoot) {
VirtualFile packageRoot = findPackageRootInfo(hierarchy, moduleContentRoot, libraryClassRoot, librarySourceRoot);
String prefix = packagePrefix.get(packageRoot);
if (prefix != null && !root.equals(packageRoot)) {
assert packageRoot != null;
String relative = VfsUtilCore.getRelativePath(root, packageRoot, '.');
prefix = StringUtil.isEmpty(prefix) ? relative : prefix + '.' + relative;
}
return prefix;
}
@Nullable
private VirtualFile findPackageRootInfo(@NotNull List<VirtualFile> hierarchy,
VirtualFile moduleContentRoot,
VirtualFile libraryClassRoot,
VirtualFile librarySourceRoot) {
for (VirtualFile root : hierarchy) {
if (moduleContentRoot != null &&
sourceRootOf.get(root).contains(contentRootOf.get(moduleContentRoot)) &&
librarySourceRoot == null) {
return root;
}
if (root.equals(libraryClassRoot) || root.equals(librarySourceRoot)) {
return root;
}
if (root.equals(moduleContentRoot) && !sourceRootOf.containsKey(root) && librarySourceRoot == null && libraryClassRoot == null) {
return null;
}
}
return null;
}
@NotNull
private LinkedHashSet<OrderEntry> getLibraryOrderEntries(@NotNull List<VirtualFile> hierarchy,
@Nullable VirtualFile libraryClassRoot,
@Nullable VirtualFile librarySourceRoot,
@NotNull MultiMap<VirtualFile, OrderEntry> libClassRootEntries,
@NotNull MultiMap<VirtualFile, OrderEntry> libSourceRootEntries) {
LinkedHashSet<OrderEntry> orderEntries = ContainerUtil.newLinkedHashSet();
for (VirtualFile root : hierarchy) {
if (root.equals(libraryClassRoot) && !sourceRootOf.containsKey(root)) {
orderEntries.addAll(libClassRootEntries.get(root));
}
if (root.equals(librarySourceRoot) && libraryClassRoot == null) {
orderEntries.addAll(libSourceRootEntries.get(root));
}
if (libClassRootEntries.containsKey(root) || sourceRootOf.containsKey(root) && librarySourceRoot == null) {
break;
}
}
return orderEntries;
}
@Nullable
private ModuleSourceOrderEntry getModuleSourceEntry(@NotNull List<VirtualFile> hierarchy,
@NotNull VirtualFile moduleContentRoot,
@NotNull MultiMap<VirtualFile, OrderEntry> libClassRootEntries) {
Module module = contentRootOf.get(moduleContentRoot);
for (VirtualFile root : hierarchy) {
if (sourceRootOf.get(root).contains(module)) {
return ContainerUtil.findInstance(ModuleRootManager.getInstance(module).getOrderEntries(), ModuleSourceOrderEntry.class);
}
if (libClassRootEntries.containsKey(root)) {
return null;
}
}
return null;
}
}
@NotNull
private static Pair<DirectoryInfo, String> calcDirectoryInfo(@NotNull final VirtualFile root,
@NotNull final List<VirtualFile> hierarchy,
@NotNull RootInfo info) {
VirtualFile moduleContentRoot = info.findNearestContentRoot(hierarchy);
VirtualFile libraryClassRoot = info.findLibraryRootInfo(hierarchy, false);
VirtualFile librarySourceRoot = info.findLibraryRootInfo(hierarchy, true);
boolean inProject = moduleContentRoot != null || libraryClassRoot != null || librarySourceRoot != null;
VirtualFile nearestContentRoot;
if (inProject) {
nearestContentRoot = moduleContentRoot;
}
else {
nearestContentRoot = info.findNearestContentRootForExcluded(hierarchy);
if (nearestContentRoot == null) {
return new Pair<>(NonProjectDirectoryInfo.EXCLUDED, null);
}
}
VirtualFile sourceRoot = info.findPackageRootInfo(hierarchy, moduleContentRoot, null, librarySourceRoot);
VirtualFile moduleSourceRoot = info.findPackageRootInfo(hierarchy, moduleContentRoot, null, null);
boolean inModuleSources = moduleSourceRoot != null;
boolean inLibrarySource = librarySourceRoot != null;
int typeId = moduleSourceRoot != null ? info.rootTypeId.get(moduleSourceRoot) : 0;
Module module = info.contentRootOf.get(nearestContentRoot);
DirectoryInfo directoryInfo =
new DirectoryInfoImpl(root, module, nearestContentRoot, sourceRoot, libraryClassRoot, inModuleSources, inLibrarySource, !inProject, typeId);
String packagePrefix = info.calcPackagePrefix(root, hierarchy, moduleContentRoot, libraryClassRoot, librarySourceRoot);
return Pair.create(directoryInfo, packagePrefix);
}
@NotNull
public List<OrderEntry> getOrderEntries(@NotNull DirectoryInfo info) {
if (!(info instanceof DirectoryInfoImpl)) return Collections.emptyList();
return getOrderEntryGraph().getOrderEntries(((DirectoryInfoImpl)info).getRoot());
}
  /** Cache mapping a directory to its computed {@link DirectoryInfo}; eviction policy is up to the implementation. */
  public interface InfoCache {
    /** Returns the cached info for {@code dir}, or {@code null} if nothing is cached yet. */
    @Nullable
    DirectoryInfo getCachedInfo(@NotNull VirtualFile dir);
    /** Stores {@code info} as the cached value for {@code dir}. */
    void cacheInfo(@NotNull VirtualFile dir, @NotNull DirectoryInfo info);
  }
  /**
   * An LRU cache with synchronization around the primary cache operations (get() and insertion
   * of a newly created value). Other map operations are not synchronized.
   *
   * <p>Note on concurrency: {@link #createValue} is deliberately invoked OUTSIDE {@code myLock},
   * so a slow computation does not block other readers. Two threads may therefore race and
   * compute the value for the same key twice; the later {@code put} wins. This is acceptable
   * only because the created values are interchangeable for a given key.
   */
  abstract static class SynchronizedSLRUCache<K, V> extends SLRUMap<K,V> {
    protected final Object myLock = new Object();
    protected SynchronizedSLRUCache(final int protectedQueueSize, final int probationalQueueSize) {
      super(protectedQueueSize, probationalQueueSize);
    }
    /** Computes the value to cache for {@code key}; called without holding {@code myLock}. */
    @NotNull
    public abstract V createValue(K key);
    @Override
    @NotNull
    public V get(K key) {
      V value;
      // Fast path: return an already-cached value under the lock.
      synchronized (myLock) {
        value = super.get(key);
        if (value != null) {
          return value;
        }
      }
      // Slow path: compute outside the lock (see class comment about the benign race).
      value = createValue(key);
      synchronized (myLock) {
        put(key, value);
      }
      return value;
    }
  }
}
| semonte/intellij-community | platform/projectModel-impl/src/com/intellij/openapi/roots/impl/RootIndex.java | Java | apache-2.0 | 31,099 |
// Copyright 2017 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package com.twitter.intellij.pants.execution;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.twitter.intellij.pants.testFramework.OSSPantsIntegrationTest;
import com.twitter.intellij.pants.util.TempFile;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Optional;

import static com.twitter.intellij.pants.execution.PantsMakeBeforeRun.ERROR_TAG;
// Needs to extend `OSSPantsIntegrationTest` because LocalFileSystem needs to be initialized.
public class PantsMakeMessageTest extends OSSPantsIntegrationTest {

  public void testErrorMessageWithFilePath() {
    assertParsesErrorLocation("pants_export_run");
  }

  public void testErrorMessageWithStrangerFilePath() {
    // File paths containing spaces must still be recognized.
    assertParsesErrorLocation("pants_exp ort_run");
  }

  /**
   * Asserts that an "[error]" compiler line pointing at an existing file (created with the
   * given temp-file prefix) is parsed into the file plus line/column numbers.
   *
   * FIX: the IOException from TempFile.create was previously swallowed by an empty catch,
   * which made these tests pass vacuously when the temp file could not be created.
   */
  private void assertParsesErrorLocation(String tempFilePrefix) {
    try (TempFile tempFile = TempFile.create(tempFilePrefix, ".out")) {
      Optional<PantsMakeBeforeRun.ParseResult> result = PantsMakeBeforeRun.ParseResult.parseErrorLocation(
        "         [error] " + tempFile.getFile().getAbsolutePath() + ":23:1: cannot find symbol",
        ERROR_TAG
      );
      assertTrue(result.isPresent());
      assertEquals(
        LocalFileSystem.getInstance()
          .findFileByIoFile(tempFile.getFile()),
        result.get().getFile()
      );
      assertEquals(23, result.get().getLineNumber());
      assertEquals(1, result.get().getColumnNumber());
    }
    catch (IOException e) {
      // Fail loudly instead of silently skipping the assertions.
      throw new UncheckedIOException(e);
    }
  }

  public void testErrorMessageWithInvalidFilePath() {
    // A path that does not exist on disk must not produce a parse result.
    Optional<PantsMakeBeforeRun.ParseResult> result = PantsMakeBeforeRun.ParseResult.parseErrorLocation(
      "         [error] /non/existent/file/path:23:1: cannot find symbol",
      ERROR_TAG
    );
    assertFalse(result.isPresent());
  }

  public void testErrorMessageWithNoFilePath() {
    // An [error] line with no path:line:column triple must not parse.
    Optional<PantsMakeBeforeRun.ParseResult> result = PantsMakeBeforeRun.ParseResult.parseErrorLocation(
      "         [error]     String greeting = Greeting.greetFromRXesource(\"org/pantsbuild/example/hello/world.txt\");\n",
      ERROR_TAG
    );
    assertFalse(result.isPresent());
  }

  public void testErrorMessageWithTwoColons() {
    // Colons inside source text must not be mistaken for line/column markers.
    Optional<PantsMakeBeforeRun.ParseResult> result = PantsMakeBeforeRun.ParseResult.parseErrorLocation(
      "[error]     Assert.assertEquals(\"0:00:00.000\", ManagementUtils.getAverageAge(d0));",
      ERROR_TAG
    );
    assertFalse(result.isPresent());
  }
}
| wisechengyi/intellij-pants-plugin | tests/com/twitter/intellij/pants/execution/PantsMakeMessageTest.java | Java | apache-2.0 | 3,201 |
L.Polygon.polygonEditor = L.Polygon.extend({
_prepareMapIfNeeded: function() {
var that = this;
if(this._map._editablePolygons != null) {
return;
}
// Container for all editable polylines on this map:
this._map._editablePolygons = [];
// Click anywhere on map to add a new point-polyline:
if(this._options.newPolygons) {
// console.log('click na map');
that._map.on('click', function(event) {
// console.log('click, target=' + (event.target == that._map) + ' type=' + event.type);
if(that.isBusy())
return;
that._setBusy(true);
var latLng = event.latlng;
if(that._options.newPolygonConfirmMessage)
if(!confirm(that._options.newPolygonConfirmMessage))
return
var contexts = [{'originalPolygonNo': null, 'originalPointNo': null}];
L.Polygon.PolygonEditor([latLng], that._options, contexts).addTo(that._map);
that._setBusy(false);
that._showBoundMarkers();
});
}
},
/**
* Will add all needed methods to this polyline.
*/
_addMethods: function() {
var that = this;
this._init = function(options, contexts) {
this._prepareMapIfNeeded();
/*
* Utility method added to this map to retreive editable
* polylines.
*/
if(!this._map.getEditablePolylines) {
this._map.getEditablePolylines = function() {
return that._map._editablePolygons;
}
}
/**
* Since all point editing is done by marker events, markers
* will be the main holder of the polyline points locations.
* Every marker contains a reference to the newPointMarker
* *before* him (=> the first marker has newPointMarker=null).
*/
this._parseOptions(options);
this._setMarkers();
var map = this._map;
this._map.on("zoomend", function(e) {
that._showBoundMarkers();
});
this._map.on("moveend", function(e) {
that._showBoundMarkers();
});
this._lastMouseEvent = undefined;
if('_desiredPolygonNo' in this) {
this._map._editablePolygons.splice(this._desiredPolygonNo, 0, this);
} else {
this._map._editablePolygons.push(this);
}
};
/**
* Check if there is *any* busy editable polyline on this map.
*/
this.isBusy = function() {
for(var i = 0; i < that._map._editablePolygons.length; i++)
if(that._map._editablePolygons[i]._isBusy())
return true;
return false;
};
/**
* Check if is busy adding/moving new nodes. Note, there may be
* *other* editable polylines on the same map which *are* busy.
*/
this._isBusy = function() {
return that._busy;
};
this._setBusy = function(busy) {
that._busy = busy;
};
/**
* Get markers for this polyline.
*/
this.getPoints = function() {
return this._markers;
};
this._parseOptions = function(options) {
if(!options)
options = {};
// Do not show edit markers if more than maxMarkers would be shown:
if(!('maxMarkers' in options)) {
options.maxMarkers = 100;
}
this.maxMarkers = options.maxMarkers;
// Do not allow edges to be destroyed (split polygon in two)
if(!('deletableEdges' in options)) {
options.deletableEdges = false;
}
this.deletableEdges = options.deletableEdges;
// Icons:
if(options.pointIcon) {
this.pointIcon = options.pointIcon;
} else {
this.pointIcon = L.icon({ iconUrl: 'editmarker.png', iconSize: [11, 11], iconAnchor: [6, 6] });
}
if(options.newPointIcon) {
this.newPointIcon = options.newPointIcon;
} else {
this.newPointIcon = L.icon({ iconUrl: 'editmarker2.png', iconSize: [11, 11], iconAnchor: [6, 6] });
}
};
/**
* Show only markers in current map bounds *is* there are only a certain
* number of markers. This method is called on eventy that change map
* bounds.
*/
this._showBoundMarkers = function() {
if(that.isBusy()) {
//console.log('Do not show because busy!');
return;
}
var bounds = that._map.getBounds();
var found = 0;
for(var polygonNo in that._map._editablePolygons) {
var polyline = that._map._editablePolygons[polygonNo];
for(var markerNo in polyline._markers) {
var marker = polyline._markers[markerNo];
if(bounds.contains(marker.getLatLng()))
found += 1;
}
}
//console.log('found=' + found);
for(var polygonNo in that._map._editablePolygons) {
var polyline = that._map._editablePolygons[polygonNo];
for(var markerNo in polyline._markers) {
var marker = polyline._markers[markerNo];
if(found < that.maxMarkers) {
that._setMarkerVisible(marker, bounds.contains(marker.getLatLng()));
that._setMarkerVisible(marker.newPointMarker, bounds.contains(marker.getLatLng()));
} else {
that._setMarkerVisible(marker, false);
that._setMarkerVisible(marker.newPointMarker, false);
}
}
}
};
/**
* Used when adding/moving points in order to disable the user to mess
* with other markers (+ easier to decide where to put the point
* without too many markers).
*/
this._hideAll = function(except) {
for(var polygonNo in that._map._editablePolygons) {
//console.log("hide " + polygonNo + " markers");
var polyline = that._map._editablePolygons[polygonNo];
for(var markerNo in polyline._markers) {
var marker = polyline._markers[markerNo];
if(except == null || except != marker)
polyline._setMarkerVisible(marker, false);
if(except == null || except != marker.newPointMarker)
polyline._setMarkerVisible(marker.newPointMarker, false);
}
}
}
/**
* Show/hide marker.
*/
this._setMarkerVisible = function(marker, show) {
if(!marker)
return;
var map = this._map;
if(show) {
if(!marker._visible) {
if(!marker._map) { // First show fo this marker:
marker.addTo(map);
} else { // Marker was already shown and hidden:
map.addLayer(marker);
}
marker._map = map;
}
marker._visible = true;
} else {
if(marker._visible) {
map.removeLayer(marker);
}
marker._visible = false;
}
};
this.updateLatLngs = function (latlngs) {
this._eraseMarkers();
this.setLatLngs(latlngs);
that._setMarkers();
this._reloadPolygon();
return this;
}
/**
* Reload polyline. If it is busy, then the bound markers will not be
* shown. Call _setBusy(false) before this method!
*/
this._reloadPolygon = function(fixAroundPointNo) {
// that._setMarkers();
that.setLatLngs(that._getMarkerLatLngs());
if(fixAroundPointNo != null)
that._fixNeighbourPositions(fixAroundPointNo);
that._showBoundMarkers();
}
/**
* Reload polyline. If it is busy, then the bound markers will not be
* shown. Call _setBusy(false) before this method!
*/
this._setMarkers = function() {
this._markers = [];
var that = this;
var points = this.getLatLngs();
var length = points.length;
for(var i = 0; i < length; i++) {
var marker = this._addMarkers(i, points[i]);
if(! ('context' in marker)) {
marker.context = {}
if(that._contexts != null) {
marker.context = contexts[i];
}
}
if(marker.context && ! ('originalPointNo' in marker.context))
marker.context.originalPointNo = i;
if(marker.context && ! ('originalPolygonNo' in marker.context))
marker.context.originalPolygonNo = that._map._editablePolygons.length;
}
}
/**
* Reload polyline. If it is busy, then the bound markers will not be
* shown. Call _setBusy(false) before this method!
*/
this._eraseMarkers = function() {
var that = this;
var points = this._markers;
var length = points.length;
for(var i = 0; i < length; i++) {
var marker = points[i];
this._map.removeLayer(marker.newPointMarker);
this._map.removeLayer(marker);
}
this._markers = [];
}
/**
* Add two markers (a point marker and his newPointMarker) for a
* single point.
*
* Markers are not added on the map here, the marker.addTo(map) is called
* only later when needed first time because of performance issues.
*/
this._addMarkers = function(pointNo, latLng, fixNeighbourPositions) {
var that = this;
var points = this.getLatLngs();
var marker = L.marker(latLng, {draggable: true, icon: this.pointIcon});
marker.newPointMarker = null;
marker.on('mousedown', function (e) {
that._lastMouseEvent = e.originalEvent;
});
marker.on('dragstart', function(event) {
var pointNo = that._getPointNo(event.target);
//console.log("pointNo", pointNo);
var previousPoint = pointNo == null ? null : (pointNo - 1 >= 0 ? that._markers[pointNo - 1].getLatLng() : that._markers[that._markers.length - 1].getLatLng());
var nextPoint = pointNo < that._markers.length - 1 ? that._markers[pointNo + 1].getLatLng() : that._markers[0].getLatLng();
that._edited = true;
that._setupDragLines(marker, previousPoint, nextPoint);
that._setBusy(true);
that._hideAll(marker);
});
marker.on('dragend', function(event) {
that._lastMouseEvent = undefined;
var marker = event.target;
var pointNo = that._getPointNo(event.target);
setTimeout(function() {
that._setBusy(false);
that._reloadPolygon(pointNo);
}, 25);
});
// deleting in click and context menu to allow for touch device tap-to-remove
marker.on('contextmenu dblclick', function(event) {
var corners = that._markers.length;
if (corners <= 3)
return;
var marker = event.target;
var pointNo = that._getPointNo(event.target);
//console.log("corners:", corners, "pointNo:", pointNo);
that._edited = true;
that._map.removeLayer(marker);
that._map.removeLayer(newPointMarker);
that._markers.splice(pointNo, 1);
that._reloadPolygon(pointNo);
});
var previousPoint = points[pointNo == 0 ? points.length - 1 : pointNo - 1];
var newPointMarker = L.marker([(latLng.lat + previousPoint.lat) / 2.,
(latLng.lng + previousPoint.lng) / 2.],
{draggable: true, icon: this.newPointIcon});
marker.newPointMarker = newPointMarker;
newPointMarker.on('dragstart', function(event) {
that._lastMouseEvent = event.originalEvent;
var pointNo = that._getPointNo(event.target);
//console.log("pointNo", pointNo);
var previousPoint = pointNo - 1 >= 0 ? that._markers[pointNo - 1].getLatLng() : that._markers[that._markers.length - 1].getLatLng();
var nextPoint = that._markers[pointNo].getLatLng();
that._edited = true;
that._setupDragLines(marker.newPointMarker, previousPoint, nextPoint);
that._setBusy(true);
that._hideAll(marker.newPointMarker);
});
newPointMarker.on('dragend', function(event) {
// console.log("dragend", event);
var marker = event.target;
var pointNo = that._getPointNo(event.target);
that._addMarkers(pointNo, marker.getLatLng(), true);
setTimeout(function() {
that._setBusy(false);
that._reloadPolygon();
}, 25);
});
newPointMarker.on('click', function(event) {
// console.log("click", event);
var marker = event.target;
var pointNo = that._getPointNo(event.target);
that._addMarkers(pointNo, marker.getLatLng(), true);
setTimeout(function() {
that._reloadPolygon();
}, 25);
});
// if (this._options.deletableEdges) {
// newPointMarker.on('contextmenu', function(event) {
// // 1. Remove this polyline from map
// var marker = event.target;
// var pointNo = that._getPointNo(marker);
// var markers = that.getPoints();
// that._hideAll();
// var secondPartMarkers = that._markers.slice(pointNo, pointNo.length);
// that._markers.splice(pointNo, that._markers.length - pointNo);
// that._reloadPolygon();
// var points = [];
// var contexts = [];
// for(var i = 0; i < secondPartMarkers.length; i++) {
// var marker = secondPartMarkers[i];
// points.push(marker.getLatLng());
// contexts.push(marker.context);
// }
// //console.log('points:' + points);
// //console.log('contexts:' + contexts);
// // Need to know the current polyline order numbers, because
// // the splitted one need to be inserted immediately after:
// var originalPolygonNo = that._map._editablePolygons.indexOf(that);
// var newPolygon = L.Polygon.PolygonEditor(points, that._options, contexts, originalPolygonNo + 1)
// .addTo(that._map);
// that._showBoundMarkers();
// //console.log('Done split, _editablePolygons now:' + that._map._editablePolygons.length);
// });
// }
this._markers.splice(pointNo, 0, marker);
if(fixNeighbourPositions) {
this._fixNeighbourPositions(pointNo);
}
return marker;
};
/**
* Fix nearby new point markers when the new point is created.
*/
this._fixNeighbourPositions = function(pointNo) {
var previousMarker = pointNo == 0 ? this._markers[this._markers.length - 1] : this._markers[pointNo - 1];
var marker = this._markers[pointNo];
var nextMarker = pointNo < this._markers.length - 1 ? this._markers[pointNo + 1] : this._markers[0];
//console.log("_fixNeighbourPositions:", pointNo, this._markers.length);
//console.log("markers:", marker, previousMarker, nextMarker);
if(!marker && previousMarker && nextMarker) {
// //console.log("last point deleted!");
nextMarker.newPointMarker.setLatLng([(previousMarker.getLatLng().lat + nextMarker.getLatLng().lat) / 2.,
(previousMarker.getLatLng().lng + nextMarker.getLatLng().lng) / 2.]);
}
if(marker && previousMarker) {
// //console.log("marker && previousMarker");
marker.newPointMarker.setLatLng([(previousMarker.getLatLng().lat + marker.getLatLng().lat) / 2.,
(previousMarker.getLatLng().lng + marker.getLatLng().lng) / 2.]);
}
if(marker && nextMarker) {
// //console.log("marker && nextMarker");
nextMarker.newPointMarker.setLatLng([(marker.getLatLng().lat + nextMarker.getLatLng().lat) / 2.,
(marker.getLatLng().lng + nextMarker.getLatLng().lng) / 2.]);
}
};
/**
* Find the order number of the marker.
*/
this._getPointNo = function(marker) {
for(var i = 0; i < this._markers.length; i++) {
if(marker == this._markers[i] || marker == this._markers[i].newPointMarker) {
return i;
}
}
return -1;
};
/**
* Get polyline latLngs based on marker positions.
*/
this._getMarkerLatLngs = function() {
var result = [];
for(var i = 0; i < this._markers.length; i++)
result.push(this._markers[i].getLatLng());
return result;
};
this._setupDragLines = function(marker, point1, point2) {
// //console.log("_setupDragLines", marker, point1, point2);
var line1 = null;
var line2 = null;
var markerLatlng = marker.getLatLng();
var offsetLat = 0;
var offsetLng = 0;
if (this._lastMouseEvent) {
var mousePoint = this._map.mouseEventToLatLng(this._lastMouseEvent);
offsetLat = markerLatlng.lat - mousePoint.lat;
offsetLng = markerLatlng.lng - mousePoint.lng;
// console.log(markerLatlng, mouseLatlng);
}
// console.log(markerLatlng, this._lastMouseEvent);
if(point1) line1 = L.polyline([markerLatlng, point1], {dashArray: "5,5", weight: 1})
.addTo(that._map);
if(point2) line2 = L.polyline([markerLatlng, point1], {dashArray: "5,5", weight: 1})
.addTo(that._map);
var moveHandler = function(event) {
// add the offsets from the marker
// so aux lines appear in the tip of the marker
var latlngPoint = L.latLng(event.latlng.lat + offsetLat, event.latlng.lng + offsetLng);
if(line1)
line1.setLatLngs([latlngPoint, point1]);
if(line2)
line2.setLatLngs([latlngPoint, point2]);
};
var stopHandler = function(event) {
that._map.off('mousemove', moveHandler);
marker.off('dragend', stopHandler);
if(line1) that._map.removeLayer(line1);
if(line2) that._map.removeLayer(line2);
//console.log('STOPPED');
if(event.target != that._map) {
that._map.fire('click', event);
}
};
that._map.on('mousemove', moveHandler);
marker.on('dragend', stopHandler);
that._map.once('click', stopHandler);
marker.once('click', stopHandler);
if(line1) line1.once('click', stopHandler);
if(line2) line2.once('click', stopHandler);
}
}
});
L.Polygon.polygonEditor.addInitHook(function () {
    // Hack to keep reference to map: wrap addTo() so we can capture the map
    // and finish initialization once the polygon is actually on a map.
    this.originalAddTo = this.addTo;
    this.addTo = function(map) {
        this.originalAddTo(map);
        this._map = map;
        this._addMethods();

        /**
         * When adding a new point we must disable the user to mess with other
         * markers. One way is to check everywhere if the user is busy. The
         * other is to just remove other markers when the user is doing
         * something.
         *
         * TODO: Decide the right way to do this and then leave only _busy or
         * _hideAll().
         */
        this._busy = false;
        this._initialized = false;
        this._edited = false;

        this._init(this._options, this._contexts);

        this._initialized = true;
        return this;
    };
});
/**
* Construct a new editable polyline.
*
* latlngs ... a list of points (or two-element tuples with coordinates)
* options ... polyline options
* contexts ... custom contexts for every point in the polyline. Must have the
* same number of elements as latlngs and this data will be
 *             preserved when new points are added or polylines split.
* polygonNo ... insert this polyline in a specific order (used when splitting).
*
* More about contexts:
* This is an array of objects that will be kept as "context" for every
* point. Marker will keep this value as marker.context. New markers will
* have context set to null.
*
* Contexts must be the same size as the polyline size!
*
* By default, even without calling this method -- every marker will have
* context with one value: marker.context.originalPointNo with the
* original order number of this point. The order may change if some
 * markers before this one are deleted or new ones added.
*/
/**
 * Factory for an editable polygon; see the comment block above for the
 * semantics of latlngs, options, contexts and polygonNo.
 */
L.Polygon.PolygonEditor = function(latlngs, options, contexts, polygonNo) {
    var result = new L.Polygon.polygonEditor(latlngs, options);

    result._options = options;
    result._contexts = contexts;
    // FIX: terminate the statement explicitly instead of relying on
    // automatic semicolon insertion.
    result._desiredPolygonNo = polygonNo;

    return result;
};
| NYPL/building-inspector | app/assets/javascripts/lib/vendor/leaflet-editable-polygon.js | JavaScript | apache-2.0 | 23,188 |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the Apache 2 License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using ServiceFabricPersistence.Interfaces;
using Microsoft.ServiceFabric;
using Microsoft.ServiceFabric.Actors;
namespace ServiceFabricPersistence
{
public class ServiceFabricSnapshotStore : Actor<ServiceFabricSnapshotStoreState>, IServiceFabricSnapshotStore
{
public override Task OnActivateAsync()
{
if (this.State == null)
{
this.State = new ServiceFabricSnapshotStoreState();
}
ActorEventSource.Current.ActorMessage(this, "State initialized to {0}", this.State);
return Task.FromResult(true);
}
public Task<SnapshotEntry> SelectSnapshotAsync(long maxSequenceNr, DateTime maxTimeStamp)
{
IEnumerable<KeyValuePair<long, SnapshotEntry>> snapshots = State.snapshotStore;
ActorEventSource.Current.ActorMessage(this, "selectSnapshotAsync {0}-{1}", maxSequenceNr, maxTimeStamp);
if (maxSequenceNr > 0 && maxSequenceNr < long.MaxValue)
{
snapshots = from e in this.State.snapshotStore
where e.Key <= maxSequenceNr
select e;
}
if(maxTimeStamp > DateTime.MinValue && maxTimeStamp < DateTime.MaxValue)
{
snapshots = from e in snapshots
where e.Value.Timestamp == maxTimeStamp
select e;
}
//TODO: Double-check selection criteria
var snapshot = snapshots.ToList<KeyValuePair<long, SnapshotEntry>>();
var retValue = snapshot.Any() ? snapshot.Last().Value : null;
return Task.FromResult(retValue);
}
public Task WriteSnapshotAsync(SnapshotEntry s)
{
ActorEventSource.Current.ActorMessage(this, "writeSnapshot {0}-{1}", s.SequenceNr, s.Timestamp);
State.snapshotStore.Add(s.SequenceNr, s);
return Task.FromResult(true);
}
public Task DeleteSnapshotAsync(long maxSequenceNr, DateTime maxTimeStamp)
{
IEnumerable<KeyValuePair<long, SnapshotEntry>> snapshots = State.snapshotStore;
ActorEventSource.Current.ActorMessage(this, "deleteSnapshot {0}-{1}", maxSequenceNr, maxTimeStamp);
ActorEventSource.Current.ActorMessage(this, "DeleteSnapshot {0}-{1}-{2}", maxSequenceNr, maxTimeStamp);
if (maxSequenceNr > 0 && maxSequenceNr < long.MaxValue)
{
snapshots = from e in this.State.snapshotStore
where e.Key <= maxSequenceNr
select e;
}
if (maxTimeStamp > DateTime.MinValue && maxTimeStamp < DateTime.MaxValue)
{
snapshots = from e in snapshots
where e.Value.Timestamp == maxTimeStamp
select e;
}
foreach (var s in snapshots)
State.snapshotStore.Remove(s.Key);
return Task.FromResult(true);
}
public Task DeleteSnapshotManyAsync(long maxSequenceNr, DateTime maxTimeStamp)
{
ActorEventSource.Current.ActorMessage(this, "DeleteSnapshotMany {0}-{1}", maxSequenceNr, maxTimeStamp);
if (maxSequenceNr > 0 && maxSequenceNr < long.MaxValue)
{
var snapshot = from e in this.State.snapshotStore
where e.Key == maxSequenceNr
select e;
State.snapshotStore.Remove(snapshot.First().Key);
}
if (maxTimeStamp > DateTime.MinValue && maxTimeStamp < DateTime.MaxValue)
{
var snapshot = from e in this.State.snapshotStore
where e.Value.Timestamp == maxTimeStamp
select e;
State.snapshotStore.Remove(snapshot.First().Key);
}
return Task.FromResult(true);
}
}
}
| yonglehou/Akka.Persistence.ServiceFabric | src/ServiceFabricPersistence/ServiceFabricSnapshotStore.cs | C# | apache-2.0 | 4,298 |
/*
* Copyright 2011 Vincent Behar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rundeck.api.domain;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Result of importing some jobs into RunDeck
*
* @author Vincent Behar
*/
public class RundeckJobsImportResult implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Jobs imported successfully. */
    private final List<RundeckJob> succeededJobs = new ArrayList<RundeckJob>();

    /** Jobs skipped during the import. */
    private final List<RundeckJob> skippedJobs = new ArrayList<RundeckJob>();

    /** Jobs that failed to import, mapped to the corresponding error message. */
    private final Map<RundeckJob, String> failedJobs = new HashMap<RundeckJob, String>();

    /** Records a job that was imported successfully. */
    public void addSucceededJob(RundeckJob job) {
        succeededJobs.add(job);
    }

    /** Records a job that was skipped. */
    public void addSkippedJob(RundeckJob job) {
        skippedJobs.add(job);
    }

    /** Records a job that failed to import, along with the failure reason. */
    public void addFailedJob(RundeckJob job, String errorMessage) {
        failedJobs.put(job, errorMessage);
    }

    public List<RundeckJob> getSucceededJobs() {
        return succeededJobs;
    }

    public List<RundeckJob> getSkippedJobs() {
        return skippedJobs;
    }

    public Map<RundeckJob, String> getFailedJobs() {
        return failedJobs;
    }

    @Override
    public String toString() {
        return "RundeckJobsImportResult [succeededJobs=" + succeededJobs + ", skippedJobs=" + skippedJobs
               + ", failedJobs=" + failedJobs + "]";
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hash = 1;
        hash = prime * hash + (failedJobs == null ? 0 : failedJobs.hashCode());
        hash = prime * hash + (skippedJobs == null ? 0 : skippedJobs.hashCode());
        hash = prime * hash + (succeededJobs == null ? 0 : succeededJobs.hashCode());
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        RundeckJobsImportResult other = (RundeckJobsImportResult) obj;
        return (failedJobs == null ? other.failedJobs == null : failedJobs.equals(other.failedJobs))
               && (skippedJobs == null ? other.skippedJobs == null : skippedJobs.equals(other.skippedJobs))
               && (succeededJobs == null ? other.succeededJobs == null : succeededJobs.equals(other.succeededJobs));
    }
}
| vbehar/rundeck-api-java-client | src/main/java/org/rundeck/api/domain/RundeckJobsImportResult.java | Java | apache-2.0 | 3,268 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # These placeholders are substituted by `git archive` (export-subst);
    # in a plain checkout they stay literal, and setup.py/versioneer.py greps
    # for the variable names, so each keyword lives on its own line.
    return {
        "refnames": "$Format:%d$",
        "full": "$Format:%H$",
        "date": "$Format:%ci$",
    }
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    Attributes are populated dynamically by :func:`get_config`.
    """


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    cfg = VersioneerConfig()
    # These values were baked in when 'setup.py versioneer' generated
    # _version.py; plain instance attributes, set in one shot.
    cfg.__dict__.update(
        VCS="git",
        style="pep440",
        tag_prefix="",
        parentdir_prefix="forge-",
        versionfile_source="forge/_version.py",
        verbose=False,
    )
    return cfg
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


# Registry of VCS handler functions, filled in by @register_vcs_handler.
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s).

    Tries each name in *commands* until one can be spawned; returns a tuple
    ``(stdout, returncode)``.  ``(None, None)`` means no command could be run
    at all; ``(None, returncode)`` means the command ran but failed.
    """
    assert isinstance(commands, list)
    process = None
    for command in commands:
        dispcmd = str([command] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen(
                [command] + args, cwd=cwd, env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                # This candidate does not exist; try the next one.
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(e)
            return None, None
    else:
        # Loop finished without a break: none of the candidates exist.
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    rootdirs = []
    for _ in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            # Everything after the prefix is the version string.
            return {
                "version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        rootdirs.append(root)
        root = os.path.dirname(root)  # up a level
    if verbose:
        print("Tried directories %s but none started with prefix %s"
              % (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans *versionfile_abs* for the ``git_refnames``/``git_full``/``git_date``
    keyword assignments and returns whichever were found as a dict.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # FIX: use a context manager so the file handle is closed even if a
        # non-EnvironmentError escapes while scanning; the original only
        # closed the handle on the success path.
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except EnvironmentError:
        # Missing/unreadable file: return whatever was collected (empty dict).
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
    """Return a "+" if we don't already have one, else return a "."

    Looks at pieces["closest-tag"]: when the tag already carries a PEP 440
    local-version separator ("+"), subsequent segments are joined with ".".
    Guards against the key being present but None — `dict.get(k, default)`
    returns the stored None, not the default, so the original
    `"+" in pieces.get("closest-tag", "")` raised TypeError for untagged
    builds if called on them.
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += "%s%d.g%s" % (plus_or_dot(pieces),
                                      pieces["distance"], pieces["short"])
    else:
        # exception #1: no tag at all
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
    # A dirty tree always gets the ".dirty" marker appended last.
    if pieces["dirty"]:
        rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag at all
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return "%s.post.dev%d" % (tag, pieces["distance"])
    return tag
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        parts = [pieces["closest-tag"]]
        if pieces["distance"] or pieces["dirty"]:
            parts.append(".post%d" % pieces["distance"])
            if pieces["dirty"]:
                parts.append(".dev0")
            parts.append(plus_or_dot(pieces))
            parts.append("g%s" % pieces["short"])
    else:
        # exception #1: no tag at all
        parts = ["0.post%d" % pieces["distance"]]
        if pieces["dirty"]:
            parts.append(".dev0")
        parts.append("+g%s" % pieces["short"])
    return "".join(parts)
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    # A clean, exactly-tagged build renders as the bare tag.
    if tag and not (pieces["distance"] or pieces["dirty"]):
        return tag
    base = ("%s.post%d" % (tag, pieces["distance"])) if tag \
        else "0.post%d" % pieces["distance"]
    return base + (".dev0" if pieces["dirty"] else "")
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"]:
            rendered = "%s-%d-g%s" % (rendered, pieces["distance"],
                                      pieces["short"])
    else:
        # exception #1: no tag — bare short hash
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        # Distance and hash are always included in the long form.
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: no tag — bare short hash
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # An upstream failure short-circuits rendering entirely.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
def get_versions():
    """Get version information or return default if unable to do so.

    Tries each strategy in order: expanded git keywords, `git describe` on
    the working tree, then the parent directory name; falls through to a
    "0+unknown" placeholder dict when all of them raise NotThisMethod.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose
    # Strategy 1: git-archive keyword expansion (works for tarballs).
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass
    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # __file__ is unavailable (py2exe/bbfreeze/etc.).
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}
    # Strategy 2: ask git directly (checked-out source tree).
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass
    # Strategy 3: infer from the unpacked tarball's directory name.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
| sipplified/forge | forge/_version.py | Python | apache-2.0 | 18,448 |
(function () {
  'use strict';

  /**
   * Registers the `helpPanel` directive. The directive renders the shared
   * help-panel template (resolved against the widgets base path) and
   * transcludes the element's original contents into it.
   */
  angular
    .module('horizon.framework.widgets.help-panel', [])
    .directive('helpPanel', ['horizon.framework.widgets.basePath',
      function (basePath) {
        return {
          templateUrl: basePath + 'help-panel/help-panel.html',
          transclude: true
        };
      }
    ]);
})();
<script>
	// Initialise the jQuery UI datepicker on the due-date field once the
	// DOM is ready (ISO yy-mm-dd format, month/year selectable, up to 50
	// years in the past).
	$(document).ready(function () {
		$(function() {
			$( "#datepicker" ).datepicker({
				changeMonth: true,
				changeYear: true,
				dateFormat: "yy-mm-dd",
				yearRange: "c-50,c"
			});
		});
	});

	// Client-side required-field check used by the form's onsubmit handler:
	// both the milestone text and the due date must be non-empty.
	// NOTE(review): whitespace-only values still pass this check — confirm
	// whether server-side validation covers that case.
	function check_input(){
		var temp = $('#milestone').val();
		var temp1 = $('#datepicker').val();
		if(temp != '' && temp1 != '')
			return true;
		else
			return false;
	}
</script>
<div id="com_proj_container">
	<div id="title_container" class="title_post">
		<div id="title">Edit Project Milestone Details</div>
	</div>
	<br />
	<div class="body_post">
		<?php
		// Build URL slugs (spaces -> underscores) for the committee and
		// project names used in the update route below.
		$str = str_replace(' ','_',$committee_info->name); $str1 = str_replace(' ','_',$project_info->project);?>
		<?php
		// Open the edit form; submission is gated by check_input() above.
		$attr = array('onsubmit' => 'return check_input()'); echo form_open('committees/project_milestones/'.$str.'/'.$str1.'/update/'.$proj_milestone->id, $attr);?>
		<table style="width:100%">
			<tr>
				<td colspan="3">
					<?php
					// Milestone description, pre-filled with the current value.
					echo form_label('Milestone:', 'milestone').' ';
					echo form_input('milestone', $proj_milestone->milestone, 'id="milestone" style="width:540px;"');
					?>
				</td>
			</tr>
			<tr>
				<td>
					<?php
					// Due date, populated via the datepicker initialised above.
					echo form_label('Date Due:', 'date_due').' ';
					echo form_input('date_due', $proj_milestone->date_due, 'id="datepicker" style="width:110px;"');
					?>
				</td>
				<td>
					<?php
					// Facilitator dropdown, pre-selected to the current assignee.
					echo form_label('Faci:', 'user_list').' ';
					echo form_dropdown('user_list',$user_list,$faci_id, 'class="user_dropdown" style="width:200px;"');
					?>
				</td>
				<td>
					<?php
					// Status is restricted to the two allowed values.
					$status = array(
						'Pending' => 'Pending',
						'Done' => 'Done'
					);
					echo form_label('Status:', 'status').' ';
					echo form_dropdown('status',$status,$proj_milestone->status, 'class="status" style="width:110px;"');
					?>
				</td>
			</tr>
			<tr>
				<td>
				</td>
			</tr>
			<tr>
				<td colspan='3' align="center">
					<input class="readmore_button" type="submit" value="GO!" />
				</td>
			</tr>
		</table>
		<?php echo '</form>';?>
	</div>
</div>
// Copyright 2008 Google Inc. All Rights Reserved.
package org.waveprotocol.wave.model.document.indexed;
import junit.framework.TestCase;
import org.waveprotocol.wave.model.document.AnnotationInterval;
import org.waveprotocol.wave.model.document.DocumentTestCases;
import org.waveprotocol.wave.model.document.MutableDocument;
import org.waveprotocol.wave.model.document.RangedAnnotation;
import org.waveprotocol.wave.model.document.operation.AnnotationBoundaryMapBuilder;
import org.waveprotocol.wave.model.document.operation.Attributes;
import org.waveprotocol.wave.model.document.operation.Automatons;
import org.waveprotocol.wave.model.document.operation.BufferedDocOp;
import org.waveprotocol.wave.model.document.operation.DocInitialization;
import org.waveprotocol.wave.model.document.operation.Nindo;
import org.waveprotocol.wave.model.document.operation.algorithm.DocOpInverter;
import org.waveprotocol.wave.model.document.operation.automaton.DocOpAutomaton.ViolationCollector;
import org.waveprotocol.wave.model.document.operation.automaton.DocumentSchema;
import org.waveprotocol.wave.model.document.operation.impl.AttributesImpl;
import org.waveprotocol.wave.model.document.operation.impl.DocInitializationBuilder;
import org.waveprotocol.wave.model.document.operation.impl.DocOpBuilder;
import org.waveprotocol.wave.model.document.operation.impl.DocOpUtil;
import org.waveprotocol.wave.model.document.operation.impl.DocOpValidator;
import org.waveprotocol.wave.model.document.raw.TextNodeOrganiser;
import org.waveprotocol.wave.model.document.raw.impl.Element;
import org.waveprotocol.wave.model.document.raw.impl.Node;
import org.waveprotocol.wave.model.document.raw.impl.RawDocumentImpl;
import org.waveprotocol.wave.model.document.raw.impl.Text;
import org.waveprotocol.wave.model.document.util.Annotations;
import org.waveprotocol.wave.model.document.util.ContextProviders;
import org.waveprotocol.wave.model.document.util.ContextProviders.TestDocumentContext;
import org.waveprotocol.wave.model.document.util.DocProviders;
import org.waveprotocol.wave.model.document.util.LocalDocument;
import org.waveprotocol.wave.model.document.util.XmlStringBuilder;
import org.waveprotocol.wave.model.operation.OperationException;
import org.waveprotocol.wave.model.operation.OperationRuntimeException;
import org.waveprotocol.wave.model.util.CollectionUtils;
import java.util.Collections;
import java.util.Iterator;
/**
* Tests for IndexedDocumentImpl.
*
*
*/
public class IndexedDocumentImplTest extends TestCase {
/**
 * A parser for documents, used by the shared NindoTestCases suites.
 * Wraps the static doParseDocument/doCopyDocument helpers and renders a
 * document via its toString().
 */
public static final NindoTestCases.DocumentParser<
    IndexedDocumentImpl<Node, Element, Text, ?>> nindoDocumentParser =
    new NindoTestCases.DocumentParser<IndexedDocumentImpl<Node, Element, Text, ?>>() {
      public IndexedDocumentImpl<Node, Element, Text, ?> parseDocument(String documentString) {
        return doParseDocument(documentString);
      }
      public String asString(IndexedDocumentImpl<Node, Element, Text, ?> document) {
        return document.toString();
      }
      @Override
      public IndexedDocumentImpl<Node, Element, Text, ?> copyDocument(
          IndexedDocumentImpl<Node, Element, Text, ?> other) {
        return doCopyDocument(other);
      }
    };
/**
 * A parser for documents, used by the shared DocumentTestCases suites.
 * Identical in behaviour to {@link #nindoDocumentParser} but typed against
 * the DocumentTestCases parser interface.
 */
public static final DocumentTestCases.DocumentParser<
    IndexedDocumentImpl<Node, Element, Text, ?>> documentParser =
    new DocumentTestCases.DocumentParser<IndexedDocumentImpl<Node, Element, Text, ?>>() {
      public IndexedDocumentImpl<Node, Element, Text, ?> parseDocument(String documentString) {
        return doParseDocument(documentString);
      }
      public String asString(IndexedDocumentImpl<Node, Element, Text, ?> document) {
        return document.toString();
      }
      @Override
      public IndexedDocumentImpl<Node, Element, Text, ?> copyDocument(
          IndexedDocumentImpl<Node, Element, Text, ?> other) {
        return doCopyDocument(other);
      }
    };
/**
 * Parses the given XML fragment into an indexed document with no schema
 * constraints and no annotation substrate.
 *
 * @param documentString XML content, wrapped in a synthetic {@code <blah>}
 *     root so a bare fragment always has a single root element
 * @return the parsed indexed document
 */
private static IndexedDocumentImpl<Node, Element, Text, ?>
    doParseDocument(String documentString) {
  return new IndexedDocumentImpl<Node, Element, Text, Void>(
      RawDocumentImpl.PROVIDER.parse("<blah>" + documentString + "</blah>"), null,
      DocumentSchema.NO_SCHEMA_CONSTRAINTS);
}
/**
 * Deep-copies {@code other} by replaying its initialization operation into
 * a fresh, schema-unconstrained document rooted at {@code <doc>}.
 *
 * @throws OperationRuntimeException if replay unexpectedly fails
 */
private static IndexedDocumentImpl<Node, Element, Text, ?> doCopyDocument(
    IndexedDocumentImpl<Node, Element, Text, ?> other) {
  IndexedDocumentImpl<Node, Element, Text, ?> copy =
      new IndexedDocumentImpl<Node, Element, Text, Void>(
          RawDocumentImpl.PROVIDER.create("doc", Attributes.EMPTY_MAP), null,
          DocumentSchema.NO_SCHEMA_CONSTRAINTS);
  try {
    copy.consume(other.asOperation());
  } catch (OperationException e) {
    // A document's own initialization must always be consumable.
    throw new OperationRuntimeException("Copy should not fail", e);
  }
  return copy;
}
/**
 * Runs the shared Nindo test suite covering insertion of text.
 */
public void testNindoTextInsertion() {
  NindoTestCases.runTextInsertionTests(nindoDocumentParser);
}
/**
 * Runs the shared Nindo test suite covering deletion of text.
 */
public void testNindoTextDeletion() {
  NindoTestCases.runTextDeletionTests(nindoDocumentParser);
}
/**
 * Runs the shared Nindo test suite covering insertion of elements.
 */
public void testNindoElementInsertion() {
  NindoTestCases.runElementInsertionTests(nindoDocumentParser);
}
/**
 * Runs the shared Nindo test suite covering deletion of elements.
 */
public void testNindoElementDeletion() {
  NindoTestCases.runElementDeletionTests(nindoDocumentParser);
}
/**
 * Runs the shared Nindo test suite covering setting and removal of
 * attributes.
 */
public void testNindoAttributes() {
  NindoTestCases.runAttributeTests(nindoDocumentParser);
}
/**
 * Runs the shared Nindo suite's miscellany of tests.
 */
public void testNindoMiscellaneous() {
  NindoTestCases.runMiscellaneousTests(nindoDocumentParser);
}
/**
 * Runs the shared document test suite covering insertion of text.
 */
public void testTextInsertion() {
  DocumentTestCases.runTextInsertionTests(documentParser);
}
/**
 * Runs the shared document test suite covering deletion of text.
 */
public void testTextDeletion() {
  DocumentTestCases.runTextDeletionTests(documentParser);
}
/**
 * Runs the shared document test suite covering insertion of elements.
 */
public void testElementInsertion() {
  DocumentTestCases.runElementInsertionTests(documentParser);
}
/**
 * Runs the shared document test suite covering deletion of elements.
 */
public void testElementDeletion() {
  DocumentTestCases.runElementDeletionTests(documentParser);
}
/**
 * Runs the shared document test suite covering setting and removal of
 * attributes.
 */
public void testAttributes() {
  DocumentTestCases.runAttributeTests(documentParser);
}
/**
 * Runs the shared document suite's miscellany of tests.
 */
public void testMiscellaneous() {
  DocumentTestCases.runMiscellaneousTests(documentParser);
}
/**
 * Tests the asOperation method: parsing a nested document and checking that
 * the initialization it reports matches the expected builder output.
 */
public void testAsOperation() {
  IndexedDocumentImpl<Node, Element, Text, ?> document =
      documentParser.parseDocument(
          "<blip><p><i>ab</i>cd<b>ef</b>gh</p></blip>");
  DocInitialization expected = new DocInitializationBuilder()
      .elementStart("blip", Attributes.EMPTY_MAP)
      .elementStart("p", Attributes.EMPTY_MAP)
      .elementStart("i", Attributes.EMPTY_MAP)
      .characters("ab")
      .elementEnd()
      .characters("cd")
      .elementStart("b", Attributes.EMPTY_MAP)
      .characters("ef")
      .elementEnd()
      .characters("gh")
      .elementEnd()
      .elementEnd()
      .build();
  // NOTE(review): this first call's result is discarded and looks redundant
  // with the call inside the assertion below — confirm whether it exercises
  // caching/idempotence on purpose or can be removed.
  document.asOperation();
  assertEquals(
      DocOpUtil.toConciseString(expected),
      DocOpUtil.toConciseString(document.asOperation()));
}
/**
 * Applies {@code op} to {@code doc} and verifies three things: the
 * resulting invertible DocOp is valid against a pre-mutation copy, applying
 * it to the copy yields the same XML as the mutated original, and applying
 * the inverse restores the copy to the original XML.
 *
 * The System.out prints are debugging aids left in by the original authors.
 */
private void checkApply(IndexedDocument<Node, Element, Text> doc, Nindo op)
    throws OperationException {
  System.out.println("");
  System.out.println("============================================");
  // Snapshot the document's state before mutation.
  DocInitialization docAsOp = doc.asOperation();
  String initial = DocOpUtil.toXmlString(docAsOp);
  IndexedDocument<Node, Element, Text> copy = DocProviders.POJO.build(docAsOp,
      DocumentSchema.NO_SCHEMA_CONSTRAINTS);
  System.out.println(doc);
  // Apply the Nindo, capturing the invertible DocOp it produces.
  BufferedDocOp docOp = doc.consumeAndReturnInvertible(op);
  System.out.println(op + "==========> " + docOp);
  ViolationCollector v = new ViolationCollector();
  if (!DocOpValidator.validate(v, DocumentSchema.NO_SCHEMA_CONSTRAINTS,
      Automatons.fromReadable(copy), docOp).isValid()) {
    v.printDescriptions(System.err);
    fail("Invalid operation");
  }
  // Replaying the op on the copy must reproduce the mutated document.
  copy.consume(docOp);
  System.out.println("=======" + doc + " --------- " + copy);
  assertEquals(
      DocOpUtil.toXmlString(doc.asOperation()),
      DocOpUtil.toXmlString(copy.asOperation()));
  // The inverse must validate and restore the copy to the initial state.
  BufferedDocOp inverted = DocOpInverter.invert(docOp);
  v = new ViolationCollector();
  if (!DocOpValidator.validate(v, DocumentSchema.NO_SCHEMA_CONSTRAINTS,
      Automatons.fromReadable(copy), inverted).isValid()) {
    v.printDescriptions(System.err);
    fail("Invalid operation");
  }
  copy.consume(inverted);
  assertEquals(initial, DocOpUtil.toXmlString(copy.asOperation()));
}
/**
 * Applies a sequence of annotation mutations (insertions, re-annotations,
 * overlapping keys, deletions) and verifies via checkApply that each step's
 * operation is valid and correctly invertible.
 */
public void testReverseAnnotations() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("<a></a>");
  Nindo.Builder b;

  // Insert "x" annotated a=1 just inside the element.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "1");
  b.characters("x");
  b.endAnnotation("a");
  checkApply(doc, b.build());

  // mutating into:
  // <a>
  // x{a=2}
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "2");
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());

  // mutating into:
  // <a>
  // w{a=2}
  // x{a=2, b=1}
  // y{a=3, b=1}
  // z{a=3, b=2}
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "2");
  b.characters("w");
  b.endAnnotation("a");
  b.startAnnotation("b", "1");
  b.skip(1);
  b.startAnnotation("a", "3");
  b.characters("y");
  b.startAnnotation("b", "2");
  b.characters("z");
  b.endAnnotation("a");
  b.endAnnotation("b");
  checkApply(doc, b.build());

  // mutating into:
  // <a>
  // y{a=4, b=1}
  b = new Nindo.Builder();
  b.skip(1);
  b.deleteCharacters(2);
  b.startAnnotation("a", "4");
  b.skip(1);
  b.deleteCharacters(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies an annotation span that covers an insertion whose final
 * component is a deletion is applied and inverted correctly.
 */
public void testAnnotationThroughInsertionEndingInDeletion() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  // Pre-annotate one character with a=2.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "2");
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
  // Insert "x" and delete the following character, all inside an a=1 span.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "1");
  b.characters("x");
  b.deleteCharacters(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies an annotation span over an insertion, where the deletion occurs
 * immediately after the span closes, is applied and inverted correctly.
 */
public void testAnnotationThroughInsertionFollowedByDeletion() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  // Pre-annotate one character with a=2.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "2");
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
  // Insert "x" inside an a=1 span, then delete the next character outside it.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "1");
  b.characters("x");
  b.endAnnotation("a");
  b.deleteCharacters(1);
  checkApply(doc, b.build());
}
/**
 * Verifies clearing an annotation (value null) across an insertion, a
 * deletion and a skip is applied and inverted correctly.
 */
public void testInsertionThenDeletionWithAnnotations() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  // Pre-annotate two characters with a=2.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "2");
  b.skip(2);
  b.endAnnotation("a");
  checkApply(doc, b.build());
  // Clear "a" over: inserted "x", a deleted character, and one kept character.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", null);
  b.characters("x");
  b.deleteCharacters(1);
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies overwriting existing annotation values (a=2/a=3 becoming a=3
 * throughout) is applied and inverted correctly.
 */
public void testReAnnotate() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  // Annotate adjacent characters with a=2 then a=3.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "2");
  b.skip(1);
  b.startAnnotation("a", "3");
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
  // Re-annotate both characters with a=3.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "3");
  b.skip(2);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies an annotation that ends just before a deletion and a new span
 * that starts just after it are applied and inverted correctly.
 */
public void testEndBeforeAndStartAfterDeletion() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", null);
  b.skip(1);
  b.endAnnotation("a");
  b.deleteCharacters(1);
  b.startAnnotation("a", "1");
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Same as testEndBeforeAndStartAfterDeletion, but the post-deletion span
 * covers newly inserted text rather than existing content.
 */
public void testEndBeforeAndStartAfterDeletionThenInsertion() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", null);
  b.skip(1);
  b.endAnnotation("a");
  b.deleteCharacters(1);
  b.startAnnotation("a", "1");
  b.characters("x");
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies changing the annotation value between an insertion and a
 * deletion (a=1 over "x", a=2 over the deletion and a skip) round-trips.
 */
public void testChangeBetweenInsertionAndDeletion() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "1");
  b.characters("x");
  b.startAnnotation("a", "2");
  b.deleteCharacters(1);
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies that annotation spans may be closed in a different order than
 * they were opened (open a, b, c; close a, c, b).
 */
public void testOpenClose() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "1");
  b.startAnnotation("b", "2");
  b.startAnnotation("c", "3");
  b.endAnnotation("a");
  b.endAnnotation("c");
  b.endAnnotation("b");
  checkApply(doc, b.build());
}
/**
 * Verifies re-opening an annotation with the same key and value after an
 * insertion, then closing it, round-trips correctly.
 */
public void testOpenInsertOpenClose() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "1");
  b.characters("xyz");
  b.startAnnotation("a", "1");
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies an annotation opened during element insertion interacts
 * correctly with subsequent attribute update/replace components.
 */
public void testOpenDuringInsertionThenUpdate() throws OperationException {
  IndexedDocument<Node, Element, Text> doc =
      DocProviders.POJO.parse("<q><r/></q>abcdefghijkl");
  Nindo.Builder b;
  // Pre-annotate the first seven items with a=1.
  b = new Nindo.Builder();
  b.startAnnotation("a", "1");
  b.skip(7);
  b.endAnnotation("a");
  checkApply(doc, b.build());
  // Insert <p/>, clear "a" across it, then update/replace attributes.
  b = new Nindo.Builder();
  b.elementStart("p", Attributes.EMPTY_MAP);
  b.startAnnotation("a", null);
  b.elementEnd();
  b.updateAttributes(Collections.singletonMap("u", "v"));
  b.replaceAttributes(new AttributesImpl("v", "u"));
  b.skip(1);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Variant of testOpenDuringInsertionThenUpdate with the elements in the
 * middle of the text and the annotation spanning an attribute update.
 */
public void testOpenDuringInsertionThenUpdate2() throws OperationException {
  IndexedDocument<Node, Element, Text> doc =
      DocProviders.POJO.parse("abcdef<q><r/></q>ghijkl");
  Nindo.Builder b;
  // Pre-annotate five items after the element with a=1.
  b = new Nindo.Builder();
  b.skip(8);
  b.startAnnotation("a", "1");
  b.skip(5);
  b.endAnnotation("a");
  checkApply(doc, b.build());
  // Annotate across the element while updating its attributes.
  b = new Nindo.Builder();
  b.startAnnotation("a", "1");
  b.skip(7);
  b.updateAttributes(Collections.singletonMap("u", "v"));
  //b.replaceAttributes(new AttributesImpl("v", "u"));
  b.skip(3);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies that a deletion in the middle of an annotation span does not
 * corrupt the span on either side of it.
 */
public void testDeletionResets() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefghijkl");
  Nindo.Builder b;
  b = new Nindo.Builder();
  b.startAnnotation("a", "1");
  b.skip(3);
  b.deleteCharacters(3);
  b.skip(3);
  b.endAnnotation("a");
  checkApply(doc, b.build());
}
/**
 * Verifies that converting an invertible DocOp back to a Nindo (preserving
 * redundant annotation boundaries) and applying it to a differently
 * pre-annotated document yields the same final XML.
 */
public void testRedundantAnnotationsPreserved() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefg");
  IndexedDocument<Node, Element, Text> doc2 = DocProviders.POJO.parse("abcdefg");
  Nindo.Builder b;
  // Give doc2 a pre-existing a=1 annotation over the whole text.
  b = new Nindo.Builder();
  b.startAnnotation("a", "1");
  b.skip(7);
  b.endAnnotation("a");
  checkApply(doc2, b.build());
  // Apply a mixed null/2/null annotation to doc, then replay the resulting
  // DocOp (converted back to a Nindo) onto doc2.
  b = new Nindo.Builder();
  b.startAnnotation("a", null);
  b.skip(2);
  b.startAnnotation("a", "2");
  b.skip(2);
  b.startAnnotation("a", null);
  b.skip(3);
  b.endAnnotation("a");
  BufferedDocOp docOp = doc.consumeAndReturnInvertible(b.build());
  doc2.consumeAndReturnInvertible(Nindo.fromDocOp(docOp, true));
  assertEquals(
      DocOpUtil.toXmlString(doc.asOperation()),
      DocOpUtil.toXmlString(doc2.asOperation()));
}
/**
 * Verifies that annotating the whole document in one span produces a
 * minimal DocOp (annotation boundary, one retain, boundary = 3 components)
 * rather than splitting at pre-existing annotation boundaries.
 */
public void testNoRedundantSkips() throws OperationException {
  IndexedDocument<Node, Element, Text> doc = DocProviders.POJO.parse("abcdefghijkl");
  Nindo.Builder b;
  // Create a patchwork of overlapping a/b/c annotation spans.
  b = new Nindo.Builder();
  b.skip(1);
  b.startAnnotation("a", "1");
  b.skip(1);
  b.startAnnotation("b", "1");
  b.skip(1);
  b.endAnnotation("a");
  b.skip(1);
  b.startAnnotation("c", "1");
  b.skip(1);
  b.endAnnotation("c");
  b.skip(1);
  b.endAnnotation("b");
  b.skip(1);
  b.startAnnotation("c", "1");
  b.skip(1);
  b.endAnnotation("c");
  checkApply(doc, b.build());
  // A single whole-document span must not be fragmented by the patchwork.
  b = new Nindo.Builder();
  b.startAnnotation("z", "1");
  b.skip(doc.size());
  b.endAnnotation("z");
  BufferedDocOp docOp = doc.consumeAndReturnInvertible(b.build());
  assertEquals(3, docOp.size());
}
/**
 * Regression test: deleting the single character inside an element must
 * apply and invert cleanly.
 */
public void testBug1() throws OperationException {
  IndexedDocumentImpl<Node, Element, Text, ?> d = nindoDocumentParser.parseDocument(
      "<a>a</a>");
  Nindo.Builder b = new Nindo.Builder();
  b.skip(1);
  b.deleteCharacters(1);
  checkApply(d, b.build());
}
//
// public void testReverseBug1() throws OperationException {
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(RawDocumentImpl.BUILDER,
// new AnnotationTree<Object>("a", "b", null));
// d.begin();
// d.elementStart("a", Attributes.EMPTY_MAP);
// d.startAnnotation("b", "3");
// d.characters("abc");
// d.endAnnotation("b");
// d.elementEnd();
// d.finish();
//
// OperationContainer reverseSink = new OperationContainer();
// d.registerReverseSink(reverseSink);
// String beforeXml = OperationXmlifier.xmlify(d);
//
// d.begin();
// d.skip(2);
// d.startAnnotation("a", "2");
// d.characters("abcd");
// d.deleteCharacters(1);
// d.endAnnotation("a");
// d.finish();
//
// String afterXml = OperationXmlifier.xmlify(d);
// DocumentMutation reverse = reverseSink.operation;
// reverse.apply(d);
// String reversedXml = OperationXmlifier.xmlify(d);
//
// assertEquals(beforeXml, reversedXml);
//
// DocumentOperationChecker.Recorder r = new DocumentOperationChecker.Recorder();
// r.begin();
// r.skip(2);
// r.deleteCharacters(4);
// r.startAnnotation("a", null);
// r.startAnnotation("b", "3");
// r.characters("b");
// r.endAnnotation("a");
// r.endAnnotation("b");
// r.finish();
// DocumentOperationChecker checker = r.finishRecording();
// reverse.apply(checker);
// }
//
// public void testReverseBug2() throws OperationException {
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(RawDocumentImpl.BUILDER,
// new AnnotationTree<Object>("a", "b", null));
// d.begin();
// d.elementStart("a", Attributes.EMPTY_MAP);
// d.characters("ababa");
// d.startAnnotation("e", "2");
// d.characters("d");
// d.startAnnotation("a", "1");
// d.characters("abcd");
// d.endAnnotation("a");
// d.characters("babc");
// d.endAnnotation("e");
// d.characters("de");
// d.elementEnd();
// d.finish();
//
// OperationContainer reverseSink = new OperationContainer();
// d.registerReverseSink(reverseSink);
//
// d.begin();
// d.skip(1);
// d.skip(14);
// d.deleteCharacters(1);
// d.startAnnotation("d", "2");
// d.startAnnotation("b", null);
// d.endAnnotation("d");
// d.endAnnotation("b");
// d.finish();
//
// DocumentOperationChecker.Recorder r = new DocumentOperationChecker.Recorder();
// r.begin();
// r.skip(15);
// r.startAnnotation("a", null);
// r.startAnnotation("e", null);
// r.characters("d");
// r.endAnnotation("a");
// r.endAnnotation("e");
// r.finish();
// DocumentOperationChecker checker = r.finishRecording();
// reverseSink.operation.apply(checker);
// }
//
// public void testReverseBug3() throws OperationException {
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(RawDocumentImpl.BUILDER,
// new AnnotationTree<Object>("a", "b", null));
// d.begin();
// d.elementStart("a", Attributes.EMPTY_MAP);
// d.characters("babcdefabcdabfabcdefabcdefghabcdefgh");
// d.startAnnotation("d", "3");
// d.characters("gab");
// d.startAnnotation("e", "1");
// d.characters("gababcabcefghidefghefaababcdefghiabcdefgh");
// d.endAnnotation("d");
// d.characters("defghi");
// d.startAnnotation("a", "1");
// d.characters("abcd");
// d.endAnnotation("e");
// d.characters("efg");
// d.endAnnotation("a");
// d.characters("cdefe");
// d.startAnnotation("b", "3");
// d.characters("f");
// d.endAnnotation("b");
// d.elementEnd();
// d.finish();
//
// OperationContainer reverseSink = new OperationContainer();
// d.registerReverseSink(reverseSink);
//
// String beforeXml = OperationXmlifier.xmlify(d);
//
// d.begin();
// d.skip(1);
// d.skip(15);
// d.startAnnotation("e", "1");
// d.skip(3);
// d.characters("abcd");
// d.skip(2);
// d.characters("abcdefgh");
// d.deleteCharacters(1);
// d.startAnnotation("a", "2");
// d.skip(24);
// d.startAnnotation("e", "3");
// d.skip(4);
// d.characters("abcd");
// d.startAnnotation("a", "1");
// d.deleteCharacters(1);
// d.skip(3);
// d.characters("abcdefghi");
// d.deleteCharacters(1);
// d.skip(13);
// d.startAnnotation("b", "1");
// d.endAnnotation("e");
// d.endAnnotation("b");
// d.endAnnotation("a");
// d.finish();
//
// String afterXml = OperationXmlifier.xmlify(d);
// DocumentMutation reverse = reverseSink.operation;
// reverse.apply(d);
// String reversedXml = OperationXmlifier.xmlify(d);
//
// assertEquals(beforeXml, reversedXml);
//
// DocumentOperationChecker.Recorder r = new DocumentOperationChecker.Recorder();
// r.begin();
// r.skip(16);
// r.startAnnotation("e", null);
// r.skip(3);
// r.deleteCharacters(4);
// r.skip(2);
// r.deleteCharacters(8);
// r.startAnnotation("a", null);
// r.startAnnotation("b", null);
// r.startAnnotation("d", null);
// r.characters("a");
// r.endAnnotation("b");
// r.endAnnotation("d");
// r.skip(18);
// r.endAnnotation("e");
// r.skip(6);
// r.startAnnotation("e", "1");
// r.skip(4);
// r.deleteCharacters(4);
// r.startAnnotation("b", null);
// r.startAnnotation("d", "3");
// r.characters("f");
// r.endAnnotation("b");
// r.endAnnotation("d");
// r.skip(3);
// r.deleteCharacters(9);
// r.startAnnotation("b", null);
// r.startAnnotation("d", "3");
// r.characters("d");
// r.endAnnotation("b");
// r.endAnnotation("d");
// r.skip(13);
// r.endAnnotation("a");
// r.endAnnotation("e");
// r.finish();
// DocumentOperationChecker checker = r.finishRecording();
// reverse.apply(checker);
//
// assertEquals(beforeXml, reversedXml);
// }
//
// public void testReverseBug4() throws OperationException {
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(RawDocumentImpl.BUILDER,
// new AnnotationTree<Object>("a", "b", null));
// d.begin();
// d.elementStart("a", Attributes.EMPTY_MAP);
// d.characters("a");
// d.startAnnotation("d", "2");
// d.characters("bc");
// d.endAnnotation("d");
// d.elementEnd();
// d.finish();
//
// OperationContainer reverseSink = new OperationContainer();
// d.registerReverseSink(reverseSink);
//
// String beforeXml = OperationXmlifier.xmlify(d);
//
// d.begin();
// d.skip(1);
// d.startAnnotation("e", "3");
// d.skip(1);
// d.endAnnotation("e");
// d.characters("x");
// d.deleteCharacters(1);
// d.finish();
//
// String afterXml = OperationXmlifier.xmlify(d);
// DocumentMutation reverse = reverseSink.operation;
// reverse.apply(d);
// String reversedXml = OperationXmlifier.xmlify(d);
//
// DocumentOperationChecker.Recorder r = new DocumentOperationChecker.Recorder();
// r.begin();
// r.skip(1);
// r.startAnnotation("e", null);
// r.skip(1);
// r.deleteCharacters(1);
// r.startAnnotation("d", "2");
// r.characters("b");
// r.endAnnotation("d");
// r.endAnnotation("e");
// r.finish();
// DocumentOperationChecker checker = r.finishRecording();
// reverse.apply(checker);
//
// assertEquals(beforeXml, reversedXml);
// }
//
// public void testReverseBug5() throws OperationException {
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(RawDocumentImpl.BUILDER,
// new AnnotationTree<Object>("a", "b", null));
// d.begin();
// d.elementStart("a", Attributes.EMPTY_MAP);
// d.startAnnotation("e", "1");
// d.characters("aab");
// d.endAnnotation("e");
// d.characters("c");
// d.elementEnd();
// d.finish();
//
// OperationContainer reverseSink = new OperationContainer();
// d.registerReverseSink(reverseSink);
//
// String beforeXml = OperationXmlifier.xmlify(d);
//
// d.begin();
// d.skip(1);
// d.skip(1);
// d.startAnnotation("e", "2");
// d.characters("a");
// d.deleteCharacters(1);
// d.skip(1);
// d.deleteCharacters(1);
// d.endAnnotation("e");
// d.finish();
//
// String afterXml = OperationXmlifier.xmlify(d);
// DocumentMutation reverse = reverseSink.operation;
// reverse.apply(d);
// String reversedXml = OperationXmlifier.xmlify(d);
//
// assertEquals(beforeXml, reversedXml);
//
// DocumentOperationChecker.Recorder r = new DocumentOperationChecker.Recorder();
// r.begin();
// r.skip(2);
// r.deleteCharacters(1);
// r.startAnnotation("e", "1");
// r.characters("a");
// r.skip(1);
// r.startAnnotation("e", null);
// r.characters("c");
// r.endAnnotation("e");
// r.finish();
// DocumentOperationChecker checker = r.finishRecording();
// reverse.apply(checker);
// }
//
// public void testReverseBug6() throws OperationException {
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(RawDocumentImpl.BUILDER,
// new AnnotationTree<Object>("a", "b", null));
// d.begin();
// d.elementStart("a", Attributes.EMPTY_MAP);
// d.startAnnotation("d", "2");
// d.characters("a");
// d.startAnnotation("e", "1");
// d.characters("b");
// d.endAnnotation("d");
// d.endAnnotation("e");
// d.characters("b");
// d.elementEnd();
// d.finish();
// OperationContainer reverseSink = new OperationContainer();
// d.registerReverseSink(reverseSink);
//
// String beforeXml = OperationXmlifier.xmlify(d);
//
// d.begin();
// d.skip(1);
// d.startAnnotation("e", null);
// d.skip(2);
// d.endAnnotation("e");
// d.deleteCharacters(1);
// d.finish();
//
// String afterXml = OperationXmlifier.xmlify(d);
// DocumentMutation reverse = reverseSink.operation;
// reverse.apply(d);
// String reversedXml = OperationXmlifier.xmlify(d);
//
// assertEquals(beforeXml, reversedXml);
//
// DocumentOperationChecker.Recorder r = new DocumentOperationChecker.Recorder();
// r.begin();
// r.skip(2);
// r.startAnnotation("e", "1");
// r.skip(1);
// r.startAnnotation("d", null);
// r.startAnnotation("e", null);
// r.characters("b");
// r.endAnnotation("d");
// r.endAnnotation("e");
// r.finish();
// DocumentOperationChecker checker = r.finishRecording();
// reverse.apply(checker);
// }
//
// public void testConcurrentModificationException() throws OperationException {
// // The test is that this doesn't throw a ConcurrentModificationException.
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(RawDocumentImpl.PROVIDER,
// new AnnotationTree<Object>("a", "b", null));
// // initial
// d.begin();
// d.elementStart("blip", Attributes.EMPTY_MAP);
// {
// Map<String, String> a = new HashMap<String, String>();
// a.put("_t", "title");
// a.put("t", "h1");
// d.elementStart("p", new Attributes(a));
// }
// d.elementEnd();
// d.elementEnd();
// d.finish();
// // mutation
// d.begin();
// d.skip(1);
// d.setAttributes(new Attributes("_t", "title"));
// d.finish();
// }
//
// public void testNPE1() throws OperationException {
// // The test is that this doesn't throw a NullPointerException.
// IndexedDocumentImpl<Node, Element, Text, ?> d =
// new IndexedDocumentImpl<Node, Element, Text, Void>(
// RawDocumentImpl.PROVIDER,
// new AnnotationTree<Object>("a", "b", null));
//
// // initialization steps
// d.begin();
// d.elementStart("blip", Attributes.EMPTY_MAP);
// d.elementStart("p", new Attributes("_t", "title"));
//
// d.elementEnd();
// d.elementStart("p", Attributes.EMPTY_MAP);
// d.characters("a");
// d.elementEnd();
// d.elementStart("p", Attributes.EMPTY_MAP);
//
// d.elementEnd();
// d.elementStart("p", new Attributes("_t", "title"));
// d.elementEnd();
// d.elementStart("p", Attributes.EMPTY_MAP);
// d.elementEnd();
// d.elementEnd();
// d.finish();
//
// d.begin();
// d.skip(2);
// d.deleteAntiElementStart();
// d.deleteElementStart();
// d.deleteCharacters(1);
// d.deleteElementEnd();
// d.deleteAntiElementEnd(new Attributes("t", ""));
// d.finish();
//
// d.begin();
// d.skip(1);
// d.deleteElementStart();
// d.deleteElementEnd();
// d.finish();
//
// d.begin();
// d.skip(5);
// d.elementStart("p", Attributes.EMPTY_MAP);
// d.elementEnd();
// d.finish();
//
// // mutation that crashes
// // current state: <blip><p _t=title></p><p></p><p></p></blip>
// d.begin();
// d.skip(4);
// d.deleteAntiElementStart();
// d.deleteAntiElementEnd(Attributes.EMPTY_MAP);
// d.antiElementStart();
// d.antiElementEnd(Attributes.EMPTY_MAP);
// d.finish();
// }
/**
 * Exercises {@code annotationIntervals(start, end, keys)} over the text
 * "abcdefgh" annotated as listed in the inline range comments below.
 * Verifies interval boundaries, the per-interval annotation map, and
 * {@code diffFromLeft()} (the keys whose value changed at the interval start).
 *
 * Observed map contract (from the assertions): a key explicitly annotated
 * with a null value is reported as null, while an absent key yields the
 * supplied default ("x" here).
 */
public void testAnnotationIntervalIterator() throws OperationException {
    IndexedDocumentImpl<Node, Element, Text, ?> doc =
        new IndexedDocumentImpl<Node, Element, Text, Void>(
            RawDocumentImpl.PROVIDER.parse("<doc><x><p>abcdefgh</p></x></doc>"),
            new AnnotationTree<Object>("a", "b", null), DocumentSchema.NO_SCHEMA_CONSTRAINTS);
    // 1-3: a=1, c=1
    // 3-5: a=1, b=1, c=1
    // 5-6: a=2, b=1, c=1
    // 6-8: b=1, c=1
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(1, 5, "a", "1"));
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(5, 6, "a", "2"));
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(3, 8, "b", "1"));
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(1, 8, "c", "1"));
    // Query restricted to key "a": adjacent ranges differing only in other
    // keys are merged, so three intervals are expected over [2, 10).
    {
      Iterator<AnnotationInterval<String>> iterator =
          doc.annotationIntervals(2, 10, CollectionUtils.newStringSet("a")).iterator();
      {
        // First interval is clipped to the query start (2, not 1).
        AnnotationInterval<String> i = iterator.next();
        assertEquals(2, i.start());
        assertEquals(5, i.end());
        assertEquals(1, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals("1", i.annotations().get("a", "x"));
        // Nothing changed at the (clipped) left edge, so the diff is empty.
        assertEquals(0, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals("x", i.diffFromLeft().get("a", "x"));
      }
      {
        AnnotationInterval<String> i = iterator.next();
        assertEquals(5, i.start());
        assertEquals(6, i.end());
        assertEquals(1, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals("2", i.annotations().get("a", "x"));
        assertEquals(1, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals("2", i.diffFromLeft().get("a", "x"));
      }
      {
        // Past position 6 "a" is unset; interval runs to the query end (10).
        AnnotationInterval<String> i = iterator.next();
        assertEquals(6, i.start());
        assertEquals(10, i.end());
        assertEquals(1, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals(null, i.annotations().get("a", "x"));
        assertEquals(1, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals(null, i.diffFromLeft().get("a", "x"));
      }
      assertFalse(iterator.hasNext());
    }
    // 1-3: a=1, c=1
    // 3-5: a=1, b=1, c=1
    // 5-6: a=2, b=1, c=1
    // 6-8: b=1, c=1
    // Query with keys == null: all keys participate, so every annotation
    // boundary produces a new interval (five intervals over [2, 10)).
    {
      Iterator<AnnotationInterval<String>> iterator =
          doc.annotationIntervals(2, 10, null).iterator();
      {
        AnnotationInterval<String> i = iterator.next();
        assertEquals(2, i.start());
        assertEquals(3, i.end());
        assertEquals(3, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals("1", i.annotations().get("a", "x"));
        assertEquals(null, i.annotations().get("b", "x"));
        assertEquals("1", i.annotations().get("c", "x"));
        assertEquals(0, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals("x", i.diffFromLeft().get("a", "x"));
        assertEquals("x", i.diffFromLeft().get("b", "x"));
        assertEquals("x", i.diffFromLeft().get("c", "x"));
      }
      {
        AnnotationInterval<String> i = iterator.next();
        assertEquals(3, i.start());
        assertEquals(5, i.end());
        assertEquals(3, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals("1", i.annotations().get("a", "x"));
        assertEquals("1", i.annotations().get("b", "x"));
        assertEquals("1", i.annotations().get("c", "x"));
        // Only "b" changed at position 3.
        assertEquals(1, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals("x", i.diffFromLeft().get("a", "x"));
        assertEquals("1", i.diffFromLeft().get("b", "x"));
        assertEquals("x", i.diffFromLeft().get("c", "x"));
      }
      {
        AnnotationInterval<String> i = iterator.next();
        assertEquals(5, i.start());
        assertEquals(6, i.end());
        assertEquals(3, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals("2", i.annotations().get("a", "x"));
        assertEquals("1", i.annotations().get("b", "x"));
        assertEquals("1", i.annotations().get("c", "x"));
        assertEquals(1, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals("2", i.diffFromLeft().get("a", "x"));
      }
      {
        AnnotationInterval<String> i = iterator.next();
        assertEquals(6, i.start());
        assertEquals(8, i.end());
        assertEquals(3, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals(null, i.annotations().get("a", "x"));
        assertEquals("1", i.annotations().get("b", "x"));
        assertEquals("1", i.annotations().get("c", "x"));
        // "a" went from "2" to unset at position 6.
        assertEquals(1, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals(null, i.diffFromLeft().get("a", "x"));
        assertEquals("x", i.diffFromLeft().get("b", "x"));
        assertEquals("x", i.diffFromLeft().get("c", "x"));
      }
      {
        // Final interval: all keys unset, "b" and "c" both ended at 8.
        AnnotationInterval<String> i = iterator.next();
        assertEquals(8, i.start());
        assertEquals(10, i.end());
        assertEquals(3, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals(null, i.annotations().get("a", "x"));
        assertEquals(null, i.annotations().get("b", "x"));
        assertEquals(null, i.annotations().get("c", "x"));
        assertEquals(2, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals("x", i.diffFromLeft().get("a", "x"));
        assertEquals(null, i.diffFromLeft().get("b", "x"));
        assertEquals(null, i.diffFromLeft().get("c", "x"));
      }
      assertFalse(iterator.hasNext());
    }
    // 1-3: a=1, c=1
    // 3-5: a=1, b=1, c=1
    // 5-6: a=2, b=1, c=1
    // 6-8: b=1, c=1
    // A query strictly inside one annotated run yields a single interval
    // clipped to the query bounds.
    {
      Iterator<AnnotationInterval<String>> iterator =
          doc.annotationIntervals(3, 4, null).iterator();
      {
        AnnotationInterval<String> i = iterator.next();
        assertEquals(3, i.start());
        assertEquals(4, i.end());
        assertEquals(3, CollectionUtils.newJavaMap(i.annotations()).size());
        assertEquals("1", i.annotations().get("a", "x"));
        assertEquals("1", i.annotations().get("b", "x"));
        assertEquals("1", i.annotations().get("c", "x"));
        assertEquals(1, CollectionUtils.newJavaMap(i.diffFromLeft()).size());
        assertEquals("x", i.diffFromLeft().get("a", "x"));
        assertEquals("1", i.diffFromLeft().get("b", "x"));
        assertEquals("x", i.diffFromLeft().get("c", "x"));
      }
      assertFalse(iterator.hasNext());
    }
    // An empty query range produces no intervals at all.
    {
      Iterator<AnnotationInterval<String>> iterator =
          doc.annotationIntervals(3, 3, null).iterator();
      assertFalse(iterator.hasNext());
    }
}
/**
 * Exercises {@code rangedAnnotations(start, end, keys)} over the same
 * annotated document as {@code testAnnotationIntervalIterator}. Unlike
 * intervals, each ranged annotation reports the FULL extent of its run,
 * which may reach outside the queried window (e.g. start 1 or end 12 for
 * a [2, 10) query; 12 is the document's total item length here).
 */
public void testRangedAnnotationIterator() throws OperationException {
    IndexedDocumentImpl<Node, Element, Text, ?> doc =
        new IndexedDocumentImpl<Node, Element, Text, Void>(
            RawDocumentImpl.PROVIDER.parse("<doc><x><p>abcdefgh</p></x></doc>"),
            new AnnotationTree<Object>("a", "b", null), DocumentSchema.NO_SCHEMA_CONSTRAINTS);
    // 1-3: a=1, c=1
    // 3-5: a=1, b=1, c=1
    // 5-6: a=2, b=1, c=1
    // 6-8: b=1, c=1
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(1, 5, "a", "1"));
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(5, 6, "a", "2"));
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(3, 8, "b", "1"));
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(1, 8, "c", "1"));
    // Query restricted to key "a": three runs ("1", "2", then unset).
    {
      Iterator<RangedAnnotation<String>> iterator =
          doc.rangedAnnotations(2, 10, CollectionUtils.newStringSet("a")).iterator();
      {
        // Run starts at 1 even though the query begins at 2 (full extent).
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals("1", r.value());
        assertEquals(1, r.start());
        assertEquals(5, r.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals("2", r.value());
        assertEquals(5, r.start());
        assertEquals(6, r.end());
      }
      {
        // The trailing unset run extends to the end of the document (12).
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals(null, r.value());
        assertEquals(6, r.start());
        assertEquals(12, r.end());
      }
      assertFalse(iterator.hasNext());
    }
    // Query with keys == null: runs for all keys, interleaved by position.
    {
      Iterator<RangedAnnotation<String>> iterator =
          doc.rangedAnnotations(2, 10, null).iterator();
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("b", r.key());
        assertEquals(null, r.value());
        assertEquals(0, r.start());
        assertEquals(3, r.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        RangedAnnotation<String> r2 = iterator.next();
        // Order of these two ranges is unspecified; normalize it.
        if ("c".equals(r.key())) {
          RangedAnnotation<String> tmp = r2;
          r2 = r;
          r = tmp;
        }
        assertEquals("a", r.key());
        assertEquals("1", r.value());
        assertEquals(1, r.start());
        assertEquals(5, r.end());
        assertEquals("c", r2.key());
        assertEquals("1", r2.value());
        assertEquals(1, r2.start());
        assertEquals(8, r2.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("b", r.key());
        assertEquals("1", r.value());
        assertEquals(3, r.start());
        assertEquals(8, r.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals("2", r.value());
        assertEquals(5, r.start());
        assertEquals(6, r.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals(null, r.value());
        assertEquals(6, r.start());
        assertEquals(12, r.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        RangedAnnotation<String> r2 = iterator.next();
        // Order of these two ranges is unspecified; normalize it.
        if ("c".equals(r.key())) {
          RangedAnnotation<String> tmp = r2;
          r2 = r;
          r = tmp;
        }
        assertEquals("b", r.key());
        assertEquals(null, r.value());
        assertEquals(8, r.start());
        assertEquals(12, r.end());
        assertEquals("c", r2.key());
        assertEquals(null, r2.value());
        assertEquals(8, r2.start());
        assertEquals(12, r2.end());
      }
      assertFalse(iterator.hasNext());
    }
    // NOTE(review): this final check uses annotationIntervals, not
    // rangedAnnotations; it appears copied from the interval test above —
    // confirm whether a rangedAnnotations(3, 3, null) check was intended.
    {
      Iterator<AnnotationInterval<String>> iterator =
          doc.annotationIntervals(3, 3, null).iterator();
      assertFalse(iterator.hasNext());
    }
}
/**
 * Verifies that a local annotation key (created via
 * {@code Annotations.makeLocal}) carrying a non-String value is invisible to
 * the String-typed {@code rangedAnnotations}/{@code annotationIntervals}
 * queries: only the ordinary "a" annotation shows up in the results.
 */
public void testRangedAnnotationIteratorWithNonStrings() throws OperationException {
    AnnotationTree<Object> annotations = new AnnotationTree<Object>(
        "a", "b", null);
    IndexedDocumentImpl<Node, Element, Text, ?> doc =
        new IndexedDocumentImpl<Node, Element, Text, Void>(
            RawDocumentImpl.PROVIDER.parse("<doc><x><p>abcdefgh</p></x></doc>"), annotations,
            DocumentSchema.NO_SCHEMA_CONSTRAINTS);
    // 1-3: a=1, @c=Object
    // 3-5: a=1
    doc.consumeAndReturnInvertible(Nindo.setAnnotation(1, 5, "a", "1"));
    // Apply the local, Object-valued annotation directly on the annotation
    // tree (it is not part of the document operation stream).
    annotations.begin(false);
    annotations.skip(1);
    String c = Annotations.makeLocal("c");
    annotations.startAnnotation(c, new Object());
    annotations.skip(2);
    annotations.endAnnotation(c);
    annotations.finish();
    // Keyed query: only the two runs of "a" are reported.
    {
      Iterator<RangedAnnotation<String>> iterator =
          doc.rangedAnnotations(2, 10, CollectionUtils.newStringSet("a")).iterator();
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals("1", r.value());
        assertEquals(1, r.start());
        assertEquals(5, r.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals(null, r.value());
        assertEquals(5, r.start());
        assertEquals(12, r.end());
      }
      assertFalse(iterator.hasNext());
    }
    // Unrestricted query: the local non-String key must still not appear.
    {
      Iterator<RangedAnnotation<String>> iterator =
          doc.rangedAnnotations(2, 10, null).iterator();
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals("1", r.value());
        assertEquals(1, r.start());
        assertEquals(5, r.end());
      }
      {
        RangedAnnotation<String> r = iterator.next();
        assertEquals("a", r.key());
        assertEquals(null, r.value());
        assertEquals(5, r.start());
        assertEquals(12, r.end());
      }
      assertFalse(iterator.hasNext());
    }
    // Empty range still yields no intervals.
    {
      Iterator<AnnotationInterval<String>> iterator =
          doc.annotationIntervals(3, 3, null).iterator();
      assertFalse(iterator.hasNext());
    }
}
/**
 * Verifies splitText's return-value contract once a transparent (local-only)
 * element has been wrapped around the second half of a split text node:
 * further splits at that offset return null rather than the sibling, a split
 * at offset 0 returns the node itself, and the persistent document content
 * is left unchanged ("ab").
 */
public void testSplitTextNeverReturnsSibling() {
    TestDocumentContext<Node, Element, Text> cxt = ContextProviders.createTestPojoContext(
        DocProviders.POJO.parse("ab").asOperation(),
        null, null, null, DocumentSchema.NO_SCHEMA_CONSTRAINTS);
    TextNodeOrganiser<Text> organiser = cxt.textNodeOrganiser();
    MutableDocument<Node, Element, Text> doc = cxt.document();
    Text first = (Text) doc.getFirstChild(doc.getDocumentElement());
    // Split "ab" into "a" + "b"; 'text' is the node holding "b".
    Text text = organiser.splitText(first, 1);
    LocalDocument<Node, Element, Text> local = cxt.annotatableContent();
    // Wrap the "b" node in a transparent local element <l> (local view only;
    // it does not exist in the persistent document).
    Element tr = local.transparentCreate("l", Attributes.EMPTY_MAP, doc.getDocumentElement(), text);
    local.transparentMove(tr, text, null, null);
    // A split at the end of "a" must now return null, never the sibling.
    assertNull(cxt.getIndexedDoc().splitText(first, 1));
    assertNull(organiser.splitText(first, 1));
    // A split at offset 0 returns the node itself.
    assertSame(first, organiser.splitText(first, 0));
    assertSame(first, organiser.splitText(first, 0));
    // Local view sees the wrapper; the persistent document does not.
    assertEquals("a<l>b</l>", XmlStringBuilder.innerXml(local).toString());
    assertEquals("ab", XmlStringBuilder.innerXml(doc).toString());
}
/**
 * {@code getLocation} must reject nodes that are no longer — or never were —
 * part of the indexed document by throwing IllegalArgumentException.
 */
public void testCantGetLocationOfInvalidNode() throws OperationException {
    RawDocumentImpl substrate = RawDocumentImpl.PROVIDER.parse("<doc><p></p></doc>");
    AnnotationTree<Object> tree = new AnnotationTree<Object>("a", "b", null);
    IndexedDocumentImpl<Node, Element, Text, ?> indexed =
        new IndexedDocumentImpl<Node, Element, Text, Void>(substrate, tree,
            DocumentSchema.NO_SCHEMA_CONSTRAINTS);
    Node child = indexed.getDocumentElement().getFirstChild();
    // While the node is attached, its location is well defined.
    assertEquals(0, indexed.getLocation(child));
    // Delete the element; any further location query for it must fail.
    indexed.consumeAndReturnInvertible(Nindo.deleteElement(0));
    try {
      indexed.getLocation(child);
      fail("Expected: IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
      // Expected: the node has been deleted from the document.
    }
    // A node created directly on the substrate was never indexed at all.
    try {
      indexed.getLocation(substrate.createElement("abc",
          Collections.<String, String>emptyMap(), indexed.getDocumentElement(), null));
      fail("Expected: IllegalArgumentException");
    } catch (IllegalArgumentException expected) {
      // Expected: the node was never part of this document.
    }
}
/**
 * Performance regression test: validating a retain spanning a very large
 * (10M character) document must complete quickly. The assertion only checks
 * that validation succeeds; the real test is that the loop does not time out.
 *
 * NOTE(review): the typo in the method name ("Performace") is kept so that
 * any external references to this test by name continue to work.
 */
public void testCheckRetainPerformace() throws OperationException {
    final int length = 10000000;
    // Pre-size the builder: without a capacity hint, growing to 10M chars
    // causes repeated internal array reallocations and copies.
    StringBuilder b = new StringBuilder(length);
    for (int i = 0; i < length; i++) {
      b.append('z');
    }
    AnnotationTree<Object> annotations = new AnnotationTree<Object>(
        "a", "b", null);
    IndexedDocumentImpl<Node, Element, Text, ?> doc =
        new IndexedDocumentImpl<Node, Element, Text, Void>(
            RawDocumentImpl.PROVIDER.parse("<doc><p></p></doc>"), annotations,
            DocumentSchema.NO_SCHEMA_CONSTRAINTS);
    // Insert the huge annotated character run into the document.
    doc.consume(new DocOpBuilder().annotationBoundary(
        new AnnotationBoundaryMapBuilder().change("a", null, "0").build())
        .characters(b.toString()).retain(2)
        .annotationBoundary(new AnnotationBoundaryMapBuilder().end("a").build()).build());
    // Use the monotonic nanosecond clock for timing; currentTimeMillis()
    // can jump backwards/forwards if the wall clock is adjusted mid-run.
    long startTime = System.nanoTime();
    final int reps = 10000;
    for (int i = 0; i < reps; i++) {
      assertTrue(
          // The test is that this doesn't time out.
          DocOpValidator.validate(null, DocumentSchema.NO_SCHEMA_CONSTRAINTS,
              Automatons.fromReadable(doc),
              new DocOpBuilder().annotationBoundary(
                  new AnnotationBoundaryMapBuilder().change("a", "0", "1").build())
                  .retain(length + 2)
                  .annotationBoundary(new AnnotationBoundaryMapBuilder().end("a").build()).build())
              .isValid());
    }
    long endTime = System.nanoTime();
    long elapsed = (endTime - startTime) / 1000000L; // elapsed wall time, millis
    System.err.println("millis per rep: " + (((float) elapsed) / reps));
}
}
| processone/google-wave-api | wave-model/src/test/java/org/waveprotocol/wave/model/document/indexed/IndexedDocumentImplTest.java | Java | apache-2.0 | 48,115 |
package org.deri.any23.extractor.html;
import org.deri.any23.extractor.ExtractorFactory;
import org.deri.any23.vocab.HRECIPE;
import org.deri.any23.vocab.SINDICE;
import org.junit.Test;
import org.openrdf.model.Value;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.repository.RepositoryException;
/**
* Test case for {@link HRecipeExtractor}.
*
* @author Michele Mostarda (mostarda@fbk.eu)
*/
public class HRecipeExtractorTest extends AbstractExtractorTestCase {
    // Vocabulary singletons used to express the expected RDF statements.
    // NOTE(review): vSINDICE is not referenced in this class — confirm it is
    // needed before removing.
    private static final SINDICE vSINDICE = SINDICE.getInstance();
    private static final HRECIPE vHRECIPE = HRECIPE.getInstance();
    @Override
    protected ExtractorFactory<?> getExtractorFactory() {
        return HRecipeExtractor.factory;
    }
    /**
     * A page containing no microformats must produce an empty model.
     *
     * @throws RepositoryException if the underlying triple store fails
     */
    @Test
    public void testNoMicroformats() throws RepositoryException {
        assertExtracts("html/html-without-uf.html");
        assertModelEmpty();
    }
    /**
     * Extracting the hRecipe specification example must yield the expected
     * counts of typed resources (1 Recipe, 3 Ingredients, 2 Durations,
     * 2 Nutritions) and of the individual recipe property statements.
     *
     * @throws RepositoryException if the underlying triple store fails
     */
    @Test
    public void testExtraction() throws RepositoryException {
        assertExtracts("microformats/hrecipe/01-spec.html");
        assertModelNotEmpty();
        assertStatementsSize(RDF.TYPE, vHRECIPE.Recipe , 1);
        assertStatementsSize(RDF.TYPE, vHRECIPE.Ingredient, 3);
        assertStatementsSize(RDF.TYPE, vHRECIPE.Duration , 2);
        assertStatementsSize(RDF.TYPE, vHRECIPE.Nutrition , 2);
        assertStatementsSize(vHRECIPE.fn, (String) null, 1);
        assertStatementsSize(vHRECIPE.yield, (String) null, 1);
        assertStatementsSize(vHRECIPE.instructions, (String) null, 1);
        assertStatementsSize(vHRECIPE.photo, (String) null, 1);
        assertStatementsSize(vHRECIPE.summary, (String) null, 1);
        assertStatementsSize(vHRECIPE.author, (String) null, 2);
        assertStatementsSize(vHRECIPE.published, (String) null, 1);
        assertStatementsSize(vHRECIPE.tag, (String) null, 2);
    }
}
| venukb/any23 | any23-core/src/test/java/org/deri/any23/extractor/html/HRecipeExtractorTest.java | Java | apache-2.0 | 1,895 |
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
/**
* The Operations Management Suite (OMS) parameters.
*
*/
// AutoRest-generated model (see file header): edits here will be lost on
// regeneration. The class carries no state of its own; its purpose is the
// serialization metadata returned by mapper().
class ClusterMonitoringRequest {
  /**
   * Create a ClusterMonitoringRequest.
   * @property {string} [workspaceId] The Operations Management Suite (OMS)
   * workspace ID.
   * @property {string} [primaryKey] The Operations Management Suite (OMS)
   * workspace key.
   */
  constructor() {
  }
  /**
   * Defines the metadata of ClusterMonitoringRequest
   *
   * Returns a fresh metadata object on every call; both properties are
   * optional strings.
   *
   * @returns {object} metadata of ClusterMonitoringRequest
   *
   */
  mapper() {
    return {
      required: false,
      serializedName: 'ClusterMonitoringRequest',
      type: {
        name: 'Composite',
        className: 'ClusterMonitoringRequest',
        modelProperties: {
          workspaceId: {
            required: false,
            serializedName: 'workspaceId',
            type: {
              name: 'String'
            }
          },
          primaryKey: {
            required: false,
            serializedName: 'primaryKey',
            type: {
              name: 'String'
            }
          }
        }
      }
    };
  }
}
module.exports = ClusterMonitoringRequest;
| xingwu1/azure-sdk-for-node | lib/services/hdInsightManagement/lib/models/clusterMonitoringRequest.js | JavaScript | apache-2.0 | 1,464 |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package armyc2.c2sd.renderer.utilities;
/**
*
* @author michael.spinelli
*/
/**
 * Lookup record describing how a single-point symbol maps into the renderer's
 * glyph sheets: the basic symbol ID, a description, the character mappings for
 * the present (P) and anticipated (A) frames, and the rendered size.
 *
 * Numeric fields are parsed from strings; a null or empty string yields 0.
 *
 * @author michael.spinelli
 */
public class SinglePointLookupInfo {
    private String _SymbolID = "";
    private String _Description = "";
    private int _mappingP = 0;
    private int _mappingA = 0;
    private int _height = 0;
    private int _width = 0;
    /**
     * @param basicSymbolID the basic symbol identifier
     * @param description human readable description of the symbol
     * @param mappingP mapping for the present frame (decimal string; may be
     *        null or empty, in which case 0 is used)
     * @param mappingA mapping for the anticipated frame (same convention)
     * @param width symbol width (same convention)
     * @param height symbol height (same convention)
     */
    public SinglePointLookupInfo(String basicSymbolID, String description,
            String mappingP, String mappingA,String width,String height)
    {
        _SymbolID = basicSymbolID;
        _Description = description;
        // All four numeric inputs share the same null/empty-means-zero rule.
        _mappingP = parseOrZero(mappingP);
        _mappingA = parseOrZero(mappingA);
        _height = parseOrZero(height);
        _width = parseOrZero(width);
    }
    /**
     * Parses a decimal integer, treating null or empty input as 0.
     * Uses Integer.parseInt to avoid the boxing of Integer.valueOf.
     */
    private static int parseOrZero(String value)
    {
        if (value == null || value.length() == 0)
            return 0;
        return Integer.parseInt(value);
    }
    /** @return the basic symbol identifier */
    public String getBasicSymbolID()
    {
        return _SymbolID;
    }
    /** @return the human readable description */
    public String getDescription()
    {
        return _Description;
    }
    /** @return the mapping for the present frame (0 if unset) */
    public int getMappingP()
    {
        return _mappingP;
    }
    /** @return the mapping for the anticipated frame (0 if unset) */
    public int getMappingA()
    {
        return _mappingA;
    }
    /** @return the symbol height (0 if unset) */
    public int getHeight()
    {
        return _height;
    }
    /** @return the symbol width (0 if unset) */
    public int getWidth()
    {
        return _width;
    }
    /**
     * Copy factory (does not use Object.clone / Cloneable): builds a new
     * instance through the public constructor.
     *
     * @return The newly cloned SPSymbolDef
     */
    @Override
    public SinglePointLookupInfo clone()
    {
        return new SinglePointLookupInfo(_SymbolID, _Description,
                String.valueOf(getMappingP()),
                String.valueOf(getMappingA()),
                String.valueOf(getWidth()),
                String.valueOf(getHeight()));
    }
    /**
     * Serializes this record as a flat XML fragment
     * (SYMBOLID, MAPPINGP, MAPPINGA, DESCRIPTION, WIDTH, HEIGHT elements).
     *
     * @return the XML fragment, with no enclosing root element
     */
    public String toXML()
    {
        String symbolId = "<SYMBOLID>" + getBasicSymbolID() + "</SYMBOLID>";
        String mappingP = "<MAPPINGP>" + String.valueOf(getMappingP()) + "</MAPPINGP>";
        String mappingA = "<MAPPINGA>" + String.valueOf(getMappingA()) + "</MAPPINGA>";
        String description = "<DESCRIPTION>" + getDescription() + "</DESCRIPTION>";
        String width = "<WIDTH>" + String.valueOf(getWidth()) + "</WIDTH>";
        String height = "<HEIGHT>" + String.valueOf(getHeight()) + "</HEIGHT>";
        String xml = symbolId + mappingP + mappingA + description + width + height;
        return xml;
    }
}
| spyhunter99/mil-sym-android | renderer/src/main/java/armyc2/c2sd/renderer/utilities/SinglePointLookupInfo.java | Java | apache-2.0 | 2,634 |
/**
* Copyright 2016 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const ExternalAnchorsAudit =
require('../../../audits/dobetterweb/external-anchors-use-rel-noopener.js');
const assert = require('assert');
const URL = 'https://google.com/test';
/* eslint-env mocha */
describe('External anchors use rel="noopener"', () => {
  // Runs the audit against the fixed page URL with the given anchor artifacts.
  const audit = anchors => ExternalAnchorsAudit.audit({
    AnchorsWithNoRelNoopener: anchors,
    URL: {finalUrl: URL},
  });

  it('passes when links are from same hosts as the page host', () => {
    const result = audit([
      {href: 'https://google.com/test'},
      {href: 'https://google.com/test1'},
    ]);
    assert.equal(result.rawValue, true);
    assert.equal(result.extendedInfo.value.length, 0);
  });

  it('fails when links are from different hosts than the page host', () => {
    const result = audit([
      {href: 'https://example.com/test'},
      {href: 'https://example.com/test1'},
    ]);
    assert.equal(result.rawValue, false);
    assert.equal(result.extendedInfo.value.length, 2);
  });

  it('handles links with no href attribute', () => {
    const result = audit([
      {href: ''},
      {href: 'http://'},
      {href: 'http:'},
    ]);
    assert.equal(result.rawValue, false);
    assert.equal(result.extendedInfo.value.length, 3);
    assert.ok(result.debugString, 'includes debugString');
  });
});
| cedricbellet/lighthouse | lighthouse-core/test/audits/dobetterweb/external-anchors-use-rel-noopener-test.js | JavaScript | apache-2.0 | 2,145 |
package org.alien4cloud.alm.deployment.configuration.services;
import javax.inject.Inject;
import org.alien4cloud.alm.events.BeforeApplicationEnvironmentDeleted;
import org.alien4cloud.alm.events.BeforeApplicationTopologyVersionDeleted;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
/**
* Clean local git repository when environment or topology is deleted
*/
@Component
public class LocalDeploymentConfigurationRepositoryCleaner {
    // DAO that owns the locally stored deployment configurations.
    @Inject
    private DeploymentConfigurationDao deploymentConfigurationDao;
    /**
     * Purges every stored deployment configuration tied to the topology
     * version that is about to be deleted.
     *
     * @param event fired before an application topology version is deleted
     */
    @EventListener
    public void handleDeleteTopologyVersion(BeforeApplicationTopologyVersionDeleted event) {
        deploymentConfigurationDao.deleteAllByTopologyVersionId(event.getApplicationId(), event.getTopologyVersion());
    }
    /**
     * Purges every stored deployment configuration tied to the environment
     * that is about to be deleted.
     *
     * @param event fired before an application environment is deleted
     */
    @EventListener
    public void handleDeleteEnvironment(BeforeApplicationEnvironmentDeleted event) {
        deploymentConfigurationDao.deleteAllByEnvironmentId(event.getApplicationId(), event.getApplicationEnvironmentId());
    }
}
| alien4cloud/alien4cloud | alien4cloud-core/src/main/java/org/alien4cloud/alm/deployment/configuration/services/LocalDeploymentConfigurationRepositoryCleaner.java | Java | apache-2.0 | 1,050 |
<?php
/*
 * user_info: fetch the QQ user's profile (nickname + avatar).
 *
 * $config must contain the registered application id under 'appid';
 * $oauth_data must contain 'access_token' and 'oauth_openid' as produced
 * by oauth_callback(). Returns array('name' => ..., 'avatar' => ...) on
 * success; returns nothing (null) when the HTTP request fails, matching
 * the original behavior.
 *
 * Cleanup: the previous version built a local $aConfig whose 'appkey' and
 * 'api' entries were never read — only the appid is needed here.
 */
function get_user_info($config, $oauth_data) {
    $sUrl = "https://graph.qq.com/user/get_user_info";
    $aGetParam = array(
        "access_token" => $oauth_data["access_token"],
        // The consumer key is the registered application id.
        "oauth_consumer_key" => $config["appid"],
        "openid" => $oauth_data["oauth_openid"],
        "format" => "json"
    );
    $sContent = get($sUrl, $aGetParam);
    if($sContent!==FALSE){
        $user = json_decode($sContent, true);
        // figureurl_1 is the small avatar URL returned by the QQ API —
        // TODO confirm desired size against the API response fields.
        return array("name"=>$user["nickname"], "avatar"=>$user["figureurl_1"]);
    }
}
/*
 * Logout: clear all OAuth state for the current visitor.
 */
function oauth_logout() {
    // Drop the anti-CSRF state token and the stored callback URI together.
    unset($_SESSION["state"], $_SESSION["URI"]);
    // Remove the persisted access token / openid via the app session wrapper.
    $oSession = new session();
    $oSession->delete('oauth_data');
}
/*
 * Login: start the QQ OAuth2 authorization-code flow.
 *
 * Verifies the cURL extension is available, generates an anti-CSRF `state`
 * token (stored in $_SESSION['state']), derives the callback URL from the
 * current request (stored in $_SESSION['URI']), and redirects the browser
 * to the QQ authorization endpoint.
 */
function oauth_login($config) {
    if (!function_exists("curl_init")) {
        echo "<h1>腾讯开放平台提示:请先开启curl支持</h1>";
        echo "
开启php curl函数库的步骤(for windows)<br />
1).去掉windows/php.ini 文件里;extension=php_curl.dll前面的; /*用 echo phpinfo();查看php.ini的路径*/<br />
2).把php5/libeay32.dll,ssleay32.dll复制到系统目录windows/下<br />
3).重启apache<br />
";
        exit();
    }
    $aConfig = array (
        'appid' => $config['appid'],
        'appkey' => $config['appkey'],
        'api' => 'get_user_info,add_topic,add_one_blog,add_album,upload_pic,list_album,add_share,check_page_fans,add_t,add_pic_t,del_t,get_repost_list,get_info,get_other_info,get_fanslist,get_idollist,add_idol,del_idol,get_tenpay_addr'
    );
    // BUG FIX: the client IP used to be concatenated *inside* date()'s
    // format string — md5(date('YmdHis' . getip())) — so date() interpreted
    // the IP's characters as format specifiers. Format the timestamp first,
    // then append the IP, then hash.
    $sState = md5(date('YmdHis') . getip());
    $_SESSION['state'] = $sState;
    // Rebuild the scheme://host[:port] prefix of the current request for the
    // redirect_uri QQ will call back.
    $server_name = strtolower($_SERVER['SERVER_NAME']);
    $server_port = ($_SERVER['SERVER_PORT'] == '80') ? '' : ':' . (int)$_SERVER['SERVER_PORT'];
    $secure = (isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] == 'on') ? 1 : 0;
    $callback = ($secure ? 'https://' : 'http://') . $server_name . $server_port;
    $callback = $callback . url('member/register/callback', array('app'=>'qq'));
    $_SESSION['URI'] = $callback;
    $aParam = array(
        "response_type" => 'code',
        "client_id" => $aConfig["appid"],
        "redirect_uri" => $callback,
        "scope" => $aConfig["api"],
        "state" => $sState
    );
    $aGet = array();
    foreach($aParam as $key=>$val){
        $aGet[] = $key . '=' . urlencode($val);
    }
    $sUrl = "https://graph.qq.com/oauth2.0/authorize?";
    $sUrl .= join("&", $aGet);
    header("location:" . $sUrl);
}
/*
 * callback: second leg of the OAuth2 authorization-code flow.
 *
 * Exchanges the ?code= returned by QQ for an access token, then calls the
 * /oauth2.0/me endpoint to resolve the user's openid, and stores both in
 * the application session under 'oauth_data'.
 * NOTE(review): the incoming $_GET['state'] is forwarded to the token
 * endpoint but never compared against $_SESSION['state'] — confirm CSRF
 * validation happens elsewhere before relying on this flow.
 */
function oauth_callback($config) {
    $aConfig = array (
        'appid' => $config['appid'],
        'appkey' => $config['appkey'],
        'api' => 'get_user_info,add_topic,add_one_blog,add_album,upload_pic,list_album,add_share,check_page_fans,add_t,add_pic_t,del_t,get_repost_list,get_info,get_other_info,get_fanslist,get_idollist,add_idol,del_idol,get_tenpay_addr'
    );
    $sUrl = "https://graph.qq.com/oauth2.0/token";
    $aGetParam = array(
        "grant_type" => "authorization_code",
        "client_id" => $aConfig["appid"],
        "client_secret" => $aConfig["appkey"],
        "code" => $_GET["code"],
        "state" => $_GET["state"],
        "redirect_uri" => $_SESSION["URI"]
    );
    // The state/URI pair is single-use; drop it before the exchange.
    unset($_SESSION["state"]);
    unset($_SESSION["URI"]);
    $sContent = get($sUrl,$aGetParam);
    if($sContent!==FALSE){
        // The token endpoint answers as a query string,
        // e.g. access_token=...&expires_in=...; parse it into $aParam.
        $aTemp = explode("&", $sContent);
        $aParam = $oauth_data = array();
        foreach($aTemp as $val){
            $aTemp2 = explode("=", $val);
            $aParam[$aTemp2[0]] = $aTemp2[1];
        }
        $oauth_data["access_token"] = $aParam["access_token"];
        $sUrl = "https://graph.qq.com/oauth2.0/me";
        $aGetParam = array(
            "access_token" => $aParam["access_token"]
        );
        $sContent = get($sUrl, $aGetParam);
        if($sContent!==FALSE){
            // /me returns JSONP of the form: callback( {"openid": ...} );
            // extract the JSON payload from inside the callback wrapper.
            $aTemp = array();
            preg_match('/callback\(\s+(.*?)\s+\)/i', $sContent,$aTemp);
            $aResult = json_decode($aTemp[1],true);
            $session = new session();
            $oauth_data['oauth_openid'] = $aResult["openid"];
            $session->set('oauth_data', $oauth_data);
        }
    }
}
/*
 * Best-effort client IP detection.
 *
 * Checks, in order: X-Forwarded-For (first entry that is not 'unknown'),
 * Client-IP, then REMOTE_ADDR, falling back to '0.0.0.0'. When $_SERVER is
 * unavailable the same headers are read via getenv().
 * SECURITY NOTE: X-Forwarded-For and Client-IP are client-supplied and
 * trivially spoofable — do not use this value for access control.
 */
function getip() {
    // BUG FIX: $sRealIp was previously left undefined (PHP notice + null
    // return) when every X-Forwarded-For entry was 'unknown', or when
    // getenv() returned false; default it up front.
    $sRealIp = '0.0.0.0';
    if (isset($_SERVER)) {
        if (isset($_SERVER['HTTP_X_FORWARDED_FOR'])) {
            $aIps = explode(',', $_SERVER['HTTP_X_FORWARDED_FOR']);
            foreach ($aIps as $sIp) {
                $sIp = trim($sIp);
                if ($sIp != 'unknown') {
                    $sRealIp = $sIp;
                    break;
                }
            }
        } elseif (isset($_SERVER['HTTP_CLIENT_IP'])) {
            $sRealIp = $_SERVER['HTTP_CLIENT_IP'];
        } elseif (isset($_SERVER['REMOTE_ADDR'])) {
            $sRealIp = $_SERVER['REMOTE_ADDR'];
        }
    } else {
        if (getenv('HTTP_X_FORWARDED_FOR')) {
            $sRealIp = getenv('HTTP_X_FORWARDED_FOR');
        } elseif (getenv('HTTP_CLIENT_IP')) {
            $sRealIp = getenv('HTTP_CLIENT_IP');
        } elseif (getenv('REMOTE_ADDR')) {
            $sRealIp = getenv('REMOTE_ADDR');
        }
    }
    return $sRealIp;
}
/*
 * HTTP GET request.
 *
 * Builds a query string from $aGetParam (values URL-encoded), performs a
 * cURL GET against $sUrl, and returns the response body on HTTP 200 or
 * FALSE otherwise. When the *global* $aConfig["debug"] equals 1 the
 * request/response details are echoed as HTML table rows.
 * NOTE(review): the local $aConfig arrays built by the oauth_* functions
 * above do not feed this global — confirm the debug flag is defined
 * elsewhere in the application bootstrap.
 */
function get($sUrl,$aGetParam){
    global $aConfig;
    $oCurl = curl_init();
    // Skip SSL certificate verification for https endpoints.
    if(stripos($sUrl,"https://")!==FALSE){
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYPEER, FALSE);
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYHOST, FALSE);
    }
    $aGet = array();
    foreach($aGetParam as $key=>$val){
        $aGet[] = $key."=".urlencode($val);
    }
    curl_setopt($oCurl, CURLOPT_URL, $sUrl."?".join("&",$aGet));
    curl_setopt($oCurl, CURLOPT_RETURNTRANSFER, 1 );
    $sContent = curl_exec($oCurl);
    $aStatus = curl_getinfo($oCurl);
    curl_close($oCurl);
    // Debug dump (labels are Chinese: request URL / GET params / request
    // info / response body / parsed JSON).
    if(intval($aConfig["debug"])===1){
        echo "<tr><td class='narrow-label'>请求地址:</td><td><pre>".$sUrl."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>GET参数:</td><td><pre>".var_export($aGetParam,true)."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>请求信息:</td><td><pre>".var_export($aStatus,true)."</pre></td></tr>";
        if(intval($aStatus["http_code"])==200){
            echo "<tr><td class='narrow-label'>返回结果:</td><td><pre>".$sContent."</pre></td></tr>";
            if((@$aResult = json_decode($sContent,true))){
                echo "<tr><td class='narrow-label'>结果集合解析:</td><td><pre>".var_export($aResult,true)."</pre></td></tr>";
            }
        }
    }
    if(intval($aStatus["http_code"])==200){
        return $sContent;
    }else{
        echo "<tr><td class='narrow-label'>返回出错:</td><td><pre>".$aStatus["http_code"].",请检查参数或者确实是腾讯服务器出错咯。</pre></td></tr>";
        return FALSE;
    }
}
/*
 * HTTP POST request.
 *
 * URL-encodes $aPOSTParam into an application/x-www-form-urlencoded body,
 * POSTs it to $sUrl via cURL, and returns the response body on HTTP 200 or
 * FALSE otherwise. Honors the same global $aConfig["debug"] dump as get().
 */
function post($sUrl,$aPOSTParam){
    global $aConfig;
    $oCurl = curl_init();
    // Skip SSL certificate verification for https endpoints.
    if(stripos($sUrl,"https://")!==FALSE){
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYPEER, FALSE);
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYHOST, false);
    }
    $aPOST = array();
    foreach($aPOSTParam as $key=>$val){
        $aPOST[] = $key."=".urlencode($val);
    }
    curl_setopt($oCurl, CURLOPT_URL, $sUrl);
    curl_setopt($oCurl, CURLOPT_RETURNTRANSFER, 1 );
    curl_setopt($oCurl, CURLOPT_POST,true);
    curl_setopt($oCurl, CURLOPT_POSTFIELDS, join("&", $aPOST));
    $sContent = curl_exec($oCurl);
    $aStatus = curl_getinfo($oCurl);
    curl_close($oCurl);
    // Debug dump (labels are Chinese: request URL / POST params / request
    // info / response body / parsed JSON).
    if(intval($aConfig["debug"])===1){
        echo "<tr><td class='narrow-label'>请求地址:</td><td><pre>".$sUrl."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>POST参数:</td><td><pre>".var_export($aPOSTParam,true)."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>请求信息:</td><td><pre>".var_export($aStatus,true)."</pre></td></tr>";
        if(intval($aStatus["http_code"])==200){
            echo "<tr><td class='narrow-label'>返回结果:</td><td><pre>".$sContent."</pre></td></tr>";
            if((@$aResult = json_decode($sContent,true))){
                echo "<tr><td class='narrow-label'>结果集合解析:</td><td><pre>".var_export($aResult,true)."</pre></td></tr>";
            }
        }
    }
    if(intval($aStatus["http_code"])==200){
        return $sContent;
    }else{
        echo "<tr><td class='narrow-label'>返回出错:</td><td><pre>".$aStatus["http_code"].",请检查参数或者确实是腾讯服务器出错咯。</pre></td></tr>";
        return FALSE;
    }
}
/*
 * Upload files via multipart POST.
 *
 * Sends $aPOSTParam as regular form fields and each entry of $aFileParam
 * as a file attachment ("@" + absolute path, cURL's legacy file-upload
 * syntax). Returns the response body on HTTP 200, FALSE otherwise.
 * NOTE(review): the "@"-prefix upload style requires legacy cURL semantics
 * (pre-PHP 5.5 behavior / CURLOPT_SAFE_UPLOAD disabled) — confirm the
 * target PHP version before relying on this.
 */
function upload($sUrl,$aPOSTParam,$aFileParam){
    // Prevent the request from hitting PHP's execution time limit.
    global $aConfig;
    set_time_limit(0);
    $oCurl = curl_init();
    // Skip SSL certificate verification for https endpoints.
    if(stripos($sUrl,"https://")!==FALSE){
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYPEER, FALSE);
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYHOST, false);
    }
    $aPOSTField = array();
    foreach($aPOSTParam as $key=>$val){
        $aPOSTField[$key]= $val;
    }
    foreach($aFileParam as $key=>$val){
        $aPOSTField[$key] = "@".$val; // "@" marks $val as the absolute path of the file to upload
    }
    curl_setopt($oCurl, CURLOPT_URL, $sUrl);
    curl_setopt($oCurl, CURLOPT_POST, true);
    curl_setopt($oCurl, CURLOPT_RETURNTRANSFER, 1 );
    curl_setopt($oCurl, CURLOPT_POSTFIELDS, $aPOSTField);
    $sContent = curl_exec($oCurl);
    $aStatus = curl_getinfo($oCurl);
    curl_close($oCurl);
    // Debug dump (labels are Chinese: request URL / POST params / file
    // params / request info / response body / parsed JSON).
    if(intval($aConfig["debug"])===1){
        echo "<tr><td class='narrow-label'>请求地址:</td><td><pre>".$sUrl."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>POST参数:</td><td><pre>".var_export($aPOSTParam,true)."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>文件参数:</td><td><pre>".var_export($aFileParam,true)."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>请求信息:</td><td><pre>".var_export($aStatus,true)."</pre></td></tr>";
        if(intval($aStatus["http_code"])==200){
            echo "<tr><td class='narrow-label'>返回结果:</td><td><pre>".$sContent."</pre></td></tr>";
            if((@$aResult = json_decode($sContent,true))){
                echo "<tr><td class='narrow-label'>结果集合解析:</td><td><pre>".var_export($aResult,true)."</pre></td></tr>";
            }
        }
    }
    if(intval($aStatus["http_code"])==200){
        return $sContent;
    }else{
        echo "<tr><td class='narrow-label'>返回出错:</td><td><pre>".$aStatus["http_code"].",请检查参数或者确实是腾讯服务器出错咯。</pre></td></tr>";
        return FALSE;
    }
}
/*
 * Download $sUrl and write the response body to local file $sFileName.
 *
 * Returns TRUE when the server answered HTTP 200. Note: the body is written
 * to $sFileName even on a non-200 response (original behavior, kept).
 */
function download($sUrl,$sFileName){
    global $aConfig;
    // Avoid script timeout on large downloads.
    set_time_limit(0);
    // BUG FIX: curl_init() was previously called twice, leaking the first
    // handle; initialize exactly once.
    $oCurl = curl_init();
    if(stripos($sUrl,"https://")!==FALSE){
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYPEER, FALSE);
        curl_setopt($oCurl, CURLOPT_SSL_VERIFYHOST, false);
    }
    // BUG FIX: the request's User-Agent lives in $_SERVER['HTTP_USER_AGENT'];
    // the old key 'USER_AGENT' never exists and also raised an
    // undefined-index notice. Fall back to a fixed UA string as before.
    $sUserAgent = (isset($_SERVER["HTTP_USER_AGENT"]) && $_SERVER["HTTP_USER_AGENT"])
        ? $_SERVER["HTTP_USER_AGENT"]
        : "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.7) Gecko/20100625 Firefox/3.6.7";
    curl_setopt($oCurl, CURLOPT_USERAGENT, $sUserAgent);
    curl_setopt($oCurl, CURLOPT_URL, $sUrl);
    curl_setopt($oCurl, CURLOPT_REFERER, $sUrl);
    curl_setopt($oCurl, CURLOPT_AUTOREFERER, true);
    curl_setopt($oCurl, CURLOPT_RETURNTRANSFER, 1 );
    $sContent = curl_exec($oCurl);
    $aStatus = curl_getinfo($oCurl);
    curl_close($oCurl);
    file_put_contents($sFileName,$sContent);
    if(intval($aConfig["debug"])===1){
        echo "<tr><td class='narrow-label'>请求地址:</td><td><pre>".$sUrl."</pre></td></tr>";
        echo "<tr><td class='narrow-label'>请求信息:</td><td><pre>".var_export($aStatus,true)."</pre></td></tr>";
    }
    return(intval($aStatus["http_code"])==200);
}
/*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "The Jakarta Project", "Tomcat", and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
* ====================================================================
*
* This source code implements specifications defined by the Java
* Community Process. In order to remain compliant with the specification
* DO NOT add / change / or delete method signatures!
*/
package javax.servlet.http;
import javax.servlet.ServletRequest;
import java.util.Enumeration;
/**
*
* Extends the {@link javax.servlet.ServletRequest} interface
* to provide request information for HTTP servlets.
*
* <p>The servlet container creates an <code>HttpServletRequest</code>
* object and passes it as an argument to the servlet's service
* methods (<code>doGet</code>, <code>doPost</code>, etc).
*
*
* @author Various
* @version $Version$
*
*
*/
public interface HttpServletRequest extends ServletRequest {
/**
* String identifier for Basic authentication. Value "BASIC"
*/
public static final String BASIC_AUTH = "BASIC";
/**
* String identifier for Form authentication. Value "FORM"
*/
public static final String FORM_AUTH = "FORM";
/**
* String identifier for Client Certificate authentication. Value "CLIENT_CERT"
*/
public static final String CLIENT_CERT_AUTH = "CLIENT_CERT";
/**
* String identifier for Digest authentication. Value "DIGEST"
*/
public static final String DIGEST_AUTH = "DIGEST";
/**
* Returns the name of the authentication scheme used to protect
* the servlet. All servlet containers support basic, form and client
* certificate authentication, and may additionally support digest
* authentication.
* If the servlet is not authenticated <code>null</code> is returned.
*
* <p>Same as the value of the CGI variable AUTH_TYPE.
*
*
* @return one of the static members BASIC_AUTH,
* FORM_AUTH, CLIENT_CERT_AUTH, DIGEST_AUTH
* (suitable for == comparison) or
* the container-specific string indicating
* the authentication scheme, or
* <code>null</code> if the request was
* not authenticated.
*
*/
public String getAuthType();
/**
*
* Returns an array containing all of the <code>Cookie</code>
* objects the client sent with this request.
* This method returns <code>null</code> if no cookies were sent.
*
* @return an array of all the <code>Cookies</code>
* included with this request, or <code>null</code>
* if the request has no cookies
*
*
*/
public Cookie[] getCookies();
/**
*
* Returns the value of the specified request header
* as a <code>long</code> value that represents a
* <code>Date</code> object. Use this method with
* headers that contain dates, such as
* <code>If-Modified-Since</code>.
*
* <p>The date is returned as
* the number of milliseconds since January 1, 1970 GMT.
* The header name is case insensitive.
*
* <p>If the request did not have a header of the
* specified name, this method returns -1. If the header
* can't be converted to a date, the method throws
* an <code>IllegalArgumentException</code>.
*
* @param name a <code>String</code> specifying the
* name of the header
*
* @return a <code>long</code> value
* representing the date specified
* in the header expressed as
* the number of milliseconds
* since January 1, 1970 GMT,
* or -1 if the named header
* was not included with the
* request
*
* @exception IllegalArgumentException If the header value
* can't be converted
* to a date
*
*/
public long getDateHeader(String name);
/**
*
* Returns the value of the specified request header
* as a <code>String</code>. If the request did not include a header
* of the specified name, this method returns <code>null</code>.
     * If there are multiple headers with the same name, this method
     * returns the first header in the request.
* The header name is case insensitive. You can use
* this method with any request header.
*
* @param name a <code>String</code> specifying the
* header name
*
* @return a <code>String</code> containing the
* value of the requested
* header, or <code>null</code>
* if the request does not
* have a header of that name
*
*/
public String getHeader(String name);
/**
*
* Returns all the values of the specified request header
* as an <code>Enumeration</code> of <code>String</code> objects.
*
* <p>Some headers, such as <code>Accept-Language</code> can be sent
* by clients as several headers each with a different value rather than
* sending the header as a comma separated list.
*
* <p>If the request did not include any headers
* of the specified name, this method returns an empty
* <code>Enumeration</code>.
* The header name is case insensitive. You can use
* this method with any request header.
*
* @param name a <code>String</code> specifying the
* header name
*
* @return an <code>Enumeration</code> containing
* the values of the requested header. If
* the request does not have any headers of
* that name return an empty
* enumeration. If
* the container does not allow access to
* header information, return null
*
*/
public Enumeration getHeaders(String name);
/**
*
* Returns an enumeration of all the header names
* this request contains. If the request has no
* headers, this method returns an empty enumeration.
*
* <p>Some servlet containers do not allow
* servlets to access headers using this method, in
* which case this method returns <code>null</code>
*
* @return an enumeration of all the
* header names sent with this
* request; if the request has
* no headers, an empty enumeration;
* if the servlet container does not
* allow servlets to use this method,
* <code>null</code>
*
*
*/
public Enumeration getHeaderNames();
/**
*
* Returns the value of the specified request header
* as an <code>int</code>. If the request does not have a header
* of the specified name, this method returns -1. If the
* header cannot be converted to an integer, this method
* throws a <code>NumberFormatException</code>.
*
* <p>The header name is case insensitive.
*
* @param name a <code>String</code> specifying the name
* of a request header
*
* @return an integer expressing the value
* of the request header or -1
* if the request doesn't have a
* header of this name
*
* @exception NumberFormatException If the header value
* can't be converted
* to an <code>int</code>
*/
public int getIntHeader(String name);
/**
*
* Returns the name of the HTTP method with which this
* request was made, for example, GET, POST, or PUT.
* Same as the value of the CGI variable REQUEST_METHOD.
*
* @return a <code>String</code>
* specifying the name
* of the method with which
* this request was made
*
*/
public String getMethod();
/**
*
* Returns any extra path information associated with
* the URL the client sent when it made this request.
* The extra path information follows the servlet path
* but precedes the query string and will start with
* a "/" character.
*
* <p>This method returns <code>null</code> if there
* was no extra path information.
*
* <p>Same as the value of the CGI variable PATH_INFO.
*
*
* @return a <code>String</code>, decoded by the
* web container, specifying
* extra path information that comes
* after the servlet path but before
* the query string in the request URL;
* or <code>null</code> if the URL does not have
* any extra path information
*
*/
public String getPathInfo();
/**
*
* Returns any extra path information after the servlet name
* but before the query string, and translates it to a real
* path. Same as the value of the CGI variable PATH_TRANSLATED.
*
     * <p>This method returns <code>null</code> if the URL does not have
     * any extra path information, or if the servlet container cannot
     * translate the virtual path to a real path for any reason
     * (such as when the web application is executed from an archive).
*
* The web container does not decode this string.
*
*
* @return a <code>String</code> specifying the
* real path, or <code>null</code> if
* the URL does not have any extra path
* information
*
*
*/
public String getPathTranslated();
/**
*
* Returns the portion of the request URI that indicates the context
* of the request. The context path always comes first in a request
* URI. The path starts with a "/" character but does not end with a "/"
* character. For servlets in the default (root) context, this method
* returns "". The container does not decode this string.
*
*
* @return a <code>String</code> specifying the
* portion of the request URI that indicates the context
* of the request
*
*
*/
public String getContextPath();
/**
*
* Returns the query string that is contained in the request
* URL after the path. This method returns <code>null</code>
* if the URL does not have a query string. Same as the value
* of the CGI variable QUERY_STRING.
*
* @return a <code>String</code> containing the query
* string or <code>null</code> if the URL
* contains no query string. The value is not
* decoded by the container.
*
*/
public String getQueryString();
/**
*
* Returns the login of the user making this request, if the
* user has been authenticated, or <code>null</code> if the user
* has not been authenticated.
* Whether the user name is sent with each subsequent request
* depends on the browser and type of authentication. Same as the
* value of the CGI variable REMOTE_USER.
*
* @return a <code>String</code> specifying the login
* of the user making this request, or <code>null</code>
* if the user login is not known
*
*/
public String getRemoteUser();
/**
*
* Returns a boolean indicating whether the authenticated user is included
* in the specified logical "role". Roles and role membership can be
* defined using deployment descriptors. If the user has not been
* authenticated, the method returns <code>false</code>.
*
* @param role a <code>String</code> specifying the name
* of the role
*
* @return a <code>boolean</code> indicating whether
* the user making this request belongs to a given role;
* <code>false</code> if the user has not been
* authenticated
*
*/
public boolean isUserInRole(String role);
/**
*
* Returns a <code>java.security.Principal</code> object containing
* the name of the current authenticated user. If the user has not been
* authenticated, the method returns <code>null</code>.
*
* @return a <code>java.security.Principal</code> containing
* the name of the user making this request;
* <code>null</code> if the user has not been
* authenticated
*
*/
public java.security.Principal getUserPrincipal();
/**
*
* Returns the session ID specified by the client. This may
* not be the same as the ID of the current valid session
* for this request.
* If the client did not specify a session ID, this method returns
* <code>null</code>.
*
*
* @return a <code>String</code> specifying the session
* ID, or <code>null</code> if the request did
* not specify a session ID
*
* @see #isRequestedSessionIdValid
*
*/
public String getRequestedSessionId();
/**
*
* Returns the part of this request's URL from the protocol
* name up to the query string in the first line of the HTTP request.
* The web container does not decode this String.
* For example:
*
*
* <table summary="Examples of Returned Values">
* <tr align=left><th>First line of HTTP request </th>
* <th> Returned Value</th>
* <tr><td>POST /some/path.html HTTP/1.1<td><td>/some/path.html
* <tr><td>GET http://foo.bar/a.html HTTP/1.0
* <td><td>/a.html
* <tr><td>HEAD /xyz?a=b HTTP/1.1<td><td>/xyz
* </table>
*
     * <p>To reconstruct a URL with a scheme and host, use
* {@link HttpUtils#getRequestURL}.
*
* @return a <code>String</code> containing
* the part of the URL from the
* protocol name up to the query string
*
* @see HttpUtils#getRequestURL
*
*/
public String getRequestURI();
/**
*
* Reconstructs the URL the client used to make the request.
* The returned URL contains a protocol, server name, port
* number, and server path, but it does not include query
* string parameters.
*
* <p>Because this method returns a <code>StringBuffer</code>,
* not a string, you can modify the URL easily, for example,
* to append query parameters.
*
* <p>This method is useful for creating redirect messages
* and for reporting errors.
*
* @return a <code>StringBuffer</code> object containing
* the reconstructed URL
*
*/
public StringBuffer getRequestURL();
/**
*
* Returns the part of this request's URL that calls
* the servlet. This path starts with a "/" character
* and includes either the servlet name or a path to
* the servlet, but does not include any extra path
* information or a query string. Same as the value of
* the CGI variable SCRIPT_NAME.
*
* <p>This method will return an empty string ("") if the
* servlet used to process this request was matched using
* the "/*" pattern.
*
* @return a <code>String</code> containing
* the name or path of the servlet being
* called, as specified in the request URL,
* decoded, or an empty string if the servlet
* used to process the request is matched
* using the "/*" pattern.
*
*/
public String getServletPath();
/**
*
* Returns the current <code>HttpSession</code>
* associated with this request or, if there is no
* current session and <code>create</code> is true, returns
* a new session.
*
* <p>If <code>create</code> is <code>false</code>
* and the request has no valid <code>HttpSession</code>,
* this method returns <code>null</code>.
*
* <p>To make sure the session is properly maintained,
* you must call this method before
* the response is committed. If the container is using cookies
* to maintain session integrity and is asked to create a new session
* when the response is committed, an IllegalStateException is thrown.
*
*
*
*
* @param create <code>true</code> to create
* a new session for this request if necessary;
* <code>false</code> to return <code>null</code>
* if there's no current session
*
*
* @return the <code>HttpSession</code> associated
* with this request or <code>null</code> if
* <code>create</code> is <code>false</code>
* and the request has no valid session
*
* @see #getSession()
*
*
*/
public HttpSession getSession(boolean create);
/**
*
* Returns the current session associated with this request,
* or if the request does not have a session, creates one.
*
* @return the <code>HttpSession</code> associated
* with this request
*
* @see #getSession(boolean)
*
*/
public HttpSession getSession();
/**
*
* Checks whether the requested session ID is still valid.
*
* @return <code>true</code> if this
* request has an id for a valid session
* in the current session context;
* <code>false</code> otherwise
*
* @see #getRequestedSessionId
* @see #getSession
* @see HttpSessionContext
*
*/
public boolean isRequestedSessionIdValid();
/**
*
* Checks whether the requested session ID came in as a cookie.
*
* @return <code>true</code> if the session ID
* came in as a
* cookie; otherwise, <code>false</code>
*
*
* @see #getSession
*
*/
public boolean isRequestedSessionIdFromCookie();
/**
*
* Checks whether the requested session ID came in as part of the
* request URL.
*
* @return <code>true</code> if the session ID
* came in as part of a URL; otherwise,
* <code>false</code>
*
*
* @see #getSession
*
*/
public boolean isRequestedSessionIdFromURL();
/**
*
* @deprecated As of Version 2.1 of the Java Servlet
* API, use {@link #isRequestedSessionIdFromURL}
* instead.
*
*/
public boolean isRequestedSessionIdFromUrl();
}
| devjin24/howtomcatworks | bookrefer/jakarta-tomcat-5.0.18-src/jakarta-servletapi-5/jsr154/src/share/javax/servlet/http/HttpServletRequest.java | Java | apache-2.0 | 20,909 |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.internal;
/**
 * Backward-compatibility shim preserving the old
 * {@code org.openqa.selenium.internal.BuildInfo} class name.
 *
 * @deprecated Use {@link org.openqa.selenium.BuildInfo} instead.
 */
@Deprecated
public class BuildInfo extends org.openqa.selenium.BuildInfo {
  // Intentionally empty: all behavior lives in the superclass.
}
| 5hawnknight/selenium | java/client/src/org/openqa/selenium/internal/BuildInfo.java | Java | apache-2.0 | 995 |
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
import google.api_core.operations_v1
from google.cloud.dataproc_v1beta2.proto import workflow_templates_pb2_grpc
class WorkflowTemplateServiceGrpcTransport(object):
"""gRPC transport class providing stubs for
google.cloud.dataproc.v1beta2 WorkflowTemplateService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
    def __init__(
        self, channel=None, credentials=None, address="dataproc.googleapis.com:443"
    ):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): A ``Channel`` instance through
                which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            address (str): The address where the service is hosted.

        Raises:
            ValueError: If both ``channel`` and ``credentials`` are given.
        """
        # If both `channel` and `credentials` are specified, raise an
        # exception (channels come with credentials baked in already).
        if channel is not None and credentials is not None:
            raise ValueError(
                "The `channel` and `credentials` arguments are mutually " "exclusive."
            )

        # Create the channel.
        if channel is None:
            channel = self.create_channel(address=address, credentials=credentials)

        self._channel = channel

        # gRPC uses objects called "stubs" that are bound to the
        # channel and provide a basic method for each RPC.
        self._stubs = {
            "workflow_template_service_stub": workflow_templates_pb2_grpc.WorkflowTemplateServiceStub(
                channel
            )
        }

        # Because this API includes a method that returns a
        # long-running operation (proto: google.longrunning.Operation),
        # instantiate an LRO client.
        self._operations_client = google.api_core.operations_v1.OperationsClient(
            channel
        )
@classmethod
def create_channel(cls, address="dataproc.googleapis.com:443", credentials=None):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
Returns:
grpc.Channel: A gRPC channel object.
"""
return google.api_core.grpc_helpers.create_channel(
address, credentials=credentials, scopes=cls._OAUTH_SCOPES
)
    @property
    def channel(self):
        """The gRPC channel used by the transport.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        # Expose the raw channel so callers can use advanced gRPC features.
        return self._channel
@property
def create_workflow_template(self):
"""Return the gRPC stub for {$apiMethod.name}.
Creates new workflow template.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["workflow_template_service_stub"].CreateWorkflowTemplate
@property
def get_workflow_template(self):
"""Return the gRPC stub for {$apiMethod.name}.
Retrieves the latest workflow template.
Can retrieve previously instantiated template by specifying optional
version parameter.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["workflow_template_service_stub"].GetWorkflowTemplate
@property
def instantiate_workflow_template(self):
"""Return the gRPC stub for {$apiMethod.name}.
Instantiates a template and begins execution.
The returned Operation can be used to track execution of workflow by
polling ``operations.get``. The Operation will complete when entire
workflow is finished.
The running workflow can be aborted via ``operations.cancel``. This will
cause any inflight jobs to be cancelled and workflow-owned clusters to
be deleted.
The ``Operation.metadata`` will be ``WorkflowMetadata``.
On successful completion, ``Operation.response`` will be ``Empty``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["workflow_template_service_stub"].InstantiateWorkflowTemplate
@property
def instantiate_inline_workflow_template(self):
"""Return the gRPC stub for {$apiMethod.name}.
Instantiates a template and begins execution.
This method is equivalent to executing the sequence
``CreateWorkflowTemplate``, ``InstantiateWorkflowTemplate``,
``DeleteWorkflowTemplate``.
The returned Operation can be used to track execution of workflow by
polling ``operations.get``. The Operation will complete when entire
workflow is finished.
The running workflow can be aborted via ``operations.cancel``. This will
cause any inflight jobs to be cancelled and workflow-owned clusters to
be deleted.
The ``Operation.metadata`` will be ``WorkflowMetadata``.
On successful completion, ``Operation.response`` will be ``Empty``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs[
"workflow_template_service_stub"
].InstantiateInlineWorkflowTemplate
@property
def update_workflow_template(self):
"""Return the gRPC stub for {$apiMethod.name}.
Updates (replaces) workflow template. The updated template
must contain version that matches the current server version.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["workflow_template_service_stub"].UpdateWorkflowTemplate
@property
def list_workflow_templates(self):
"""Return the gRPC stub for {$apiMethod.name}.
Lists workflows that match the specified filter in the request.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["workflow_template_service_stub"].ListWorkflowTemplates
@property
def delete_workflow_template(self):
"""Return the gRPC stub for {$apiMethod.name}.
Deletes a workflow template. It does not cancel in-progress workflows.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs["workflow_template_service_stub"].DeleteWorkflowTemplate
| dhermes/google-cloud-python | dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py | Python | apache-2.0 | 8,646 |
#include "OccupancyGrid/occgrid.hpp"
#include "OccupancyGrid/cvmat_serialization.h"
#include <opencv2/opencv.hpp>
#include <boost/format.hpp>
/// Extends OccupancyGrid2D with a log-odds ("inverse sensor model") update.
/// The base class's ray tracer calls is_occupied() for every cell a beam
/// crosses; this override additionally accumulates log-odds evidence for each
/// such cell, comparing its Manhattan cell distance from the robot against
/// the range the laser actually measured.
template<typename real_t, typename int_t>
class OccupancyGrid2DInverseSensor : public OccupancyGrid2D<real_t, int_t> {
  public:
    // Members inherited from the ground-truth grid used for ray tracing.
    using OccupancyGrid2D<real_t, int_t>::og_;
    using OccupancyGrid2D<real_t, int_t>::cell_size_;
    using OccupancyGrid2D<real_t, int_t>::min_pt_;
    using OccupancyGrid2D<real_t, int_t>::FREE;
    int_t observed_manh_range_;         // measured range, in Manhattan cell units
    cv::Vec<int_t, 2> robot_position_;  // robot cell index, refreshed per beam
    cv::Mat_<real_t> log_odds_map_;     // accumulated log-odds occupancy evidence
    const real_t LOG_ODDS_OCCUPIED;     // +1.3863 ~= ln(4): evidence for occupied
    const real_t LOG_ODDS_FREE;         // -1.3863 ~= -ln(4): evidence for free
    OccupancyGrid2DInverseSensor(real_t min_x, real_t min_y, real_t cell_size_x, real_t
        cell_size_y, int_t ncells_x, int_t ncells_y) :
      OccupancyGrid2D<real_t, int_t>(min_x, min_y, cell_size_x, cell_size_y,
          ncells_x, ncells_y),
      observed_manh_range_(),
      robot_position_(),
      log_odds_map_(ncells_x, ncells_y, 0.0L),  // log-odds 0 == p(occ) 0.5
      LOG_ODDS_OCCUPIED(1.3863),
      LOG_ODDS_FREE(-1.3863)
    {
    };

    /// Cache per-beam state: the robot's cell position and the measured range
    /// converted into Manhattan cell units, so is_occupied() can classify each
    /// crossed cell as before/at/beyond the measured hit.
    void set_up_ray_trace(
        real_t px,
        real_t py,
        real_t ptheta,
        real_t observed_range) {
      robot_position_(0) =
        (int)floor((px - min_pt_(0)) / cell_size_(0));
      robot_position_(1) =
        (int)floor((py - min_pt_(1)) / cell_size_(1));
      real_t dx_abs = fabs(cos(ptheta));
      real_t dy_abs = fabs(sin(ptheta));
      real_t dmag = sqrt(dx_abs * dx_abs + dy_abs * dy_abs);
      // Split the metric range into per-axis cell counts along the beam
      // direction and sum them to get a Manhattan cell distance.
      observed_manh_range_ =
        floor(observed_range * dx_abs / dmag / cell_size_(0)) +
        floor(observed_range * dy_abs / dmag / cell_size_(1));
      //printf("-----------------\n");
    }

    /// Manhattan cell distance of cell (i, j) from the cached robot cell.
    inline int_t manh_distance(int_t i, int_t j) {
      return std::abs(i - robot_position_(0)) + std::abs(j - robot_position_(1));
    }

    /// NOTE: despite its name this override has a side effect — it performs
    /// the measurement update on log_odds_map_ for every cell the base-class
    /// ray tracer visits. Cells closer than the measured range gain "free"
    /// evidence, the cell at the measured range gains "occupied" evidence,
    /// cells beyond it are left unchanged (unknown).
    virtual bool is_occupied(int_t i, int_t j) {
      uint8_t val = og_.ptr(i)[j];
      bool retval = (val != FREE);
      int_t d = manh_distance(i, j);
      // update step
      // printf("%d < %d\n", d, observed_manh_range_);
      log_odds_map_(i, j) +=
        (d < observed_manh_range_) ? LOG_ODDS_FREE
        : (d == observed_manh_range_) ? LOG_ODDS_OCCUPIED
        : 0; // unknown
      return retval;
    }

    /// Render the log-odds map as an 8-bit image (255 / (1 + exp(L))):
    /// cells with strong "occupied" evidence render dark, "free" renders
    /// light. Displays it and writes a snapshot to /tmp.
    inline void show(int r) {
      cv::Mat vis;
      cv::exp(log_odds_map_, vis);
      vis = 1 / (1 + vis);
      vis *= 255;
      vis.convertTo(vis, CV_8U);
      cv::imshow("c", vis);
      //cv::imwrite((boost::format("out-%d.png") % r).str(), vis);
      cv::waitKey(1);
      cv::imwrite("/tmp/two_assumption_algo.png", vis);
    }
};
int main(int argc, char** argv) {
if (argc != 4) {
std::cout << "Sample Usage:" << std::endl;
std::cout << "bin/two_assumption_alg Data/player_sim/laser_pose_all.bin Data/player_sim/laser_range_all.bin Data/player_sim/scan_angles_all.bin" << std::endl;
exit(1);
}
cv::Mat laser_pose;
loadMat(laser_pose, argv[1]);
cv::Mat laser_ranges;
loadMat(laser_ranges, argv[2]);
cv::Mat scan_angles;
loadMat(scan_angles, argv[3]);
cv::Vec2d min_pt(-9, -9);
cv::Vec2d range = -2 * min_pt;
cv::Vec2i gridsize(100, 100);
cv::Vec2d cellsize;
cv::divide(range, gridsize, cellsize);
//std::cout << cellsize(0) << cellsize(1) << std::endl;
cv::Vec2i ncells;
cv::divide(min_pt, cellsize, ncells, -2);
//std::cout << ncells(0) << ncells(1) << std::endl;
OccupancyGrid2DInverseSensor<double, int> map(
min_pt(0),
min_pt(1),
cellsize(0),
cellsize(1),
ncells(0),
ncells(1));
double MAX_RANGE = 8;
int r;
for (r = 0; r < laser_pose.rows; r++) {
double* pose = laser_pose.ptr<double>(r);
double* ranges = laser_ranges.ptr<double>(r);
double* angles = scan_angles.ptr<double>(r);
double robot_angle = pose[2];
for (int c = 0; c < scan_angles.cols; c++) {
double total_angle = robot_angle + angles[c];
cv::Vec2d final_pos;
map.set_up_ray_trace(pose[0], pose[1], total_angle, ranges[c]);
bool reflectance;
map.ray_trace(pose[0], pose[1], total_angle, MAX_RANGE, final_pos, reflectance);
}
}
map.show(r);
}
| wecacuee/modern-occupancy-grid | src/two_assumption_alg.cpp | C++ | apache-2.0 | 4,475 |
/**
* Copyright 2010-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.migration.commands;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.Properties;
public final class InfoCommand implements Command {
private final PrintStream out;
public InfoCommand(PrintStream out) {
this.out = out;
}
public void execute(String... params) {
Properties properties = new Properties();
InputStream input = getClass().getClassLoader().getResourceAsStream(
"META-INF/maven/org.mybatis/mybatis-migrations/pom.properties");
if (input != null) {
try {
properties.load(input);
} catch (IOException e) {
// ignore, just don't load the properties
} finally {
try {
input.close();
} catch (IOException e) {
// close quietly
}
}
}
out.printf("%s %s (%s)%n",
properties.getProperty("name"),
properties.getProperty("version"),
properties.getProperty("build"));
out.printf("Java version: %s, vendor: %s%n",
System.getProperty("java.version"),
System.getProperty("java.vendor"));
out.printf("Java home: %s%n", System.getProperty("java.home"));
out.printf("Default locale: %s_%s, platform encoding: %s%n",
System.getProperty("user.language"),
System.getProperty("user.country"),
System.getProperty("sun.jnu.encoding"));
out.printf("OS name: \"%s\", version: \"%s\", arch: \"%s\", family: \"%s\"%n",
System.getProperty("os.name"),
System.getProperty("os.version"),
System.getProperty("os.arch"),
getOsFamily());
}
private static final String getOsFamily() {
String osName = System.getProperty("os.name").toLowerCase();
String pathSep = System.getProperty("path.separator");
if (osName.indexOf("windows") != -1) {
return "windows";
} else if (osName.indexOf("os/2") != -1) {
return "os/2";
} else if (osName.indexOf("z/os") != -1 || osName.indexOf("os/390") != -1) {
return "z/os";
} else if (osName.indexOf("os/400") != -1) {
return "os/400";
} else if (pathSep.equals(";")) {
return "dos";
} else if (osName.indexOf("mac") != -1) {
if (osName.endsWith("x")) {
return "mac"; // MACOSX
}
return "unix";
} else if (osName.indexOf("nonstop_kernel") != -1) {
return "tandem";
} else if (osName.indexOf("openvms") != -1) {
return "openvms";
} else if (pathSep.equals(":")) {
return "unix";
}
return "undefined";
}
}
| bradsokol/migrations | src/main/java/org/apache/ibatis/migration/commands/InfoCommand.java | Java | apache-2.0 | 3,217 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.resource;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.cloud.agent.api.StartupCommand;
import com.cloud.agent.api.StartupRoutingCommand;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.PodCluster;
import com.cloud.exception.AgentUnavailableException;
import com.cloud.host.Host;
import com.cloud.host.Host.Type;
import com.cloud.host.HostStats;
import com.cloud.host.HostVO;
import com.cloud.host.Status;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.resource.ResourceState.Event;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.template.VirtualMachineTemplate;
import com.cloud.utils.Pair;
import com.cloud.utils.fsm.NoTransitionException;
/**
* ResourceManager manages how physical resources are organized within the
* CloudStack. It also manages the life cycle of the physical resources.
*/
public interface ResourceManager extends ResourceService{
    /**
     * Register a listener for different types of resource life cycle events.
     * There can only be one type of listener per type of host.
     *
     * @param event event type, see ResourceListener.java; allows combination of multiple events.
     * @param listener the listener to notify.
     */
    public void registerResourceEvent(Integer event, ResourceListener listener);

    /** Remove a listener previously added via {@link #registerResourceEvent}. */
    public void unregisterResourceEvent(ResourceListener listener);

    /**
     *
     * @param name of adapter
     * @param adapter
     * @param hates, a list of names which will be eliminated by this adapter. Especially for the case where
     * can be only one adapter responds to an event, e.g. startupCommand
     */
    public void registerResourceStateAdapter(String name, ResourceStateAdapter adapter);

    /** Remove the state adapter registered under the given name. */
    public void unregisterResourceStateAdapter(String name);

    public Host createHostAndAgent(Long hostId, ServerResource resource, Map<String, String> details, boolean old, List<String> hostTags,
        boolean forRebalance);

    public Host addHost(long zoneId, ServerResource resource, Type hostType, Map<String, String> hostDetails);

    /** Build a HostVO for an agent that connected on its own (startup commands supplied by the agent). */
    public HostVO createHostVOForConnectedAgent(StartupCommand[] cmds);

    public void checkCIDR(HostPodVO pod, DataCenterVO dc, String serverPrivateIP, String serverPrivateNetmask);

    public HostVO fillRoutingHostVO(HostVO host, StartupRoutingCommand ssCmd, HypervisorType hyType, Map<String, String> details, List<String> hostTags);

    public void deleteRoutingHost(HostVO host, boolean isForced, boolean forceDestroyStorage) throws UnableDeleteHostException;

    public boolean executeUserRequest(long hostId, ResourceState.Event event) throws AgentUnavailableException;

    /** Transition the host's resource state machine; msId identifies the management server making the change. */
    boolean resourceStateTransitTo(Host host, Event event, long msId) throws NoTransitionException;

    // NOTE(review): name presumably means "unmanage host" (typo retained for
    // API compatibility) — confirm semantics against the implementation.
    boolean umanageHost(long hostId);

    boolean maintenanceFailed(long hostId);

    public boolean maintain(final long hostId) throws AgentUnavailableException;

    @Override
    public boolean deleteHost(long hostId, boolean isForced, boolean isForceDeleteStorage);

    public List<HostVO> findDirectlyConnectedHosts();

    public List<HostVO> listAllUpAndEnabledHosts(Host.Type type, Long clusterId, Long podId, long dcId);

    public List<HostVO> listAllHostsInCluster(long clusterId);

    public List<HostVO> listHostsInClusterByStatus(long clusterId, Status status);

    public List<HostVO> listAllUpAndEnabledHostsInOneZoneByType(Host.Type type, long dcId);

    public List<HostVO> listAllHostsInOneZoneByType(Host.Type type, long dcId);

    public List<HostVO> listAllHostsInAllZonesByType(Type type);

    public List<HypervisorType> listAvailHypervisorInZone(Long hostId, Long zoneId);

    public HostVO findHostByGuid(String guid);

    public HostVO findHostByName(String name);

    public List<HostVO> listHostsByNameLike(String name);

    /**
     * Find a pod based on the user id, template, and data center.
     *
     * @param template
     * @param dc
     * @param userId
     * @return
     */
    Pair<HostPodVO, Long> findPod(VirtualMachineTemplate template, ServiceOfferingVO offering, DataCenterVO dc, long accountId, Set<Long> avoids);

    HostStats getHostStatistics(long hostId);

    Long getGuestOSCategoryId(long hostId);

    String getHostTags(long hostId);

    List<PodCluster> listByDataCenter(long dcId);

    List<HostVO> listAllNotInMaintenanceHostsInOneZone(Type type, Long dcId);

    HypervisorType getDefaultHypervisor(long zoneId);

    HypervisorType getAvailableHypervisor(long zoneId);

    /** Look up the discoverer responsible for the given hypervisor type. */
    Discoverer getMatchingDiscover(HypervisorType hypervisorType);

    List<HostVO> findHostByGuid(long dcId, String guid);

    /**
     * @param type
     * @param clusterId
     * @param podId
     * @param dcId
     * @return
     */
    List<HostVO> listAllUpAndEnabledNonHAHosts(Type type, Long clusterId, Long podId, long dcId);
}
| argv0/cloudstack | server/src/com/cloud/resource/ResourceManager.java | Java | apache-2.0 | 5,753 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/securityhub/model/AwsEcsTaskDefinitionInferenceAcceleratorsDetails.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace SecurityHub
{
namespace Model
{
// Default constructor: both fields start unset, so Jsonize() emits an empty
// payload until they are populated.
AwsEcsTaskDefinitionInferenceAcceleratorsDetails::AwsEcsTaskDefinitionInferenceAcceleratorsDetails() :
    m_deviceNameHasBeenSet(false),
    m_deviceTypeHasBeenSet(false)
{
}
// Construct from a JSON view by delegating to operator=.
AwsEcsTaskDefinitionInferenceAcceleratorsDetails::AwsEcsTaskDefinitionInferenceAcceleratorsDetails(JsonView jsonValue) :
    m_deviceNameHasBeenSet(false),
    m_deviceTypeHasBeenSet(false)
{
  *this = jsonValue;
}
// Populate fields from JSON; a field's HasBeenSet flag is raised only when the
// corresponding key is present in the document.
AwsEcsTaskDefinitionInferenceAcceleratorsDetails& AwsEcsTaskDefinitionInferenceAcceleratorsDetails::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("DeviceName"))
  {
    m_deviceName = jsonValue.GetString("DeviceName");
    m_deviceNameHasBeenSet = true;
  }
  if(jsonValue.ValueExists("DeviceType"))
  {
    m_deviceType = jsonValue.GetString("DeviceType");
    m_deviceTypeHasBeenSet = true;
  }
  return *this;
}
// Serialize back to JSON, emitting only the fields that have been set.
JsonValue AwsEcsTaskDefinitionInferenceAcceleratorsDetails::Jsonize() const
{
  JsonValue payload;
  if(m_deviceNameHasBeenSet)
  {
   payload.WithString("DeviceName", m_deviceName);
  }
  if(m_deviceTypeHasBeenSet)
  {
   payload.WithString("DeviceType", m_deviceType);
  }
  return payload;
}
} // namespace Model
} // namespace SecurityHub
} // namespace Aws
| awslabs/aws-sdk-cpp | aws-cpp-sdk-securityhub/source/model/AwsEcsTaskDefinitionInferenceAcceleratorsDetails.cpp | C++ | apache-2.0 | 1,594 |
package mil.nga.giat.geowave.analytics.kmeans.mapreduce;
import java.io.IOException;
import java.util.List;
import mil.nga.giat.geowave.accumulo.mapreduce.GeoWaveWritableInputMapper;
import mil.nga.giat.geowave.accumulo.mapreduce.input.GeoWaveInputKey;
import mil.nga.giat.geowave.analytics.clustering.CentroidManagerGeoWave;
import mil.nga.giat.geowave.analytics.clustering.CentroidPairing;
import mil.nga.giat.geowave.analytics.clustering.NestedGroupCentroidAssignment;
import mil.nga.giat.geowave.analytics.extract.CentroidExtractor;
import mil.nga.giat.geowave.analytics.extract.SimpleFeatureCentroidExtractor;
import mil.nga.giat.geowave.analytics.kmeans.AssociationNotification;
import mil.nga.giat.geowave.analytics.parameters.CentroidParameters;
import mil.nga.giat.geowave.analytics.parameters.JumpParameters;
import mil.nga.giat.geowave.analytics.tools.AnalyticItemWrapper;
import mil.nga.giat.geowave.analytics.tools.AnalyticItemWrapperFactory;
import mil.nga.giat.geowave.analytics.tools.ConfigurationWrapper;
import mil.nga.giat.geowave.analytics.tools.SimpleFeatureItemWrapperFactory;
import mil.nga.giat.geowave.analytics.tools.mapreduce.CountofDoubleWritable;
import mil.nga.giat.geowave.analytics.tools.mapreduce.JobContextConfigurationWrapper;
import mil.nga.giat.geowave.index.StringUtils;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.vividsolutions.jts.geom.Point;
/**
* Calculate the distortation.
* <p/>
* See Catherine A. Sugar and Gareth M. James (2003).
* "Finding the number of clusters in a data set: An information theoretic approach"
* Journal of the American Statistical Association 98 (January): 750–763
*
* @formatter:off Context configuration parameters include:
* <p/>
* "KMeansDistortionMapReduce.Common.DistanceFunctionClass" ->
* {@link mil.nga.giat.geowave.analytics.distance.DistanceFn}
* used to determine distance to centroid
* <p/>
* "KMeansDistortionMapReduce.Centroid.WrapperFactoryClass" ->
* {@link AnalyticItemWrapperFactory} to extract wrap spatial
* objects with Centroid management functions
* <p/>
* "KMeansDistortionMapReduce.Centroid.ExtractorClass" ->
* {@link mil.nga.giat.geowave.analytics.extract.CentroidExtractor}
* <p/>
* "KMeansDistortionMapReduce.Jump.CountOfCentroids" -> May be
* different from actual.
* @formatter:on
* @see CentroidManagerGeoWave
*/
public class KMeansDistortionMapReduce
{
protected static final Logger LOGGER = LoggerFactory.getLogger(KMeansDistortionMapReduce.class);
	/**
	 * Mapper: for each input item, finds its assigned centroid and emits
	 * (group id, (squared distance to that centroid, 1)). The squared
	 * distance covers x, y plus any extra dimensions on the item.
	 */
	public static class KMeansDistortionMapper extends
			GeoWaveWritableInputMapper<Text, CountofDoubleWritable>
	{
		private NestedGroupCentroidAssignment<Object> nestedGroupCentroidAssigner;
		// Reused output key; overwritten with the centroid's group id per item.
		private final Text outputKeyWritable = new Text(
				"1");
		// Reused output value: (squared error, count=1) for the current item.
		private final CountofDoubleWritable outputValWritable = new CountofDoubleWritable();
		private CentroidExtractor<Object> centroidExtractor;
		private AnalyticItemWrapperFactory<Object> itemWrapperFactory;

		// Callback invoked when the nearest centroid for an item is known;
		// it fills the reused key/value writables as a side effect.
		AssociationNotification<Object> centroidAssociationFn = new AssociationNotification<Object>() {
			@Override
			public void notify(
					final CentroidPairing<Object> pairing ) {
				outputKeyWritable.set(pairing.getCentroid().getGroupID());
				final double extraFromItem[] = pairing.getPairedItem().getDimensionValues();
				final double extraCentroid[] = pairing.getCentroid().getDimensionValues();
				final Point p = centroidExtractor.getCentroid(pairing.getPairedItem().getWrappedItem());
				final Point centroid = centroidExtractor.getCentroid(pairing.getCentroid().getWrappedItem());
				// calculate error for dp
				// using identity matrix for the common covariance, therefore
				// E[(p - c)^-1 * cov * (p - c)] => (px - cx)^2 + (py - cy)^2
				double expectation = 0.0;
				// Extra (non-spatial) dimensions contribute to the squared error.
				for (int i = 0; i < extraCentroid.length; i++) {
					expectation += Math.pow(
							extraFromItem[i] - extraCentroid[i],
							2);
				}
				expectation += (Math.pow(
						p.getCoordinate().x - centroid.getCoordinate().x,
						2) + Math.pow(
						p.getCoordinate().y - centroid.getCoordinate().y,
						2));
				// + Math.pow(
				// p.getCoordinate().z - centroid.getCoordinate().z,
				// 2));
				outputValWritable.set(
						expectation,
						1);
			}
		};

		@Override
		protected void mapNativeValue(
				final GeoWaveInputKey key,
				final Object value,
				final org.apache.hadoop.mapreduce.Mapper<GeoWaveInputKey, ObjectWritable, Text, CountofDoubleWritable>.Context context )
				throws IOException,
				InterruptedException {
			// Assigning the centroid triggers centroidAssociationFn above,
			// which populates the reused writables before they are emitted.
			nestedGroupCentroidAssigner.findCentroidForLevel(
					itemWrapperFactory.create(value),
					centroidAssociationFn);
			context.write(
					outputKeyWritable,
					outputValWritable);
		}

		@SuppressWarnings("unchecked")
		@Override
		protected void setup(
				final Mapper<GeoWaveInputKey, ObjectWritable, Text, CountofDoubleWritable>.Context context )
				throws IOException,
				InterruptedException {
			super.setup(context);
			final ConfigurationWrapper config = new JobContextConfigurationWrapper(
					context,
					KMeansDistortionMapReduce.LOGGER);
			// All collaborators come from the job configuration; any failure
			// aborts the task by rethrowing as IOException.
			try {
				nestedGroupCentroidAssigner = new NestedGroupCentroidAssignment<Object>(
						config);
			}
			catch (final Exception e1) {
				throw new IOException(
						e1);
			}
			try {
				centroidExtractor = config.getInstance(
						CentroidParameters.Centroid.EXTRACTOR_CLASS,
						KMeansDistortionMapReduce.class,
						CentroidExtractor.class,
						SimpleFeatureCentroidExtractor.class);
			}
			catch (final Exception e1) {
				throw new IOException(
						e1);
			}
			try {
				itemWrapperFactory = config.getInstance(
						CentroidParameters.Centroid.WRAPPER_FACTORY_CLASS,
						KMeansDistortionMapReduce.class,
						AnalyticItemWrapperFactory.class,
						SimpleFeatureItemWrapperFactory.class);
			}
			catch (final Exception e1) {
				throw new IOException(
						e1);
			}
		}
	}
public static class KMeansDistorationCombiner extends
Reducer<Text, CountofDoubleWritable, Text, CountofDoubleWritable>
{
final CountofDoubleWritable outputValue = new CountofDoubleWritable();
@Override
public void reduce(
final Text key,
final Iterable<CountofDoubleWritable> values,
final Reducer<Text, CountofDoubleWritable, Text, CountofDoubleWritable>.Context context )
throws IOException,
InterruptedException {
double expectation = 0;
double ptCount = 0;
for (final CountofDoubleWritable value : values) {
expectation += value.getValue();
ptCount += value.getCount();
}
outputValue.set(
expectation,
ptCount);
context.write(
key,
outputValue);
}
}
	/**
	 * Reducer: averages the squared errors for a group and writes the
	 * Sugar-James distortion value to Accumulo, keyed by group id with column
	 * family "dt" and column qualifier k (the centroid count).
	 */
	public static class KMeansDistortionReduce extends
			Reducer<Text, CountofDoubleWritable, Text, Mutation>
	{
		// Expected cluster count (k) from the job config; null means "use the
		// actual number of centroids found in the group".
		private String expectedK = null;
		// Empty row text routes mutations to the job's default output table.
		final protected Text output = new Text(
				"");
		private CentroidManagerGeoWave<Object> centroidManager;

		@Override
		public void reduce(
				final Text key,
				final Iterable<CountofDoubleWritable> values,
				final Reducer<Text, CountofDoubleWritable, Text, Mutation>.Context context )
				throws IOException,
				InterruptedException {
			double expectation = 0.0;
			final List<AnalyticItemWrapper<Object>> centroids = centroidManager.getCentroidsForGroup(key.toString());
			// it is possible that the number of items in a group are smaller
			// than the cluster
			final String kCount = expectedK == null ? Integer.toString(centroids.size()) : expectedK;
			if (centroids.size() == 0) {
				return;
			}
			// Dimensionality d = x, y plus the centroid's extra dimensions.
			final double numDimesions = 2 + centroids.get(
					0).getExtraDimensions().length;
			double ptCount = 0;
			for (final CountofDoubleWritable value : values) {
				expectation += value.getValue();
				ptCount += value.getCount();
			}
			if (ptCount > 0) {
				// Mean squared error, then distortion = (E / d) ^ (-d/2)
				// per Sugar & James (2003).
				expectation /= ptCount;
				final Double distortion = Math.pow(
						expectation / numDimesions,
						-(numDimesions / 2));
				// key: group ID | "DISTORTION" | K
				// value: distortion value
				final Mutation m = new Mutation(
						key.toString());
				m.put(
						new Text(
								"dt"),
						new Text(
								kCount),
						new Value(
								distortion.toString().getBytes(
										StringUtils.UTF8_CHAR_SET)));
				// write distortion to accumulo, defaults to table given to
				// AccumuloOutputFormat, in driver
				context.write(
						output, // default table
						m);
			}
		}

		@Override
		protected void setup(
				final Reducer<Text, CountofDoubleWritable, Text, Mutation>.Context context )
				throws IOException,
				InterruptedException {
			super.setup(context);
			final ConfigurationWrapper config = new JobContextConfigurationWrapper(
					context,
					KMeansDistortionMapReduce.LOGGER);
			// A configured k <= 0 means "not specified": fall back to the
			// per-group centroid count at reduce time.
			final int k = config.getInt(
					JumpParameters.Jump.COUNT_OF_CENTROIDS,
					KMeansDistortionMapReduce.class,
					-1);
			if (k > 0) {
				expectedK = Integer.toString(k);
			}
			try {
				centroidManager = new CentroidManagerGeoWave<Object>(
						config);
			}
			catch (final Exception e) {
				KMeansDistortionMapReduce.LOGGER.warn(
						"Unable to initialize centroid manager",
						e);
				throw new IOException(
						"Unable to initialize centroid manager");
			}
		}
	}
}
| viggyprabhu/geowave | geowave-analytics/src/main/java/mil/nga/giat/geowave/analytics/kmeans/mapreduce/KMeansDistortionMapReduce.java | Java | apache-2.0 | 9,638 |
/*
* Copyright (c) 2015-2016 Tapglue (https://www.tapglue.com/). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.tapglue.managers;
import android.content.Context;
import android.content.SharedPreferences;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.tapglue.Tapglue;
import com.tapglue.model.TGEventsList;
import com.tapglue.model.TGFeed;
import com.tapglue.model.TGFeedCount;
import com.tapglue.model.TGPostsList;
import com.tapglue.model.queries.TGQuery;
import com.tapglue.networking.requests.TGRequestCallback;
import com.tapglue.networking.requests.TGRequestErrorType;
public class TGFeedManagerImpl extends AbstractTGManager implements TGFeedManager {
private static final String CACHE_KEY = "FEED_CACHE";
public TGFeedManagerImpl(Tapglue instance) {
super(instance);
}
/**
* Get feed from cache
*
* @param callback
*/
@Override
public void cachedFeedForCurrentUser(@NonNull final TGRequestCallback<TGFeed> callback) {
if (instance.getUserManager().getCurrentUser() == null) {
callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
return;
}
getCachedFeedIfAvailable(callback);
}
/**
* Return cached feed
*
* @param callback
*/
@Override
public void getCachedFeedIfAvailable(@NonNull final TGRequestCallback<TGFeed> callback) {
SharedPreferences cache = instance.getContext().getSharedPreferences(TGFeedManagerImpl.class.toString(), Context.MODE_PRIVATE);
if (!cache.contains(CACHE_KEY)) {
callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.NO_CACHE_OBJECT));
return;
}
TGFeed feed = new Gson().fromJson(cache.getString(CACHE_KEY, null), new TypeToken<TGFeed>() {}.getType());
callback.onRequestFinished(feed, false);
}
@Override
public void retrieveEventsFeedForCurrentUser(@NonNull final TGRequestCallback<TGEventsList> callback) {
retrieveEventsFeedForCurrentUser(null, callback);
}
@Override
public void retrieveEventsFeedForCurrentUser(@Nullable TGQuery whereParameters, @NonNull final TGRequestCallback<TGEventsList> callback) {
if (instance.getUserManager().getCurrentUser() == null) {
callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
return;
}
instance.createRequest().getEvents(whereParameters, callback);
}
@Override
public void retrieveEventsForCurrentUser(@NonNull final TGRequestCallback<TGEventsList> callback) {
retrieveEventsForCurrentUser(null, callback);
}
@Override
public void retrieveEventsForCurrentUser(@Nullable TGQuery whereParameters, @NonNull final TGRequestCallback<TGEventsList> callback) {
if (instance.getUserManager().getCurrentUser() == null) {
callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
return;
}
instance.createRequest().getEvents(whereParameters, callback);
}
@Override
public void retrieveEventsForUser(@NonNull Long userId, @NonNull final TGRequestCallback<TGEventsList> callback) {
retrieveEventsForUser(userId, null, callback);
}
@Override
public void retrieveEventsForUser(@NonNull Long userId, TGQuery whereParameters, @NonNull final TGRequestCallback<TGEventsList> callback) {
if (userId == null) {
callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.NULL_INPUT));
return;
}
else if (instance.getUserManager().getCurrentUser() == null) {
callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
return;
}
instance.createRequest().getEvents(userId, whereParameters, callback);
}
@Override
public void retrieveNewsFeedForCurrentUser(@NonNull final TGRequestCallback<TGFeed> callback) {
retrieveNewsFeedForCurrentUser(null, callback);
}
@Override
public void retrieveNewsFeedForCurrentUser(@Nullable TGQuery whereParameters, @NonNull final TGRequestCallback<TGFeed> callback) {
instance.createRequest().getFeed(whereParameters, new TGRequestCallback<TGFeed>() {
@Override
public boolean callbackIsEnabled() {
return callback.callbackIsEnabled();
}
@Override
public void onRequestError(TGRequestErrorType cause) {
callback.onRequestError(cause);
}
@Override
public void onRequestFinished(TGFeed output, boolean changeDoneOnline) {
saveFeedToCache(output);
callback.onRequestFinished(output, changeDoneOnline);
}
});
}
@Override
public void retrievePostsFeedForCurrentUser(@NonNull final TGRequestCallback<TGPostsList> callback) {
if (instance.getUserManager().getCurrentUser() == null) {
callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
return;
}
instance.createRequest().getMyPosts(callback);
}
@Override
public void retrievePostsForCurrentUser(@NonNull final TGRequestCallback<TGPostsList> callback) {
    // Posts authored by the logged in user; requires an active session.
    if (instance.getUserManager().getCurrentUser() != null) {
        instance.createRequest().getMyPosts(callback);
    } else {
        callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
    }
}
@Override
public void retrievePostsForUser(@NonNull Long userId, @NonNull final TGRequestCallback<TGPostsList> callback) {
    // Input validation first, then session check; each failure is reported once.
    if (userId == null) {
        callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.NULL_INPUT));
    } else if (instance.getUserManager().getCurrentUser() == null) {
        callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
    } else {
        instance.createRequest().getUserPosts(userId, callback);
    }
}
@Override
public void retrieveUnreadCountForCurrentUser(@NonNull final TGRequestCallback<TGFeedCount> callback) {
    // Unread feed count for the current user; requires an active session.
    if (instance.getUserManager().getCurrentUser() != null) {
        instance.createRequest().getFeedCount(callback);
    } else {
        callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
    }
}
@Override
public void retrieveUnreadFeedForCurrentUser(@NonNull final TGRequestCallback<TGFeed> callback) {
    // Unread portion of the feed for the current user; requires an active session.
    if (instance.getUserManager().getCurrentUser() != null) {
        instance.createRequest().getUnreadFeed(callback);
    } else {
        callback.onRequestError(new TGRequestErrorType(TGRequestErrorType.ErrorType.USER_NOT_LOGGED_IN));
    }
}
/**
 * Persists the given feed as JSON in a private {@link SharedPreferences} cache,
 * or clears the cached entry when the feed is absent.
 *
 * @param output feed to cache; {@code null} invalidates (removes) the cached feed
 */
private void saveFeedToCache(@Nullable TGFeed output) {
    SharedPreferences cache = instance.getContext().getSharedPreferences(TGFeedManagerImpl.class.toString(), Context.MODE_PRIVATE);
    if (output == null) {
        // remove() on an absent key is a no-op and fires no change listeners,
        // so the previous contains() pre-check was redundant.
        cache.edit().remove(CACHE_KEY).apply();
    }
    else {
        cache.edit().putString(CACHE_KEY, new Gson().toJson(output, new TypeToken<TGFeed>() {
        }.getType())).apply();
    }
}
}
| tapglue/android_sdk | v1/tapglue-android-sdk/tapglue-android-sdk/src/main/java/com/tapglue/managers/TGFeedManagerImpl.java | Java | apache-2.0 | 8,313 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Defines various data containers for plotting a transect.
This file is not used in the current version of `geotransect` but is kept here
in case it's useful later.
:copyright: 2015 Agile Geoscience
:license: Apache 2.0
"""
import sys
import os
import numpy as np
from scipy import fft
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import obspy
from plot_utils import add_subplot_axes
# Read the SEG-Y file named on the command line and display the section with a
# colourbar, a power spectrum of one trace, and an amplitude histogram.
filename = sys.argv[1]
segyfile = os.path.basename(filename)

# Read all traces.
section = obspy.read(filename)

r_elevs = []
s_elevs = []
esp = []  # energy source point number
ens = []  # ensemble number

for t in section.traces:
    nsamples = t.stats.segy.trace_header.number_of_samples_in_this_trace
    dt = t.stats.segy.trace_header.sample_interval_in_ms_for_this_trace
    if dt > 100:
        # Intervals this large are presumably in microseconds - TODO confirm units.
        dt /= 1000.
    r_elevs.append(t.stats.segy.trace_header.datum_elevation_at_receiver_group)
    s_elevs.append(t.stats.segy.trace_header.receiver_group_elevation)
    esp.append(t.stats.segy.trace_header.energy_source_point_number)
    ens.append(t.stats.segy.trace_header.ensemble_number)

ntraces = len(section.traces)
tbase = np.arange(0, nsamples * dt, dt)
tstart = 0
tend = tbase[-1]
aspect = float(ntraces) / float(nsamples)
nf = 1.0

# Python 3: print is a function (the old Python 2 print statements no longer parse).
print('ntraces', ntraces)
print('nsamples', nsamples)
print('dt', dt / nf)

# Gather the trace samples into a (samples x traces) array.
data = np.zeros((nsamples, ntraces))
for i, trace in enumerate(section.traces):
    data[:, i] = trace.data

line_extents = {'first_trace': 1,
                'last_trace': ntraces,
                'start_time': tstart,
                'end_time': tend
                }

# Clip the colour range at the 99th percentile so outliers do not wash out
# the display.
clip_val = np.percentile(data, 99.0)

print("clip_val", clip_val)
print("max_val", np.amax(data))
print("min_val", np.amin(data))
print("tstart", tstart)
print("tend", tend)

largest = max(np.amax(data), abs(np.amin(data)))

# MAIN PLOT
h = (tend - tstart) / 250.0
w = ntraces / 250.0
fig = plt.figure(figsize=(10, 10), facecolor='w')

# Seismic data
ax = fig.add_axes([0.05, 0.05, 0.9, 0.95])
im = ax.imshow(data, cmap=cm.gray, origin='upper',
               vmin=-clip_val,
               vmax=clip_val,
               extent=(line_extents['first_trace'],
                       line_extents['last_trace'],
                       line_extents['end_time'],
                       line_extents['start_time']),
               aspect=aspect * 0.5
               )
ax.set_ylabel('Two-way time [ms]')
ax.set_xlabel('Trace no.')
ax.grid()
ax.set_title(segyfile)

# Colourbar
extreme = max(np.amax(data), abs(np.amin(data)))
colorbar_ax = add_subplot_axes(ax, [0.075, 0.075, 0.025, 0.15])
fig.colorbar(im, cax=colorbar_ax)
colorbar_ax.text(1.15, 1.1, '%3.0f' % -extreme,
                 transform=colorbar_ax.transAxes,
                 ha='left',
                 va='top')
colorbar_ax.text(1.15, -0.05, '%3.0f' % extreme,
                 transform=colorbar_ax.transAxes,
                 ha='left', fontsize=10)
colorbar_ax.set_axis_off()

# Power spectrum of one representative trace. np.fft.fft replaces the old
# `from scipy import fft` call: in modern SciPy, `scipy.fft` is a module and
# is not callable.
S = np.abs(np.fft.fft(data[:, 1]))
faxis = np.fft.fftfreq(len(data[:, 1]), d=(1 / nf) * dt * 1e-6)
spec_ax = add_subplot_axes(ax, [0.50, 0.075, 0.2, 0.15])
spec_ax.plot(faxis[:len(faxis) // 4],
             np.log10(S[0:len(faxis) // 4]),
             'b', lw=2)
spec_ax.set_xlabel('frequency [Hz]', fontsize=10)
spec_ax.set_xticklabels([0, 100, 200, 300], fontsize=10)
# The y axis is unlabelled log power; hide its ticks entirely. (The previous
# code set y tick labels and then immediately cleared them - dead code.)
spec_ax.set_yticks([])
spec_ax.set_yticklabels([])
spec_ax.text(.95, .9, 'Power spectrum',
             horizontalalignment='right',
             transform=spec_ax.transAxes, fontsize=10
             )
spec_ax.grid('on')

# Histogram
hist_ax = add_subplot_axes(ax, [0.75, 0.075, 0.2, 0.15])
hist_line = hist_ax.hist(np.ravel(data),
                         bins=int(100.0 / (clip_val / largest)))
hist_ax.set_xlim(-clip_val, clip_val)
hist_ax.set_xticklabels([])
hist_ax.set_yticks([])
hist_ax.text(.95, .9, 'Histogram',
             horizontalalignment='right',
             transform=hist_ax.transAxes, fontsize=10
             )

plt.show()
| kinverarity1/geotransect | profile_plot.py | Python | apache-2.0 | 4,222 |
#
# Cookbook Name:: icinga2
# Resource:: idomysqlconnection
#
# Copyright 2014, Virender Khatri
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# The resource supports creating and deleting the IDO MySQL connection object.
actions :create, :delete
default_action :create

# Icinga 2 library providing the IDO MySQL backend.
attribute :library, :kind_of => String, :default => 'db_ido_mysql'
# MySQL server connection settings.
attribute :host, :kind_of => String, :default => 'localhost'
attribute :port, :kind_of => [String, Integer], :default => '3306'
attribute :user, :kind_of => String, :default => 'icinga'
attribute :password, :kind_of => String, :default => 'icinga'
# Target database and the prefix used for its tables.
attribute :database, :kind_of => String, :default => 'icinga'
attribute :table_prefix, :kind_of => String, :default => 'icinga_'
# Instance identification fields.
attribute :instance_name, :kind_of => String, :default => 'default'
attribute :instance_description, :kind_of => String, :default => nil
# High-availability related settings (nil means: let Icinga 2 use its default).
attribute :enable_ha, :kind_of => [TrueClass, FalseClass], :default => nil
attribute :failover_timeout, :kind_of => String, :default => '60s'
# Optional cleanup rules and data categories; presumably rendered verbatim into
# the IdoMysqlConnection object config - confirm against the provider template.
attribute :cleanup, :kind_of => Hash, :default => nil
attribute :categories, :kind_of => Array, :default => nil
| vkhatri/chef-icinga2 | resources/idomysqlconnection.rb | Ruby | apache-2.0 | 1,543 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.util;
import java.util.Comparator;
/**
 * Compares {@link String}s ignoring case, treating {@code null} as smaller than
 * any non-null value (two {@code null}s compare equal).
 */
public class CaseInsensitiveStringComparator implements Comparator<String> {
  public static final CaseInsensitiveStringComparator instance = new CaseInsensitiveStringComparator();

  @Override
  public int compare(String s1, String s2) {
    // null handling: null < non-null, null == null.
    if (s1 == null && s2 == null) return 0;
    if (s1 == null) return -1;
    if (s2 == null) return 1;
    // compareToIgnoreCase is locale-independent, unlike the previous
    // toLowerCase() comparison which depends on the JVM default locale
    // (e.g. the Turkish dotted/dotless 'i' changes results).
    return s1.compareToIgnoreCase(s2);
  }
}
# Copyright:: Copyright (c) 2012, 2014 Megam Systems
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Megam
class Components < Megam::ServerAPI
def initialize(email=nil, api_key=nil, host=nil)
@id = nil
@name =nil
@tosca_type = nil
@inputs = []
@outputs = []
@artifacts = {}
@artifact_type = nil
@content = nil
@artifact_requirements = []
@related_components = []
@operations = []
@status = nil
@created_at = nil
super(email, api_key, host)
end
def components
self
end
def id(arg=nil)
if arg != nil
@id = arg
else
@id
end
end
def name(arg=nil)
if arg != nil
@name = arg
else
@name
end
end
def tosca_type(arg=nil)
if arg != nil
@tosca_type = arg
else
@tosca_type
end
end
def inputs(arg=[])
if arg != []
@inputs = arg
else
@inputs
end
end
def outputs(arg=[])
if arg != []
@outputs = arg
else
@outputs
end
end
def artifacts(arg=nil)
if arg != nil
@artifacts = arg
else
@artifacts
end
end
def artifact_type(arg=nil)
if arg != nil
@artifact_type = arg
else
@artifact_type
end
end
def content(arg=nil)
if arg != nil
@content = arg
else
@content
end
end
def artifact_requirements(arg=[])
if arg != []
@artifact_requirements = arg
else
@artifact_requirements
end
end
def related_components(arg=[])
if arg != []
@related_components = arg
else
@related_components
end
end
def operations(arg=[])
if arg != []
@operations = arg
else
@operations
end
end
def status(arg=nil)
if arg != nil
@status = arg
else
@status
end
end
def created_at(arg=nil)
if arg != nil
@created_at = arg
else
@created_at
end
end
def error?
crocked = true if (some_msg.has_key?(:msg_type) && some_msg[:msg_type] == "error")
end
# Transform the ruby obj -> to a Hash
def to_hash
index_hash = Hash.new
index_hash["json_claz"] = self.class.name
index_hash["id"] = id
index_hash["name"] = name
index_hash["tosca_type"] = tosca_type
index_hash["inputs"] = inputs
index_hash["outputs"] = outputs
index_hash["artifacts"] = artifacts
index_hash["related_components"] = related_components
index_hash["operations"] = operations
index_hash["status"] = status
index_hash["created_at"] = created_at
index_hash
end
# Serialize this object as a hash: called from JsonCompat.
# Verify if this called from JsonCompat during testing.
def to_json(*a)
for_json.to_json(*a)
end
def for_json
result = {
"id" => id,
"name" => name,
"tosca_type" => tosca_type,
"inputs" => inputs,
"outputs" => outputs,
"artifacts" => artifacts,
"related_components" => related_components,
"operations" => operations,
"status" => status,
"created_at" => created_at
}
result
end
def self.json_create(o)
asm = new
asm.id(o["id"]) if o.has_key?("id")
asm.name(o["name"]) if o.has_key?("name")
asm.tosca_type(o["tosca_type"]) if o.has_key?("tosca_type")
asm.inputs(o["inputs"]) if o.has_key?("inputs")
asm.outputs(o["outputs"]) if o.has_key?("outputs")
ar = o["artifacts"]
asm.artifacts[:artifact_type] = ar["artifact_type"] if ar && ar.has_key?("artifact_type")
asm.artifacts[:content] = ar["content"] if ar && ar.has_key?("content")
asm.artifacts[:artifact_requirements] = ar["artifact_requirements"] if ar && ar.has_key?("artifact_requirements")
asm.related_components(o["related_components"]) if o.has_key?("related_components")
asm.operations(o["operations"]) if o.has_key?("operations")
asm.status(o["status"]) if o.has_key?("status")
asm.created_at(o["created_at"]) if o.has_key?("created_at")
asm
end
def self.from_hash(o,tmp_email=nil, tmp_api_key=nil, tmp_host=nil)
asm = self.new(tmp_email, tmp_api_key, tmp_host)
asm.from_hash(o)
asm
end
def from_hash(o)
@id = o["id"] if o.has_key?("id")
@name = o["name"] if o.has_key?("name")
@tosca_type = o["tosca_type"] if o.has_key?("tosca_type")
@inputs = o["inputs"] if o.has_key?("inputs")
@outputs = o["outputs"] if o.has_key?("outputs")
@artifacts = o["artifacts"] if o.has_key?("artifacts")
@related_components = o["related_components"] if o.has_key?("related_components")
@operations = o["operations"] if o.has_key?("operations")
@status = o["status"] if o.has_key?("status")
@created_at = o["created_at"] if o.has_key?("created_at")
self
end
def self.create(params)
asm = from_hash(params, params["email"], params["api_key"], params["host"])
asm.create
end
# Load a account by email_p
def self.show(params)
asm = self.new(params["email"], params["api_key"], params["host"])
asm.megam_rest.get_components(params["id"])
end
def self.update(params)
asm = from_hash(params, params["email"] || params[:email], params["api_key"] || params[:api_key], params["host"] || params[:host])
asm.update
end
# Create the node via the REST API
def update
megam_rest.update_component(to_hash)
end
def to_s
Megam::Stuff.styled_hash(to_hash)
end
end
end | arunkumarsekar06/megam_api | lib/megam/core/components.rb | Ruby | apache-2.0 | 6,597 |
/*
* Copyright 2015 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.samples.apps.iosched.provider;
import android.app.SearchManager;
import android.content.ContentProvider;
import android.content.ContentProviderOperation;
import android.content.ContentProviderResult;
import android.content.ContentValues;
import android.content.Context;
import android.content.OperationApplicationException;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.database.sqlite.SQLiteConstraintException;
import android.database.sqlite.SQLiteDatabase;
import android.net.Uri;
import android.os.ParcelFileDescriptor;
import android.provider.BaseColumns;
import android.text.TextUtils;
import android.util.Log;
import com.google.samples.apps.iosched.Config;
import com.google.samples.apps.iosched.appwidget.ScheduleWidgetProvider;
import com.google.samples.apps.iosched.provider.ScheduleContract.Announcements;
import com.google.samples.apps.iosched.provider.ScheduleContract.Blocks;
import com.google.samples.apps.iosched.provider.ScheduleContract.Feedback;
import com.google.samples.apps.iosched.provider.ScheduleContract.HashtagColumns;
import com.google.samples.apps.iosched.provider.ScheduleContract.Hashtags;
import com.google.samples.apps.iosched.provider.ScheduleContract.MyFeedbackSubmitted;
import com.google.samples.apps.iosched.provider.ScheduleContract.MyReservationColumns;
import com.google.samples.apps.iosched.provider.ScheduleContract.MyReservations;
import com.google.samples.apps.iosched.provider.ScheduleContract.MySchedule;
import com.google.samples.apps.iosched.provider.ScheduleContract.MyScheduleColumns;
import com.google.samples.apps.iosched.provider.ScheduleContract.MyViewedVideos;
import com.google.samples.apps.iosched.provider.ScheduleContract.Rooms;
import com.google.samples.apps.iosched.provider.ScheduleContract.SearchSuggest;
import com.google.samples.apps.iosched.provider.ScheduleContract.SearchTopicsSessions;
import com.google.samples.apps.iosched.provider.ScheduleContract.Sessions;
import com.google.samples.apps.iosched.provider.ScheduleContract.Speakers;
import com.google.samples.apps.iosched.provider.ScheduleContract.Tags;
import com.google.samples.apps.iosched.provider.ScheduleContract.VideoColumns;
import com.google.samples.apps.iosched.provider.ScheduleContract.Videos;
import com.google.samples.apps.iosched.provider.ScheduleDatabase.SessionsSearchColumns;
import com.google.samples.apps.iosched.provider.ScheduleDatabase.SessionsSpeakers;
import com.google.samples.apps.iosched.provider.ScheduleDatabase.Tables;
import com.google.samples.apps.iosched.settings.SettingsUtils;
import com.google.samples.apps.iosched.util.AccountUtils;
import com.google.samples.apps.iosched.util.SelectionBuilder;
import java.io.FileDescriptor;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static com.google.samples.apps.iosched.util.LogUtils.LOGD;
import static com.google.samples.apps.iosched.util.LogUtils.LOGE;
import static com.google.samples.apps.iosched.util.LogUtils.LOGV;
import static com.google.samples.apps.iosched.util.LogUtils.makeLogTag;
/**
* {@link android.content.ContentProvider} that stores {@link ScheduleContract} data. Data is
* usually inserted by {@link com.google.samples.apps.iosched.sync.SyncHelper}, and queried using
* {@link android.support.v4.app.LoaderManager} pattern.
*/
public class ScheduleProvider extends ContentProvider {
private static final String TAG = makeLogTag(ScheduleProvider.class);
private ScheduleDatabase mOpenHelper;
private ScheduleProviderUriMatcher mUriMatcher;
/**
 * Providing important state information to be included in bug reports.
 *
 * !!! Remember !!! Any important data logged to {@code writer} shouldn't contain personally
 * identifiable information as it can be seen in bugreports.
 *
 * @param fd     file descriptor of the dump target (unused here)
 * @param writer sink for the human-readable state report
 * @param args   additional dump arguments (unused here)
 */
@Override
public void dump(FileDescriptor fd, PrintWriter writer, String[] args) {
    Context context = getContext();
    // Using try/catch block in case there are issues retrieving information to log.
    try {
        // Calling append in multiple calls is typically better than creating net new strings to
        // pass to method invocations.
        writer.print("Last sync attempted: ");
        writer.println(new java.util.Date(SettingsUtils.getLastSyncAttemptedTime(context)));
        writer.print("Last sync successful: ");
        writer.println(new java.util.Date(SettingsUtils.getLastSyncSucceededTime(context)));
        writer.print("Current sync interval: ");
        writer.println(SettingsUtils.getCurSyncInterval(context));
        writer.print("Is an account active: ");
        writer.println(AccountUtils.hasActiveAccount(context));
        boolean canGetAuthToken = !TextUtils.isEmpty(AccountUtils.getAuthToken(context));
        writer.print("Can an auth token be retrieved: ");
        writer.println(canGetAuthToken);
    } catch (Exception exception) {
        // A dump must never crash the process; surface the failure in the report itself.
        writer.append("Exception while dumping state: ");
        exception.printStackTrace(writer);
    }
}
@Override
public boolean onCreate() {
    // Keep provider startup cheap: just construct the helpers, no I/O yet.
    mUriMatcher = new ScheduleProviderUriMatcher();
    mOpenHelper = new ScheduleDatabase(getContext());
    return true;
}
// Closes and deletes the backing database, then recreates a fresh helper.
private void deleteDatabase() {
    // TODO: wait for content provider operations to finish, then tear down
    mOpenHelper.close();
    ScheduleDatabase.deleteDatabase(getContext());
    mOpenHelper = new ScheduleDatabase(getContext());
}
/** {@inheritDoc} */
@Override
public String getType(Uri uri) {
    // The MIME type is carried by the enum entry matching this URI.
    return mUriMatcher.matchUri(uri).contentType;
}
/**
 * Returns a tuple of question marks. For example, if {@code count} is 3, returns "(?,?,?)".
 */
private String makeQuestionMarkTuple(int count) {
    if (count < 1) {
        return "()";
    }
    StringBuilder tuple = new StringBuilder("(");
    for (int i = 0; i < count; i++) {
        if (i > 0) {
            tuple.append(',');
        }
        tuple.append('?');
    }
    return tuple.append(')').toString();
}
/**
 * Adds the {@code tagsFilter} query parameter to the given {@code builder}. This query
 * parameter is used when the user makes a selection containing multiple filters.
 *
 * @param builder       selection under construction; WHERE/HAVING clauses are appended in place
 * @param tagsFilter    comma-separated list of required tag IDs
 * @param numCategories number of distinct tag categories in the filter (digits expected);
 *                      falls back to 1 when absent or unparsable
 */
private void addTagsFilter(SelectionBuilder builder, String tagsFilter, String numCategories) {
    // Note: for context, remember that session queries are done on a join of sessions
    // and the sessions_tags relationship table, and are GROUP'ed BY the session ID.
    String[] requiredTags = tagsFilter.split(",");
    if (requiredTags.length == 0) {
        // filtering by 0 tags -- no-op
        return;
    } else if (requiredTags.length == 1) {
        // filtering by only one tag, so a simple WHERE clause suffices
        builder.where(Tags.TAG_ID + "=?", requiredTags[0]);
    } else {
        // Filtering by multiple tags, so we must add a WHERE clause with an IN operator,
        // and add a HAVING statement to exclude groups that fall short of the number
        // of required tags. For example, if requiredTags is { "X", "Y", "Z" }, and a certain
        // session only has tags "X" and "Y", it will be excluded by the HAVING statement.
        int categories = 1;
        if (numCategories != null && TextUtils.isDigitsOnly(numCategories)) {
            try {
                // Defensive parse; isDigitsOnly should prevent failures, but a
                // malformed value must not break the whole query.
                categories = Integer.parseInt(numCategories);
                LOGD(TAG, "Categories being used " + categories);
            } catch (Exception ex) {
                LOGE(TAG, "exception parsing categories ", ex);
            }
        }
        String questionMarkTuple = makeQuestionMarkTuple(requiredTags.length);
        builder.where(Tags.TAG_ID + " IN " + questionMarkTuple, requiredTags);
        builder.having(
                "COUNT(" + Qualified.SESSIONS_SESSION_ID + ") >= " + categories);
    }
}
/** {@inheritDoc} */
@Override
public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
        String sortOrder) {
    final SQLiteDatabase db = mOpenHelper.getReadableDatabase();
    String tagsFilter = uri.getQueryParameter(Sessions.QUERY_PARAMETER_TAG_FILTER);
    String categories = uri.getQueryParameter(Sessions.QUERY_PARAMETER_CATEGORIES);
    ScheduleUriEnum matchingUriEnum = mUriMatcher.matchUri(uri);
    // Avoid the expensive string concatenation below if not loggable.
    if (Log.isLoggable(TAG, Log.VERBOSE)) {
        LOGV(TAG, "uri=" + uri + " code=" + matchingUriEnum.code + " proj=" +
                Arrays.toString(projection) + " selection=" + selection + " args="
                + Arrays.toString(selectionArgs) + ")");
    }
    switch (matchingUriEnum) {
        default: {
            // Most cases are handled with simple SelectionBuilder.
            final SelectionBuilder builder = buildExpandedSelection(uri, matchingUriEnum.code);

            // If a special filter was specified, try to apply it.
            if (!TextUtils.isEmpty(tagsFilter) && !TextUtils.isEmpty(categories)) {
                addTagsFilter(builder, tagsFilter, categories);
            }

            boolean distinct = ScheduleContractHelper.isQueryDistinct(uri);

            Cursor cursor = builder
                    .where(selection, selectionArgs)
                    .query(db, distinct, projection, sortOrder, null);

            Context context = getContext();
            if (null != context) {
                // Register so CursorLoaders observing this URI reload on change.
                cursor.setNotificationUri(context.getContentResolver(), uri);
            }
            return cursor;
        }
        case SEARCH_SUGGEST: {
            final SelectionBuilder builder = new SelectionBuilder();

            // Adjust incoming query to become SQL text match.
            selectionArgs[0] = selectionArgs[0] + "%";
            builder.table(Tables.SEARCH_SUGGEST);
            builder.where(selection, selectionArgs);
            // Expose the suggestion text also as the query column, as the
            // search framework's suggestion contract expects.
            builder.map(SearchManager.SUGGEST_COLUMN_QUERY,
                    SearchManager.SUGGEST_COLUMN_TEXT_1);

            projection = new String[]{
                    BaseColumns._ID,
                    SearchManager.SUGGEST_COLUMN_TEXT_1,
                    SearchManager.SUGGEST_COLUMN_QUERY
            };

            final String limit = uri.getQueryParameter(SearchManager.SUGGEST_PARAMETER_LIMIT);
            return builder.query(db, false, projection, SearchSuggest.DEFAULT_SORT, limit);
        }
        case SEARCH_TOPICS_SESSIONS: {
            if (selectionArgs == null || selectionArgs.length == 0) {
                // No query given: return an empty merged cursor.
                return createMergedSearchCursor(null, null);
            }
            String selectionArg = selectionArgs[0] == null ? "" : selectionArgs[0];
            // First we query the Tags table to find any tags that match the given query
            Cursor tags = query(Tags.CONTENT_URI, SearchTopicsSessions.TOPIC_TAG_PROJECTION,
                    SearchTopicsSessions.TOPIC_TAG_SELECTION,
                    new String[] {Config.Tags.CATEGORY_TRACK, selectionArg + "%"},
                    SearchTopicsSessions.TOPIC_TAG_SORT);
            // Then we query the sessions_search table and get a list of sessions that match
            // the given keywords.
            Cursor search = null;
            if (selectionArgs[0] != null) { // dont query if there was no selectionArg.
                search = query(ScheduleContract.Sessions.buildSearchUri(selectionArg),
                        SearchTopicsSessions.SEARCH_SESSIONS_PROJECTION,
                        null, null,
                        ScheduleContract.Sessions.SORT_BY_TYPE_THEN_TIME);
            }
            // Now that we have two cursors, we merge the cursors and return a unified view
            // of the two result sets.
            return createMergedSearchCursor(tags, search);
        }
    }
}
/**
 * Create a {@link MatrixCursor} given the tags and search cursors.
 *
 * @param tags Cursor with the projection {@link SearchTopicsSessions#TOPIC_TAG_PROJECTION}.
 * @param search Cursor with the projection
 *               {@link SearchTopicsSessions#SEARCH_SESSIONS_PROJECTION}.
 * @return Returns a MatrixCursor always with {@link SearchTopicsSessions#DEFAULT_PROJECTION}
 */
private Cursor createMergedSearchCursor(Cursor tags, Cursor search) {
    // Size the MatrixCursor for the combined row count of both inputs.
    int tagRows = (tags == null) ? 0 : tags.getCount();
    int searchRows = (search == null) ? 0 : search.getCount();
    MatrixCursor merged = new MatrixCursor(
            SearchTopicsSessions.DEFAULT_PROJECTION, tagRows + searchRows);
    // Topic tag rows come first; the tag name is wrapped in braces as the snippet.
    if (tags != null) {
        while (tags.moveToNext()) {
            merged.addRow(
                    new Object[]{
                            tags.getLong(0),
                            tags.getString(1), /*tag_id*/
                            "{" + tags.getString(2) + "}", /*search_snippet*/
                            1}); /*is_topic_tag*/
        }
    }
    // Followed by the keyword matches from the sessions search table.
    if (search != null) {
        while (search.moveToNext()) {
            merged.addRow(
                    new Object[]{
                            search.getLong(0),
                            search.getString(1),
                            search.getString(2), /*search_snippet*/
                            0}); /*is_topic_tag*/
        }
    }
    return merged;
}
/** {@inheritDoc} */
@Override
public Uri insert(Uri uri, ContentValues values) {
    LOGV(TAG, "insert(uri=" + uri + ", values=" + values.toString()
            + ", account=" + getCurrentAccountName(uri, false) + ")");
    final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
    ScheduleUriEnum matchingUriEnum = mUriMatcher.matchUri(uri);
    // URIs that map directly onto a table are inserted generically here; the
    // switch below then only builds the item URI to return. URIs with a null
    // table (the my_* and relationship cases) insert inside the switch instead.
    if (matchingUriEnum.table != null) {
        try {
            db.insertOrThrow(matchingUriEnum.table, null, values);
            notifyChange(uri);
        } catch (SQLiteConstraintException exception) {
            // Leaving this here as it's handy to breakpoint on this throw when debugging a
            // bootstrap file issue.
            throw exception;
        }
    }
    switch (matchingUriEnum) {
        case BLOCKS: {
            return Blocks.buildBlockUri(values.getAsString(Blocks.BLOCK_ID));
        }
        case CARDS: {
            return ScheduleContract.Cards.buildCardUri(values.getAsString(
                    ScheduleContract.Cards.CARD_ID));
        }
        case TAGS: {
            return Tags.buildTagUri(values.getAsString(Tags.TAG_ID));
        }
        case ROOMS: {
            return Rooms.buildRoomUri(values.getAsString(Rooms.ROOM_ID));
        }
        case SESSIONS: {
            return Sessions.buildSessionUri(values.getAsString(Sessions.SESSION_ID));
        }
        case SESSIONS_ID_SPEAKERS: {
            return Speakers.buildSpeakerUri(values.getAsString(SessionsSpeakers.SPEAKER_ID));
        }
        case SESSIONS_ID_TAGS: {
            return Tags.buildTagUri(values.getAsString(Tags.TAG_ID));
        }
        case SESSIONS_ID_RELATED: {
            // Relationship row: the session id comes from the URI path, not the values.
            values.put(Sessions.SESSION_ID, Sessions.getSessionId(uri));
            db.insertOrThrow(Tables.RELATED_SESSIONS, null, values);
            notifyChange(uri);
            return uri;
        }
        case MY_SCHEDULE: {
            // User data rows are stamped with the active account before insert.
            values.put(MySchedule.MY_SCHEDULE_ACCOUNT_NAME, getCurrentAccountName(uri, false));
            db.insertOrThrow(Tables.MY_SCHEDULE, null, values);
            notifyChange(uri);
            Uri sessionUri = Sessions.buildSessionUri(
                    values.getAsString(MyScheduleColumns.SESSION_ID));
            notifyChange(sessionUri);
            // Queries for sessions in user's schedule are affected by this change.
            notifyChange(Sessions.CONTENT_MY_SCHEDULE_URI);
            return sessionUri;
        }
        case MY_RESERVATIONS: {
            values.put(MyReservations.MY_RESERVATION_ACCOUNT_NAME, getCurrentAccountName(uri, false));
            db.insertOrThrow(Tables.MY_RESERVATIONS, null, values);
            notifyChange(uri);
            Uri sessionUri = Sessions.buildSessionUri(
                    values.getAsString(MyReservationColumns.SESSION_ID));
            notifyChange(sessionUri);
            // Queries for sessions in user's schedule are affected by this change.
            notifyChange(Sessions.CONTENT_MY_SCHEDULE_URI);
            return sessionUri;
        }
        case MY_VIEWED_VIDEOS: {
            values.put(MyViewedVideos.MY_VIEWED_VIDEOS_ACCOUNT_NAME,
                    getCurrentAccountName(uri, false));
            db.insertOrThrow(Tables.MY_VIEWED_VIDEO, null, values);
            notifyChange(uri);
            Uri videoUri = Videos.buildVideoUri(
                    values.getAsString(MyViewedVideos.VIDEO_ID));
            notifyChange(videoUri);
            return videoUri;
        }
        case MY_FEEDBACK_SUBMITTED: {
            values.put(MyFeedbackSubmitted.MY_FEEDBACK_SUBMITTED_ACCOUNT_NAME,
                    getCurrentAccountName(uri, false));
            db.insertOrThrow(Tables.MY_FEEDBACK_SUBMITTED, null, values);
            notifyChange(uri);
            Uri sessionUri = Sessions.buildSessionUri(
                    values.getAsString(MyFeedbackSubmitted.SESSION_ID));
            notifyChange(sessionUri);
            return sessionUri;
        }
        case SPEAKERS: {
            return Speakers.buildSpeakerUri(values.getAsString(Speakers.SPEAKER_ID));
        }
        case ANNOUNCEMENTS: {
            return Announcements.buildAnnouncementUri(values
                    .getAsString(Announcements.ANNOUNCEMENT_ID));
        }
        case SEARCH_SUGGEST: {
            return SearchSuggest.CONTENT_URI;
        }
        case MAPGEOJSON: {
            return ScheduleContract.MapGeoJson.buildGeoJsonUri();
        }
        case MAPTILES: {
            return ScheduleContract.MapTiles.buildFloorUri(values.getAsString(
                    ScheduleContract.MapTiles.TILE_FLOOR));
        }
        case FEEDBACK_FOR_SESSION: {
            return Feedback.buildFeedbackUri(values.getAsString(Feedback.SESSION_ID));
        }
        case HASHTAGS: {
            return Hashtags.buildHashtagUri(values.getAsString(Hashtags.HASHTAG_NAME));
        }
        case VIDEOS: {
            return Videos.buildVideoUri(values.getAsString(Videos.VIDEO_ID));
        }
        default: {
            throw new UnsupportedOperationException("Unknown insert uri: " + uri);
        }
    }
}
/** {@inheritDoc} */
@Override
public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
    String accountName = getCurrentAccountName(uri, false);
    Uri notifyUri = null;
    LOGV(TAG, "update(uri=" + uri + ", values=" + values.toString()
            + ", account=" + accountName + ")");
    boolean isAccountUpdateAllowed = ScheduleContractHelper.isAccountUpdateAllowed(uri);
    final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
    ScheduleUriEnum matchingUriEnum = mUriMatcher.matchUri(uri);
    if (matchingUriEnum == ScheduleUriEnum.SEARCH_INDEX) {
        // Special-cased: rebuild the search index rather than updating rows.
        ScheduleDatabase.updateSessionSearchIndex(db);
        return 1;
    }
    final SelectionBuilder builder = buildSimpleSelection(uri);
    if (matchingUriEnum == ScheduleUriEnum.SESSIONS_ID_RELATED) {
        // update not supported
        return 0;
    }
    // For per-user tables, scope the update to the active account and strip the
    // account column from the values unless the URI explicitly allows it.
    if (matchingUriEnum == ScheduleUriEnum.MY_SCHEDULE) {
        if (!isAccountUpdateAllowed) {
            values.remove(MySchedule.MY_SCHEDULE_ACCOUNT_NAME);
        }
        builder.where(MySchedule.MY_SCHEDULE_ACCOUNT_NAME + "=?", accountName);
        // Also notify session listeners.
        notifyUri = Sessions.CONTENT_MY_SCHEDULE_URI;
    }
    if (matchingUriEnum == ScheduleUriEnum.MY_RESERVATIONS) {
        values.remove(MyReservations.MY_RESERVATION_ACCOUNT_NAME);
        builder.where(MyReservations.MY_RESERVATION_ACCOUNT_NAME + "=?", accountName);
    }
    if (matchingUriEnum == ScheduleUriEnum.MY_VIEWED_VIDEOS) {
        values.remove(MyViewedVideos.MY_VIEWED_VIDEOS_ACCOUNT_NAME);
        builder.where(MyViewedVideos.MY_VIEWED_VIDEOS_ACCOUNT_NAME + "=?", accountName);
    }
    if (matchingUriEnum == ScheduleUriEnum.MY_FEEDBACK_SUBMITTED) {
        if (!isAccountUpdateAllowed) {
            values.remove(MyFeedbackSubmitted.MY_FEEDBACK_SUBMITTED_ACCOUNT_NAME);
        }
        builder.where(MyFeedbackSubmitted.MY_FEEDBACK_SUBMITTED_ACCOUNT_NAME + "=?",
                accountName);
    }
    int retVal = builder.where(selection, selectionArgs).update(db, values);
    if (retVal > 0) {
        // Only notify observers when at least one row actually changed.
        notifyChange(uri);
        if (null != notifyUri) notifyChange(notifyUri);
    }
    return retVal;
}
/** {@inheritDoc} */
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
    String accountName = getCurrentAccountName(uri, false);
    LOGV(TAG, "delete(uri=" + uri + ", account=" + accountName + ")");
    // NOTE(review): `==` is reference equality; this only matches when callers
    // pass the exact BASE_CONTENT_URI instance (Uri.equals would also match
    // equal-valued URIs) - confirm that is the intended contract.
    if (uri == ScheduleContract.BASE_CONTENT_URI) {
        // Handle whole database deletes (e.g. when signing out)
        deleteDatabase();
        notifyChange(uri);
        return 1;
    }
    final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
    final SelectionBuilder builder = buildSimpleSelection(uri);
    ScheduleUriEnum matchingUriEnum = mUriMatcher.matchUri(uri);
    // For per-user tables, scope the delete to the active account only.
    if (matchingUriEnum == ScheduleUriEnum.MY_SCHEDULE) {
        builder.where(MySchedule.MY_SCHEDULE_ACCOUNT_NAME + "=?", accountName);
    }
    if (matchingUriEnum == ScheduleUriEnum.MY_RESERVATIONS) {
        builder.where(MyReservations.MY_RESERVATION_ACCOUNT_NAME + "=?", accountName);
    }
    if (matchingUriEnum == ScheduleUriEnum.MY_VIEWED_VIDEOS) {
        builder.where(MyViewedVideos.MY_VIEWED_VIDEOS_ACCOUNT_NAME + "=?", accountName);
    }
    if (matchingUriEnum == ScheduleUriEnum.MY_FEEDBACK_SUBMITTED) {
        builder.where(
                MyFeedbackSubmitted.MY_FEEDBACK_SUBMITTED_ACCOUNT_NAME + "=?", accountName);
    }
    int retVal = builder.where(selection, selectionArgs).delete(db);
    if (retVal > 0) {
        // Only notify observers when at least one row was removed.
        notifyChange(uri);
    }
    return retVal;
}
/**
 * Notifies the system that the given {@code uri} data has changed.
 * <p/>
 * Notifications are suppressed for uris issued by the sync adapter, to avoid a
 * flood of notifications during a sync. The
 * {@link com.google.samples.apps.iosched.sync.ConferenceDataHandler} notifies all top level
 * conference paths once the conference data sync is done, and the
 * {@link com.google.samples.apps.iosched.sync.userdata.AbstractUserDataSyncHelper} notifies all
 * user data related paths once the user data sync is done.
 */
private void notifyChange(Uri uri) {
    if (ScheduleContractHelper.isUriCalledFromSyncAdapter(uri)) {
        // Sync writes are batched; the sync handlers issue the notifications.
        return;
    }
    final Context ctx = getContext();
    ctx.getContentResolver().notifyChange(uri, null);
    // Widgets can't register content observers so we refresh widgets separately.
    ctx.sendBroadcast(ScheduleWidgetProvider.getRefreshBroadcastIntent(ctx, false));
}
/**
 * Applies the given set of {@link ContentProviderOperation} inside a single
 * {@link SQLiteDatabase} transaction. If any operation fails, the whole batch
 * is rolled back.
 */
@Override
public ContentProviderResult[] applyBatch(ArrayList<ContentProviderOperation> operations)
        throws OperationApplicationException {
    final SQLiteDatabase database = mOpenHelper.getWritableDatabase();
    database.beginTransaction();
    try {
        final ContentProviderResult[] outcome = new ContentProviderResult[operations.size()];
        for (int index = 0; index < outcome.length; index++) {
            outcome[index] = operations.get(index).apply(this, outcome, index);
        }
        database.setTransactionSuccessful();
        return outcome;
    } finally {
        database.endTransaction();
    }
}
/**
 * Build a simple {@link SelectionBuilder} to match the requested
 * {@link Uri}. This is usually enough to support {@link #insert},
 * {@link #update}, and {@link #delete} operations.
 */
private SelectionBuilder buildSimpleSelection(Uri uri) {
    final SelectionBuilder builder = new SelectionBuilder();
    ScheduleUriEnum matchingUriEnum = mUriMatcher.matchUri(uri);
    // The main Uris, corresponding to the root of each type of Uri, do not have any selection
    // criteria so the full table is used. The others apply a selection criteria.
    switch (matchingUriEnum) {
        case BLOCKS:
        case CARDS:
        case TAGS:
        case ROOMS:
        case SESSIONS:
        case SPEAKERS:
        case ANNOUNCEMENTS:
        case MAPGEOJSON:
        case MAPTILES:
        case SEARCH_SUGGEST:
        case HASHTAGS:
        case VIDEOS:
            // Root uris: the enum carries the backing table name directly.
            return builder.table(matchingUriEnum.table);
        case BLOCKS_ID: {
            final String blockId = Blocks.getBlockId(uri);
            return builder.table(Tables.BLOCKS)
                    .where(Blocks.BLOCK_ID + "=?", blockId);
        }
        case TAGS_ID: {
            final String tagId = Tags.getTagId(uri);
            return builder.table(Tables.TAGS)
                    .where(Tags.TAG_ID + "=?", tagId);
        }
        case ROOMS_ID: {
            final String roomId = Rooms.getRoomId(uri);
            return builder.table(Tables.ROOMS)
                    .where(Rooms.ROOM_ID + "=?", roomId);
        }
        case SESSIONS_ID: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.SESSIONS)
                    .where(Sessions.SESSION_ID + "=?", sessionId);
        }
        case SESSIONS_ID_SPEAKERS: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.SESSIONS_SPEAKERS)
                    .where(Sessions.SESSION_ID + "=?", sessionId);
        }
        case SESSIONS_ID_TAGS: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.SESSIONS_TAGS)
                    .where(Sessions.SESSION_ID + "=?", sessionId);
        }
        case SESSIONS_ID_RELATED: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.RELATED_SESSIONS)
                    .where(Sessions.SESSION_ID + "=?", sessionId);
        }
        case SESSIONS_MY_SCHEDULE: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.MY_SCHEDULE)
                    .where(ScheduleContract.MyScheduleColumns.SESSION_ID + "=?", sessionId);
        }
        // Per-user tables are always constrained to the current account name so
        // one account cannot read or modify another account's rows. The account
        // value is bound as a "?" argument, so no SQL sanitization is needed
        // here (hence sanitize=false).
        case MY_SCHEDULE: {
            return builder.table(Tables.MY_SCHEDULE)
                    .where(MySchedule.MY_SCHEDULE_ACCOUNT_NAME + "=?",
                            getCurrentAccountName(uri, false));
        }
        case MY_RESERVATIONS: {
            return builder.table(Tables.MY_RESERVATIONS)
                    .where(MyReservations.MY_RESERVATION_ACCOUNT_NAME + "=?",
                            getCurrentAccountName(uri, false));
        }
        case MY_VIEWED_VIDEOS: {
            return builder.table(Tables.MY_VIEWED_VIDEO)
                    .where(MyViewedVideos.MY_VIEWED_VIDEOS_ACCOUNT_NAME + "=?",
                            getCurrentAccountName(uri, false));
        }
        case MY_FEEDBACK_SUBMITTED: {
            return builder.table(Tables.MY_FEEDBACK_SUBMITTED)
                    .where(MyFeedbackSubmitted.MY_FEEDBACK_SUBMITTED_ACCOUNT_NAME + "=?",
                            getCurrentAccountName(uri, false));
        }
        case SPEAKERS_ID: {
            final String speakerId = Speakers.getSpeakerId(uri);
            return builder.table(Tables.SPEAKERS)
                    .where(Speakers.SPEAKER_ID + "=?", speakerId);
        }
        case ANNOUNCEMENTS_ID: {
            final String announcementId = Announcements.getAnnouncementId(uri);
            return builder.table(Tables.ANNOUNCEMENTS)
                    .where(Announcements.ANNOUNCEMENT_ID + "=?", announcementId);
        }
        case FEEDBACK_FOR_SESSION: {
            final String session_id = Feedback.getSessionId(uri);
            return builder.table(Tables.FEEDBACK)
                    .where(Feedback.SESSION_ID + "=?", session_id);
        }
        case FEEDBACK_ALL: {
            return builder.table(Tables.FEEDBACK);
        }
        case HASHTAGS_NAME: {
            final String hashtagName = Hashtags.getHashtagName(uri);
            return builder.table(Tables.HASHTAGS)
                    .where(Hashtags.HASHTAG_NAME + "=?", hashtagName);
        }
        case VIDEOS_ID: {
            final String videoId = Videos.getVideoId(uri);
            return builder.table(Tables.VIDEOS).where(Videos.VIDEO_ID + "=?", videoId);
        }
        default: {
            throw new UnsupportedOperationException("Unknown uri for " + uri);
        }
    }
}
/**
 * Resolves the account name for the given {@code uri}: an explicit override on
 * the uri wins; otherwise the device's active account name is used. When
 * {@code sanitize} is set, single quotes are escaped so the value can be safely
 * concatenated into SQL (http://xkcd.com/327/).
 */
private String getCurrentAccountName(Uri uri, boolean sanitize) {
    String account = ScheduleContractHelper.getOverrideAccountName(uri);
    if (account == null) {
        account = AccountUtils.getActiveAccountName(getContext());
    }
    if (sanitize && account != null) {
        // sanitize accountName when concatenating (http://xkcd.com/327/)
        account = account.replace("'", "''");
    }
    return account;
}
/**
 * Build an advanced {@link SelectionBuilder} to match the requested
 * {@link Uri}. This is usually only used by {@link #query}, since it
 * performs table joins useful for {@link Cursor} data.
 */
private SelectionBuilder buildExpandedSelection(Uri uri, int match) {
    final SelectionBuilder builder = new SelectionBuilder();
    ScheduleUriEnum matchingUriEnum = mUriMatcher.matchCode(match);
    if (matchingUriEnum == null) {
        throw new UnsupportedOperationException("Unknown uri: " + uri);
    }
    // NOTE(review): sanitize=true here — the account name is passed into
    // builder.table(...) for the joined table definitions below, which appears
    // to interpolate it into SQL rather than bind it; confirm in Tables.
    String accountName = getCurrentAccountName(uri, true);
    switch (matchingUriEnum) {
        case BLOCKS: {
            return builder.table(Tables.BLOCKS);
        }
        case BLOCKS_BETWEEN: {
            // Blocks whose start time falls within [segments(2), segments(3)].
            final List<String> segments = uri.getPathSegments();
            final String startTime = segments.get(2);
            final String endTime = segments.get(3);
            return builder.table(Tables.BLOCKS)
                    .where(Blocks.BLOCK_START + ">=?", startTime)
                    .where(Blocks.BLOCK_START + "<=?", endTime);
        }
        case BLOCKS_ID: {
            final String blockId = Blocks.getBlockId(uri);
            return builder.table(Tables.BLOCKS)
                    .where(Blocks.BLOCK_ID + "=?", blockId);
        }
        case CARDS: {
            return builder.table(Tables.CARDS);
        }
        case TAGS: {
            return builder.table(Tables.TAGS);
        }
        case TAGS_ID: {
            final String tagId = Tags.getTagId(uri);
            return builder.table(Tables.TAGS)
                    .where(Tags.TAG_ID + "=?", tagId);
        }
        case ROOMS: {
            return builder.table(Tables.ROOMS);
        }
        case ROOMS_ID: {
            final String roomId = Rooms.getRoomId(uri);
            return builder.table(Tables.ROOMS)
                    .where(Rooms.ROOM_ID + "=?", roomId);
        }
        case ROOMS_ID_SESSIONS: {
            // All sessions held in the given room.
            final String roomId = Rooms.getRoomId(uri);
            return builder.table(Tables.SESSIONS_JOIN_ROOMS, accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .where(Qualified.SESSIONS_ROOM_ID + "=?", roomId)
                    .groupBy(Qualified.SESSIONS_SESSION_ID);
        }
        case SESSIONS: {
            // We query sessions on the joined table of sessions with rooms and tags.
            // Since there may be more than one tag per session, we GROUP BY session ID.
            // The starred sessions ("my schedule") are associated with a user, so we
            // use the current user to select them properly. Reserved sessions are handled
            // similarly. The IFNULL defaults cover sessions with no my_schedule /
            // reservation row for this account (0 = not in schedule, -1 = no
            // reservation status).
            return builder
                    .table(Tables.SESSIONS_JOIN_ROOMS_TAGS, accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + MyReservations.
                            MY_RESERVATION_STATUS + ", -1)")
                    .groupBy(Qualified.SESSIONS_SESSION_ID);
        }
        case SESSIONS_COUNTER: {
            // Counts sessions per (start, end) interval.
            return builder
                    .table(Tables.SESSIONS_JOIN_MYSCHEDULE, accountName, accountName)
                    .map(Sessions.SESSION_INTERVAL_COUNT, "count(1)")
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + MyReservations.
                            MY_RESERVATION_STATUS + ", -1)")
                    .groupBy(Sessions.SESSION_START + ", " + Sessions.SESSION_END);
        }
        case SESSIONS_MY_SCHEDULE: {
            // Only sessions in the current account's schedule.
            return builder.table(Tables.SESSIONS_JOIN_ROOMS_TAGS_FEEDBACK_MYSCHEDULE,
                    accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .map(Sessions.HAS_GIVEN_FEEDBACK, Subquery.SESSION_HAS_GIVEN_FEEDBACK)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + MyReservations.
                            MY_RESERVATION_STATUS + ", -1)")
                    .where(Sessions.IN_SCHEDULE_SELECTION)
                    .groupBy(Qualified.SESSIONS_SESSION_ID);
        }
        case SESSIONS_UNSCHEDULED: {
            // Sessions NOT in the schedule that start within the uri's interval.
            final long[] interval = Sessions.getInterval(uri);
            return builder.table(Tables.SESSIONS_JOIN_ROOMS_TAGS_FEEDBACK_MYSCHEDULE,
                    accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + MyReservations.
                            MY_RESERVATION_STATUS + ", -1)")
                    .where(Sessions.NOT_IN_SCHEDULE_SELECTION)
                    .where(Sessions.SESSION_START + ">=?", String.valueOf(interval[0]))
                    .where(Sessions.SESSION_START + "<?", String.valueOf(interval[1]))
                    .groupBy(Qualified.SESSIONS_SESSION_ID);
        }
        case SESSIONS_SEARCH: {
            // Full-text search over the sessions search table; SEARCH_SNIPPET
            // carries the highlighted match excerpt.
            final String query = Sessions.getSearchQuery(uri);
            return builder.table(Tables.SESSIONS_SEARCH_JOIN_SESSIONS_ROOMS,
                    accountName, accountName)
                    .map(Sessions.SEARCH_SNIPPET, Subquery.SESSIONS_SNIPPET)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + MyReservations.
                            MY_RESERVATION_STATUS + ", -1)")
                    .where(SessionsSearchColumns.BODY + " MATCH ?", query);
        }
        case SESSIONS_AT: {
            // Sessions in progress at the given instant (start <= t <= end).
            final List<String> segments = uri.getPathSegments();
            final String time = segments.get(2);
            return builder.table(Tables.SESSIONS_JOIN_ROOMS, accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .where(Sessions.SESSION_START + "<=?", time)
                    .where(Sessions.SESSION_END + ">=?", time);
        }
        case SESSIONS_ID: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.SESSIONS_JOIN_ROOMS, accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + MyReservations.
                            MY_RESERVATION_STATUS + ", -1)")
                    .where(Qualified.SESSIONS_SESSION_ID + "=?", sessionId);
        }
        case SESSIONS_ID_SPEAKERS: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.SESSIONS_SPEAKERS_JOIN_SPEAKERS)
                    .mapToTable(Speakers._ID, Tables.SPEAKERS)
                    .mapToTable(Speakers.SPEAKER_ID, Tables.SPEAKERS)
                    .where(Qualified.SESSIONS_SPEAKERS_SESSION_ID + "=?", sessionId);
        }
        case SESSIONS_ID_TAGS: {
            final String sessionId = Sessions.getSessionId(uri);
            return builder.table(Tables.SESSIONS_TAGS_JOIN_TAGS)
                    .mapToTable(Tags._ID, Tables.TAGS)
                    .mapToTable(Tags.TAG_ID, Tables.TAGS)
                    .where(Qualified.SESSIONS_TAGS_SESSION_ID + "=?", sessionId);
        }
        case SESSIONS_ID_RELATED: {
            // Sessions listed in the related-sessions table for this session.
            final String sessionId = Sessions.getSessionId(uri);
            return builder
                    .table(Tables.SESSIONS_JOIN_ROOMS_TAGS, accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + ScheduleContract
                            .MyReservations.MY_RESERVATION_STATUS + ", -1)")
                    .map(Sessions.HAS_GIVEN_FEEDBACK, Subquery.SESSION_HAS_GIVEN_FEEDBACK)
                    .where(Subquery.RELATED_SESSIONS_SELECTION, sessionId)
                    .groupBy(Qualified.SESSIONS_SESSION_ID);
        }
        case SESSIONS_ROOM_AFTER: {
            // Sessions in the given room that are either in progress at `time`
            // or start at/after it.
            final String room = Sessions.getRoom(uri);
            final String time = Sessions.getAfterForRoom(uri);
            return builder.table(Tables.SESSIONS_JOIN_ROOMS_TAGS, accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .where(Qualified.SESSIONS_ROOM_ID + "=?", room)
                    .where("(" + Sessions.SESSION_START + "<= ? AND " + Sessions.SESSION_END +
                                    " >= ?) OR (" + Sessions.SESSION_START + " >= ?)", time,
                            time,
                            time)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + ScheduleContract
                            .MyReservations.MY_RESERVATION_STATUS + ", -1)")
                    .groupBy(Qualified.SESSIONS_SESSION_ID);
        }
        case SESSIONS_AFTER: {
            // Sessions (any room) in progress at `time` or starting after it.
            final String time = Sessions.getAfter(uri);
            return builder.table(Tables.SESSIONS_JOIN_ROOMS_TAGS, accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + ScheduleContract
                            .MyReservations.MY_RESERVATION_STATUS + ", -1)")
                    .where("(" + Sessions.SESSION_START + "<= ? AND " + Sessions.SESSION_END +
                                    " >= ?) OR (" + Sessions.SESSION_START + " >= ?)", time,
                            time, time)
                    .groupBy(Qualified.SESSIONS_SESSION_ID);
        }
        case SPEAKERS: {
            return builder.table(Tables.SPEAKERS);
        }
        case MY_SCHEDULE: {
            // force a where condition to avoid leaking schedule info to another account
            // Note that, since SelectionBuilder always join multiple where calls using AND,
            // even if malicious code specifying additional conditions on account_name won't
            // be able to fetch data from a different account.
            return builder.table(Tables.MY_SCHEDULE)
                    .where(MySchedule.MY_SCHEDULE_ACCOUNT_NAME + "=?",
                            accountName);
        }
        case MY_RESERVATIONS: {
            // force a where condition to avoid leaking reservation info to another account
            // Note that, since SelectionBuilder always join multiple where calls using AND,
            // even if malicious code specifying additional conditions on account_name won't
            // be able to fetch data from a different account.
            return builder.table(Tables.MY_RESERVATIONS)
                    .where(MyReservations.MY_RESERVATION_ACCOUNT_NAME + "=?",
                            accountName);
        }
        case MY_FEEDBACK_SUBMITTED: {
            // force a where condition to avoid leaking schedule info to another account
            // Note that, since SelectionBuilder always join multiple where calls using AND,
            // even if malicious code specifying additional conditions on account_name won't
            // be able to fetch data from a different account.
            return builder.table(Tables.MY_FEEDBACK_SUBMITTED)
                    .where(MyFeedbackSubmitted.MY_FEEDBACK_SUBMITTED_ACCOUNT_NAME + "=?",
                            accountName);
        }
        case MY_VIEWED_VIDEOS: {
            // force a where condition to avoid leaking schedule info to another account
            // Note that, since SelectionBuilder always join multiple where calls using AND,
            // even if malicious code specifying additional conditions on account_name won't
            // be able to fetch data from a different account.
            return builder.table(Tables.MY_VIEWED_VIDEO)
                    .where(MyViewedVideos.MY_VIEWED_VIDEOS_ACCOUNT_NAME + "=?",
                            accountName);
        }
        case SPEAKERS_ID: {
            final String speakerId = Speakers.getSpeakerId(uri);
            return builder.table(Tables.SPEAKERS)
                    .where(Speakers.SPEAKER_ID + "=?", speakerId);
        }
        case SPEAKERS_ID_SESSIONS: {
            // All sessions given by the speaker.
            final String speakerId = Speakers.getSpeakerId(uri);
            return builder.table(Tables.SESSIONS_SPEAKERS_JOIN_SESSIONS_ROOMS,
                    accountName, accountName)
                    .mapToTable(Sessions._ID, Tables.SESSIONS)
                    .mapToTable(Sessions.SESSION_ID, Tables.SESSIONS)
                    .mapToTable(Sessions.ROOM_ID, Tables.SESSIONS)
                    .map(Sessions.SESSION_IN_MY_SCHEDULE, "IFNULL(in_schedule, 0)")
                    .map(Sessions.SESSION_RESERVATION_STATUS, "IFNULL(" + MyReservations.
                            MY_RESERVATION_STATUS + ", -1)")
                    .where(Qualified.SESSIONS_SPEAKERS_SPEAKER_ID + "=?", speakerId);
        }
        case ANNOUNCEMENTS: {
            return builder.table(Tables.ANNOUNCEMENTS);
        }
        case ANNOUNCEMENTS_ID: {
            final String announcementId = Announcements.getAnnouncementId(uri);
            return builder.table(Tables.ANNOUNCEMENTS)
                    .where(Announcements.ANNOUNCEMENT_ID + "=?", announcementId);
        }
        case MAPGEOJSON: {
            return builder.table(Tables.MAPGEOJSON);
        }
        case MAPTILES: {
            return builder.table(Tables.MAPTILES);
        }
        case FEEDBACK_FOR_SESSION: {
            final String sessionId = Feedback.getSessionId(uri);
            return builder.table(Tables.FEEDBACK)
                    .where(Feedback.SESSION_ID + "=?", sessionId);
        }
        case FEEDBACK_ALL: {
            return builder.table(Tables.FEEDBACK);
        }
        case HASHTAGS: {
            return builder.table(Tables.HASHTAGS);
        }
        case HASHTAGS_NAME: {
            final String hashtagName = Hashtags.getHashtagName(uri);
            return builder.table(Tables.HASHTAGS)
                    .where(HashtagColumns.HASHTAG_NAME + "=?", hashtagName);
        }
        case VIDEOS: {
            return builder.table(Tables.VIDEOS);
        }
        case VIDEOS_ID: {
            final String videoId = Videos.getVideoId(uri);
            return builder.table(Tables.VIDEOS)
                    .where(VideoColumns.VIDEO_ID + "=?", videoId);
        }
        default: {
            throw new UnsupportedOperationException("Unknown uri: " + uri);
        }
    }
}
/**
 * File access is not supported by this provider; all data is relational and is
 * accessed through {@link #query}.
 */
@Override
public ParcelFileDescriptor openFile(Uri uri, String mode) throws FileNotFoundException {
    throw new UnsupportedOperationException("openFile is not supported for " + uri);
}
/**
 * Reusable SQL fragments (scalar subqueries and selections) used when building
 * the expanded selections above.
 */
private interface Subquery {
    // Correlated count of feedback rows for the session; mapped to
    // Sessions.HAS_GIVEN_FEEDBACK (non-zero means feedback was given).
    String SESSION_HAS_GIVEN_FEEDBACK = "(SELECT COUNT(1) FROM "
            + Tables.FEEDBACK + " WHERE " + Qualified.FEEDBACK_SESSION_ID + "="
            + Qualified.SESSIONS_SESSION_ID + ")";
    // FTS snippet() over the sessions search table: matches are wrapped in
    // '{' '}' and truncation is marked with an ellipsis character.
    String SESSIONS_SNIPPET = "snippet(" + Tables.SESSIONS_SEARCH + ",'{','}','\u2026')";
    // Selection matching all sessions marked as related to the bound session id.
    String RELATED_SESSIONS_SELECTION = Qualified.SESSIONS_SESSION_ID + " IN (SELECT "
            + Sessions.RELATED_SESSION_ID + " FROM " + Tables.RELATED_SESSIONS + " WHERE "
            + Sessions.SESSION_ID + " = ?)";
}
/**
 * {@link ScheduleContract} fields that are fully qualified with a specific
 * parent {@link Tables}. Used when needed to work around SQL ambiguity.
 */
private interface Qualified {
    // sessions table columns
    String SESSIONS_SESSION_ID = Tables.SESSIONS + "." + Sessions.SESSION_ID;
    String SESSIONS_ROOM_ID = Tables.SESSIONS + "." + Sessions.ROOM_ID;
    // join-table columns (sessions<->tags, sessions<->speakers)
    String SESSIONS_TAGS_SESSION_ID = Tables.SESSIONS_TAGS + "."
            + ScheduleDatabase.SessionsTags.SESSION_ID;
    String SESSIONS_SPEAKERS_SESSION_ID = Tables.SESSIONS_SPEAKERS + "."
            + SessionsSpeakers.SESSION_ID;
    String SESSIONS_SPEAKERS_SPEAKER_ID = Tables.SESSIONS_SPEAKERS + "."
            + SessionsSpeakers.SPEAKER_ID;
    // feedback table column
    String FEEDBACK_SESSION_ID = Tables.FEEDBACK + "." + Feedback.SESSION_ID;
}
}
| WeRockStar/iosched | lib/src/main/java/com/google/samples/apps/iosched/provider/ScheduleProvider.java | Java | apache-2.0 | 51,535 |
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import subscribing from '@/mixins/subscribing';
import {timer} from '@/utils/rxjs';
import moment from 'moment';
export default {
  // `value` is the process uptime in milliseconds as last reported by the
  // backend; the component keeps counting forward locally from that point.
  props: ['value'],
  mixins: [subscribing],
  data: () => ({
    startTs: null, // moment when local ticking (re)started
    offset: null   // milliseconds elapsed locally since startTs
  }),
  render() {
    // Render a bare text node (no wrapping element).
    return this._v(this.clock);
  },
  computed: {
    // Formats value + locally elapsed offset as "Dd Hh Mm Ss".
    clock() {
      if (!this.value) {
        return null;
      }
      const duration = moment.duration(this.value + this.offset);
      return `${Math.floor(duration.asDays())}d ${duration.hours()}h ${duration.minutes()}m ${duration.seconds()}s`;
    }
  },
  watch: {
    // Restart the ticking subscription whenever a fresh uptime value arrives.
    value: 'subscribe'
  },
  methods: {
    // Invoked by the `subscribing` mixin: ticks once per second, updating
    // `offset` with the wall-clock time elapsed since the last `value` update.
    createSubscription() {
      if (this.value) {
        const vm = this;
        vm.startTs = moment();
        vm.offset = 0;
        return timer(0, 1000).subscribe({
          next: () => {
            vm.offset = moment().valueOf() - vm.startTs.valueOf();
          }
        })
      }
    }
  }
}
| codecentric/spring-boot-admin | spring-boot-admin-server-ui/src/main/frontend/views/instances/details/process-uptime.js | JavaScript | apache-2.0 | 1,540 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ngrinder.script.handler;
import freemarker.template.Configuration;
import freemarker.template.DefaultObjectWrapper;
import freemarker.template.Template;
import org.apache.commons.io.FilenameUtils;
import org.ngrinder.common.constant.ControllerConstants;
import org.ngrinder.common.util.FileUtils;
import org.ngrinder.common.util.PathUtils;
import org.ngrinder.common.util.PropertiesWrapper;
import org.ngrinder.model.User;
import org.ngrinder.script.model.FileEntry;
import org.ngrinder.script.model.FileType;
import org.ngrinder.script.repository.FileEntryRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ClassPathResource;
import java.io.File;
import java.io.StringWriter;
import java.util.List;
import java.util.Map;
import static org.apache.commons.lang.StringUtils.startsWithIgnoreCase;
import static org.ngrinder.common.util.CollectionUtils.newArrayList;
import static org.ngrinder.common.util.ExceptionUtils.processException;
/**
 * Script per language handler. This is the superclass for all sub
 * {@link ScriptHandler}s which implements the specific processing of each
 * language.
 *
 * @author JunHo Yoon
 * @since 3.2
 */
public abstract class ScriptHandler implements ControllerConstants {
    // Log under this base class; previously this was mistakenly
    // JythonScriptHandler.class, which attributed every handler's log
    // messages to the Jython handler.
    protected static final Logger LOGGER = LoggerFactory.getLogger(ScriptHandler.class);
    // CodeMirror mode key used by the web editor for syntax highlighting.
    private final String codemirrorKey;
    // Human readable handler name.
    private final String title;
    // Script file extension (without the dot) this handler is responsible for.
    private final String extension;
    // Unique handler key.
    private final String key;

    /**
     * Constructor.
     *
     * @param key           key of the script handler
     * @param extension     extension
     * @param title         title of the handler
     * @param codeMirrorKey code mirror key
     */
    public ScriptHandler(String key, String extension, String title, String codeMirrorKey) {
        this.key = key;
        this.extension = extension;
        this.title = title;
        this.codemirrorKey = codeMirrorKey;
    }

    @Autowired
    private FileEntryRepository fileEntryRepository;

    /**
     * Get the display order of {@link ScriptHandler}s.
     *
     * @return order
     */
    public abstract Integer displayOrder();

    /**
     * Get the CodeMirror mode key for this script type.
     *
     * @return CodeMirror key
     */
    public String getCodemirrorKey() {
        return codemirrorKey;
    }

    /**
     * Check if the given fileEntry can be handled by this handler.
     *
     * @param fileEntry fileEntry to be checked
     * @return true if the given fileEntry can be handled
     */
    public boolean canHandle(FileEntry fileEntry) {
        return FilenameUtils.isExtension(fileEntry.getPath(), getExtension());
    }

    public String getExtension() {
        return extension;
    }

    /**
     * Get the handler resolution order.
     * <p/>
     * Less is more prioritized.
     *
     * @return the order of handler resolution
     */
    protected abstract Integer order();

    /**
     * Return whether scripts of this type can be validated. Defaults to true.
     *
     * @return true if validatable
     */
    @SuppressWarnings("SpellCheckingInspection")
    public boolean isValidatable() {
        return true;
    }

    /**
     * Return if it's project handler which implements {@link ProjectHandler}.
     *
     * @return true if it is.
     */
    @SuppressWarnings("UnusedDeclaration")
    public boolean isProjectHandler() {
        return (this instanceof ProjectHandler);
    }

    /**
     * Prepare the distribution.
     *
     * @param testCaseId       id of the test case. This is for the log identification.
     * @param user             user who will distribute the script.
     * @param scriptEntry      script to be distributed.
     * @param distDir          distribution target dir.
     * @param properties       properties set which is used for detailed distribution control.
     * @param processingResult processing result holder.
     */
    public void prepareDist(Long testCaseId,
                            User user, //
                            FileEntry scriptEntry, File distDir, PropertiesWrapper properties,
                            ProcessingResultPrintStream processingResult) {
        prepareDefaultFile(distDir, properties);
        List<FileEntry> fileEntries = getLibAndResourceEntries(user, scriptEntry, -1);
        if (scriptEntry.getRevision() != 0) {
            fileEntries.add(scriptEntry);
        }
        String basePath = getBasePath(scriptEntry);
        // Distribute each file under the script's base path.
        for (FileEntry each : fileEntries) {
            // Directories themselves are not distributed; only their contents.
            if (each.getFileType() == FileType.DIR) {
                continue;
            }
            File toDir = new File(distDir, calcDistSubPath(basePath, each));
            processingResult.printf("%s is being written.\n", each.getPath());
            LOGGER.info("{} is being written in {} for test {}", new Object[]{each.getPath(), toDir, testCaseId});
            getFileEntryRepository().writeContentTo(user, each.getPath(), toDir);
        }
        processingResult.setSuccess(true);
        prepareDistMore(testCaseId, user, scriptEntry, distDir, properties, processingResult);
    }

    /**
     * Prepare script creation. This method is subject to be extended by the
     * subclasses.
     * <p/>
     * This method is the perfect place if it's necessary to include additional
     * files.
     *
     * @param user                  user
     * @param path                  base path
     * @param fileName              fileName
     * @param name                  name
     * @param url                   url
     * @param createLibAndResources true if lib and resources should be created
     * @param scriptContent         initial content of the script
     * @return true if process more.
     */
    public boolean prepareScriptEnv(User user, String path, String fileName, String name, String url,
                                    boolean createLibAndResources, String scriptContent) {
        return true;
    }

    /**
     * Prepare the distribution more. This method is subject to be extended by
     * the subclass.
     *
     * @param testCaseId       test case id. This is for the log identification.
     * @param user             user
     * @param script           script entry to be distributed.
     * @param distDir          distribution directory
     * @param properties       properties
     * @param processingResult processing result holder
     */
    protected void prepareDistMore(Long testCaseId, User user, FileEntry script, File distDir,
                                   PropertiesWrapper properties, ProcessingResultPrintStream processingResult) {
    }

    /**
     * Get the appropriated distribution path for the given file entry.
     *
     * @param basePath  distribution base path
     * @param fileEntry fileEntry to be distributed
     * @return the resolved destination path.
     */
    protected String calcDistSubPath(String basePath, FileEntry fileEntry) {
        String path = FilenameUtils.getPath(fileEntry.getPath());
        // Strip the script's base path so the dist dir mirrors the sub tree only.
        path = path.substring(basePath.length());
        return path;
    }

    /**
     * Get all resources and lib entries belonging to the given user and
     * scriptEntry.
     *
     * @param user        user
     * @param scriptEntry script entry
     * @param revision    revision of the script entry.
     * @return file entry list
     */
    public List<FileEntry> getLibAndResourceEntries(User user, FileEntry scriptEntry, long revision) {
        String path = FilenameUtils.getPath(scriptEntry.getPath());
        List<FileEntry> fileList = newArrayList();
        for (FileEntry eachFileEntry : getFileEntryRepository().findAll(user, path + "lib/", revision, true)) {
            // Skip jython 2.5... it's already included.
            if (startsWithIgnoreCase(eachFileEntry.getFileName(), "jython-2.5.")
                    || startsWithIgnoreCase(eachFileEntry.getFileName(), "jython-standalone-2.5.")) {
                continue;
            }
            FileType fileType = eachFileEntry.getFileType();
            if (fileType.isLibDistributable()) {
                fileList.add(eachFileEntry);
            }
        }
        for (FileEntry eachFileEntry : getFileEntryRepository().findAll(user, path + "resources/", revision, true)) {
            FileType fileType = eachFileEntry.getFileType();
            if (fileType.isResourceDistributable()) {
                fileList.add(eachFileEntry);
            }
        }
        return fileList;
    }

    /**
     * Copy the default worker logback configuration into the distribution
     * directory when the corresponding controller property is enabled.
     *
     * @param distDir    distribution directory
     * @param properties controller properties
     */
    protected void prepareDefaultFile(File distDir, PropertiesWrapper properties) {
        if (properties.getPropertyBoolean(PROP_CONTROLLER_DIST_LOGBACK)) {
            FileUtils.copyResourceToFile("/logback/logback-worker.xml", new File(distDir, "logback-worker.xml"));
        }
    }

    /**
     * Get the base path of the given script entry.
     *
     * @param script script entry
     * @return base path
     */
    protected String getBasePath(FileEntry script) {
        return getBasePath(script.getPath());
    }

    /**
     * Get the base path of the given path.
     *
     * @param path path
     * @return base path
     */
    public String getBasePath(String path) {
        return FilenameUtils.getPath(path);
    }

    /**
     * Get executable script path.
     *
     * @param svnPath path in svn
     * @return path executable in agent.
     */
    public String getScriptExecutePath(String svnPath) {
        return FilenameUtils.getName(svnPath);
    }

    /**
     * Check syntax errors for the given content.
     *
     * @param path    path
     * @param content content
     * @return syntax error messages. null if none.
     */
    public abstract String checkSyntaxErrors(String path, String content);

    /**
     * Get the initial script with the given value map.
     *
     * @param values map of initial script referencing values.
     * @return generated string
     */
    public String getScriptTemplate(Map<String, Object> values) {
        try {
            Configuration freemarkerConfig = new Configuration();
            // Templates live on the classpath under script_template/, one per
            // extension: basic_template_<ext>.ftl
            ClassPathResource cpr = new ClassPathResource("script_template");
            freemarkerConfig.setDirectoryForTemplateLoading(cpr.getFile());
            freemarkerConfig.setObjectWrapper(new DefaultObjectWrapper());
            Template template = freemarkerConfig.getTemplate("basic_template_" + getExtension() + ".ftl");
            StringWriter writer = new StringWriter();
            template.process(values, writer);
            return writer.toString();
        } catch (Exception e) {
            throw processException("Error while fetching the script template.", e);
        }
    }

    public String getTitle() {
        return title;
    }

    public String getKey() {
        return key;
    }

    // Package-private accessors; the repository is normally injected by Spring.
    FileEntryRepository getFileEntryRepository() {
        return fileEntryRepository;
    }

    void setFileEntryRepository(FileEntryRepository fileEntryRepository) {
        this.fileEntryRepository = fileEntryRepository;
    }

    /**
     * Get the default quick test file.
     *
     * @param basePath base path
     * @return quick test file
     */
    public FileEntry getDefaultQuickTestFilePath(String basePath) {
        FileEntry fileEntry = new FileEntry();
        fileEntry.setPath(PathUtils.join(basePath, "TestRunner." + getExtension()));
        return fileEntry;
    }
}
| songeunwoo/ngrinder | ngrinder-controller/src/main/java/org/ngrinder/script/handler/ScriptHandler.java | Java | apache-2.0 | 10,936 |
module.exports = function(ctx) {
var fs = ctx.requireCordovaModule('fs'),
path = ctx.requireCordovaModule('path'),
os = require("os"),
readline = require("readline"),
deferral = ctx.requireCordovaModule('q').defer();
var lineReader = readline.createInterface({
terminal: false,
input : fs.createReadStream('platforms/android/build.gradle')
});
lineReader.on("line", function(line) {
fs.appendFileSync('./build.gradle', line.toString() + os.EOL);
if (/.*\ dependencies \{.*/.test(line)) {
fs.appendFileSync('./build.gradle', '\t\tclasspath "com.google.gms:google-services:3.0.0"' + os.EOL);
}
}).on("close", function () {
fs.rename('./build.gradle', 'platforms/android/build.gradle', deferral.resolve);
});
return deferral.promise;
};
| alfredo777/btmglobalconsulting | plugins/phonegap-plugin-push/scripts/copyAndroidFile.js | JavaScript | apache-2.0 | 856 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.exec.http;
import static org.apache.jena.http.HttpLib.*;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpRequest.BodyPublishers;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.concurrent.TimeUnit;
import org.apache.jena.atlas.RuntimeIOException;
import org.apache.jena.atlas.io.IO;
import org.apache.jena.atlas.iterator.Iter;
import org.apache.jena.atlas.json.JSON;
import org.apache.jena.atlas.json.JsonArray;
import org.apache.jena.atlas.json.JsonObject;
import org.apache.jena.atlas.lib.InternalErrorException;
import org.apache.jena.atlas.lib.Pair;
import org.apache.jena.atlas.logging.Log;
import org.apache.jena.atlas.web.HttpException;
import org.apache.jena.graph.Graph;
import org.apache.jena.graph.Triple;
import org.apache.jena.http.HttpEnv;
import org.apache.jena.http.HttpLib;
import org.apache.jena.query.*;
import org.apache.jena.riot.*;
import org.apache.jena.riot.resultset.ResultSetLang;
import org.apache.jena.riot.resultset.ResultSetReaderRegistry;
import org.apache.jena.riot.web.HttpNames;
import org.apache.jena.sparql.ARQException;
import org.apache.jena.sparql.core.DatasetGraph;
import org.apache.jena.sparql.core.DatasetGraphFactory;
import org.apache.jena.sparql.core.Quad;
import org.apache.jena.sparql.engine.http.HttpParams;
import org.apache.jena.sparql.engine.http.QueryExceptionHTTP;
import org.apache.jena.sparql.exec.QueryExec;
import org.apache.jena.sparql.exec.RowSet;
import org.apache.jena.sparql.util.Context;
/**
 * A {@link QueryExec} implementation where queries are executed against a remote
 * service over HTTP.
 */
public class QueryExecHTTP implements QueryExec {

    /** @deprecated Use {@link #newBuilder} */
    @Deprecated
    public static QueryExecHTTPBuilder create() { return newBuilder(); }

    public static QueryExecHTTPBuilder newBuilder() { return QueryExecHTTPBuilder.create(); }

    public static QueryExecHTTPBuilder service(String serviceURL) {
        return QueryExecHTTP.newBuilder().endpoint(serviceURL);
    }

    //public static final String QUERY_MIME_TYPE = WebContent.contentTypeSPARQLQuery;
    private final Query query;
    private final String queryString;
    private final String service;
    private final Context context;

    // Params
    private Params params = null;

    private final QuerySendMode sendMode;
    private int urlLimit = HttpEnv.urlLimit;

    // Protocol
    private List<String> defaultGraphURIs = new ArrayList<>();
    private List<String> namedGraphURIs = new ArrayList<>();

    private boolean closed = false;

    // Timeout of query execution.
    private long readTimeout = -1;
    private TimeUnit readTimeoutUnit = TimeUnit.MILLISECONDS;

    // Content Types: these list the standard formats and also include */*.
    private final String selectAcceptheader = WebContent.defaultSparqlResultsHeader;
    private final String askAcceptHeader = WebContent.defaultSparqlAskHeader;
    private final String describeAcceptHeader = WebContent.defaultGraphAcceptHeader;
    private final String constructAcceptHeader = WebContent.defaultGraphAcceptHeader;
    private final String datasetAcceptHeader = WebContent.defaultDatasetAcceptHeader;

    // If this is non-null, it overrides the use of any Content-Type above.
    private String appProvidedAcceptHeader = null;

    // Received content type
    private String httpResponseContentType = null;

    // Releasing HTTP input streams is important. We remember this for SELECT result
    // set streaming, and will close it when the execution is closed
    private InputStream retainedConnection = null;

    private HttpClient httpClient = HttpEnv.getDftHttpClient();
    private Map<String, String> httpHeaders;

    public QueryExecHTTP(String serviceURL, Query query, String queryString, int urlLimit,
                         HttpClient httpClient, Map<String, String> httpHeaders, Params params, Context context,
                         List<String> defaultGraphURIs, List<String> namedGraphURIs,
                         QuerySendMode sendMode, String explicitAcceptHeader,
                         long timeout, TimeUnit timeoutUnit) {
        this.context = ( context == null ) ? ARQ.getContext().copy() : context.copy();
        this.service = serviceURL;
        this.query = query;
        this.queryString = queryString;
        this.urlLimit = urlLimit;
        this.httpHeaders = httpHeaders;
        this.defaultGraphURIs = defaultGraphURIs;
        this.namedGraphURIs = namedGraphURIs;
        this.sendMode = Objects.requireNonNull(sendMode);
        this.appProvidedAcceptHeader = explicitAcceptHeader;
        // Important - handled as special case because the defaults vary by query type.
        if ( httpHeaders.containsKey(HttpNames.hAccept) ) {
            if ( this.appProvidedAcceptHeader != null )
                this.appProvidedAcceptHeader = httpHeaders.get(HttpNames.hAccept);
            this.httpHeaders.remove(HttpNames.hAccept);
        }
        this.params = params;
        this.readTimeout = timeout;
        this.readTimeoutUnit = timeoutUnit;
        this.httpClient = HttpLib.dft(httpClient, HttpEnv.getDftHttpClient());
    }

    /** The Content-Type response header received (null before the remote operation is attempted). */
    public String getHttpResponseContentType() {
        return httpResponseContentType;
    }

    @Override
    public RowSet select() {
        checkNotClosed();
        check(QueryType.SELECT);
        RowSet rs = execRowSet();
        return rs;
    }

    private RowSet execRowSet() {
        // Use the explicitly given header or the default selectAcceptheader
        String thisAcceptHeader = dft(appProvidedAcceptHeader, selectAcceptheader);
        HttpResponse<InputStream> response = performQuery(thisAcceptHeader);
        InputStream in = HttpLib.getInputStream(response);

        // Don't assume the endpoint actually gives back the content type we asked for
        String actualContentType = responseHeader(response, HttpNames.hContentType);

        // Remember the response.
        httpResponseContentType = actualContentType;

        // More reliable to use the format-defined charsets e.g. JSON -> UTF-8
        actualContentType = removeCharset(actualContentType);

        if (false) {
            // Debug: dump and re-wrap the response body.
            byte b[] = IO.readWholeFile(in);
            String str = new String(b);
            System.out.println(str);
            in = new ByteArrayInputStream(b);
        }

        retainedConnection = in; // This will be closed on close()

        // If the server fails to return a Content-Type, default to SPARQL XML results.
        if (actualContentType == null || actualContentType.equals(""))
            actualContentType = WebContent.contentTypeResultsXML;

        // Map to lang, with pragmatic alternatives.
        Lang lang = WebContent.contentTypeToLangResultSet(actualContentType);
        if ( lang == null )
            throw new QueryException("Endpoint returned Content-Type: " + actualContentType + " which is not recognized for SELECT queries");
        if ( !ResultSetReaderRegistry.isRegistered(lang) )
            throw new QueryException("Endpoint returned Content-Type: " + actualContentType + " which is not supported for SELECT queries");
        // This returns a streaming result set for some formats.
        // Do not close the InputStream at this point.
        ResultSet result = ResultSetMgr.read(in, lang);
        return RowSet.adapt(result);
    }

    @Override
    public boolean ask() {
        checkNotClosed();
        check(QueryType.ASK);
        String thisAcceptHeader = dft(appProvidedAcceptHeader, askAcceptHeader);
        HttpResponse<InputStream> response = performQuery(thisAcceptHeader);
        InputStream in = HttpLib.getInputStream(response);

        String actualContentType = responseHeader(response, HttpNames.hContentType);
        httpResponseContentType = actualContentType;
        actualContentType = removeCharset(actualContentType);

        // If the server fails to return a Content-Type then we will assume
        // the server returned the type we asked for
        if (actualContentType == null || actualContentType.equals(""))
            actualContentType = askAcceptHeader;

        Lang lang = RDFLanguages.contentTypeToLang(actualContentType);
        if ( lang == null ) {
            // Any specials :
            // application/xml for application/sparql-results+xml
            // application/json for application/sparql-results+json
            if (actualContentType.equals(WebContent.contentTypeXML))
                lang = ResultSetLang.RS_XML;
            else if ( actualContentType.equals(WebContent.contentTypeJSON))
                lang = ResultSetLang.RS_JSON;
        }
        if ( lang == null )
            throw new QueryException("Endpoint returned Content-Type: " + actualContentType + " which is not supported for ASK queries");
        boolean result = ResultSetMgr.readBoolean(in, lang);
        finish(in);
        return result;
    }

    /**
     * Strip any parameters (";charset=..." etc.) from a Content-Type header value.
     * Null-safe: a server may legally omit the Content-Type header, in which case
     * callers apply their own default after the null check.
     */
    private String removeCharset(String contentType) {
        if ( contentType == null )
            return null;
        int idx = contentType.indexOf(';');
        if ( idx < 0 )
            return contentType;
        return contentType.substring(0,idx);
    }

    @Override
    public Graph construct(Graph graph) {
        checkNotClosed();
        check(QueryType.CONSTRUCT);
        return execGraph(graph, constructAcceptHeader);
    }

    @Override
    public Iterator<Triple> constructTriples() {
        checkNotClosed();
        check(QueryType.CONSTRUCT);
        return execTriples(constructAcceptHeader);
    }

    @Override
    public Iterator<Quad> constructQuads(){
        checkNotClosed();
        return execQuads();
    }

    @Override
    public DatasetGraph constructDataset(){
        checkNotClosed();
        return constructDataset(DatasetGraphFactory.createTxnMem());
    }

    @Override
    public DatasetGraph constructDataset(DatasetGraph dataset){
        checkNotClosed();
        check(QueryType.CONSTRUCT);
        return execDataset(dataset);
    }

    @Override
    public Graph describe(Graph graph) {
        checkNotClosed();
        check(QueryType.DESCRIBE);
        return execGraph(graph, describeAcceptHeader);
    }

    @Override
    public Iterator<Triple> describeTriples() {
        checkNotClosed();
        return execTriples(describeAcceptHeader);
    }

    private Graph execGraph(Graph graph, String acceptHeader) {
        Pair<InputStream, Lang> p = execRdfWorker(acceptHeader, WebContent.contentTypeRDFXML);
        InputStream in = p.getLeft();
        Lang lang = p.getRight();
        try {
            RDFDataMgr.read(graph, in, lang);
        } catch (RiotException ex) {
            // Release the HTTP stream before propagating the parse error.
            HttpLib.finish(in);
            throw ex;
        }
        return graph;
    }

    private DatasetGraph execDataset(DatasetGraph dataset) {
        Pair<InputStream, Lang> p = execRdfWorker(datasetAcceptHeader, WebContent.contentTypeNQuads);
        InputStream in = p.getLeft();
        Lang lang = p.getRight();
        try {
            RDFDataMgr.read(dataset, in, lang);
        } catch (RiotException ex) {
            finish(in);
            throw ex;
        }
        return dataset;
    }

    @SuppressWarnings("deprecation")
    private Iterator<Triple> execTriples(String acceptHeader) {
        Pair<InputStream, Lang> p = execRdfWorker(acceptHeader, WebContent.contentTypeRDFXML);
        InputStream input = p.getLeft();
        Lang lang = p.getRight();
        // Base URI?
        // Unless N-Triples, this creates a thread.
        Iterator<Triple> iter = RDFDataMgr.createIteratorTriples(input, lang, null);
        return Iter.onCloseIO(iter, input);
    }

    @SuppressWarnings("deprecation")
    private Iterator<Quad> execQuads() {
        checkNotClosed();
        Pair<InputStream, Lang> p = execRdfWorker(datasetAcceptHeader, WebContent.contentTypeNQuads);
        InputStream input = p.getLeft();
        Lang lang = p.getRight();
        // Unless N-Quads, this creates a thread.
        Iterator<Quad> iter = RDFDataMgr.createIteratorQuads(input, lang, null);
        return Iter.onCloseIO(iter, input);
    }

    // Any RDF data back (CONSTRUCT, DESCRIBE, QUADS)
    // ifNoContentType - some wild guess at the content type.
    private Pair<InputStream, Lang> execRdfWorker(String contentType, String ifNoContentType) {
        checkNotClosed();
        String thisAcceptHeader = dft(appProvidedAcceptHeader, contentType);
        HttpResponse<InputStream> response = performQuery(thisAcceptHeader);
        InputStream in = HttpLib.getInputStream(response);

        // Don't assume the endpoint actually gives back the content type we asked for
        String actualContentType = responseHeader(response, HttpNames.hContentType);
        httpResponseContentType = actualContentType;
        actualContentType = removeCharset(actualContentType);

        // If the server fails to return a Content-Type then we will assume
        // the server returned the type we asked for
        if (actualContentType == null || actualContentType.equals(""))
            actualContentType = ifNoContentType;

        Lang lang = RDFLanguages.contentTypeToLang(actualContentType);
        if ( ! RDFLanguages.isQuads(lang) && ! RDFLanguages.isTriples(lang) )
            throw new QueryException("Endpoint returned Content Type: "
                    + actualContentType
                    + " which is not a valid RDF syntax");
        return Pair.create(in, lang);
    }

    @Override
    public JsonArray execJson() {
        checkNotClosed();
        check(QueryType.CONSTRUCT_JSON);
        String thisAcceptHeader = dft(appProvidedAcceptHeader, WebContent.contentTypeJSON);
        HttpResponse<InputStream> response = performQuery(thisAcceptHeader);
        InputStream in = HttpLib.getInputStream(response);
        try {
            return JSON.parseAny(in).getAsArray();
        } finally { finish(in); }
    }

    @Override
    public Iterator<JsonObject> execJsonItems() {
        // execJson() already returns a JsonArray.
        JsonArray array = execJson();
        List<JsonObject> x = new ArrayList<>(array.size());
        array.forEach(elt->{
            if ( ! elt.isObject())
                throw new QueryExecException("Item in an array from a JSON query isn't an object");
            x.add(elt.getAsObject());
        });
        return x.iterator();
    }

    private void checkNotClosed() {
        if ( closed )
            throw new QueryExecException("HTTP QueryExecHTTP has been closed");
    }

    private void check(QueryType queryType) {
        if ( query == null ) {
            // Pass through the queryString.
            return;
        }
        if ( query.queryType() != queryType )
            throw new QueryExecException("Not the right form of query. Expected "+queryType+" but got "+query.queryType());
    }

    @Override
    public Context getContext() {
        return context;
    }

    @Override
    public DatasetGraph getDataset() {
        return null;
    }

    // This may be null - if we were created form a query string,
    // we don't guarantee to parse it so we let through non-SPARQL
    // extensions to the far end.
    @Override
    public Query getQuery() {
        if ( query != null )
            return query;
        if ( queryString != null ) {
            // Object not created with a Query object, may be because there is foreign
            // syntax in the query or may be because the query string was available and the app
            // didn't want the overhead of parsing it every time.
            // Try to parse it else return null;
            try { return QueryFactory.create(queryString, Syntax.syntaxARQ); }
            catch (QueryParseException ex) {}
            return null;
        }
        return null;
    }

    /**
     * Return the query string. If this was supplied as a string,
     * there is no guarantee this is legal SPARQL syntax.
     */
    @Override
    public String getQueryString() {
        return queryString;
    }

    /**
     * Make a query over HTTP.
     * The response is returned after status code processing so the caller can assume the
     * query execution was successful and return 200.
     * Use {@link HttpLib#getInputStream} to access the body.
     */
    private HttpResponse<InputStream> performQuery(String reqAcceptHeader) {
        if (closed)
            throw new ARQException("HTTP execution already closed");

        // SERVICE specials.
        Params thisParams = Params.create(params);
        if ( defaultGraphURIs != null ) {
            for ( String dft : defaultGraphURIs )
                thisParams.add( HttpParams.pDefaultGraph, dft );
        }
        if ( namedGraphURIs != null ) {
            for ( String name : namedGraphURIs )
                thisParams.add( HttpParams.pNamedGraph, name );
        }
        HttpLib.modifyByService(service, context, thisParams, httpHeaders);
        HttpRequest request = makeRequest(thisParams, reqAcceptHeader);
        return executeQuery(request);
    }

    private HttpRequest makeRequest(Params thisParams, String reqAcceptHeader) {
        QuerySendMode actualSendMode = actualSendMode();
        HttpRequest.Builder requestBuilder;
        switch(actualSendMode) {
            case asGetAlways :
                requestBuilder = executeQueryGet(thisParams, reqAcceptHeader);
                break;
            case asPostForm :
                requestBuilder = executeQueryPostForm(thisParams, reqAcceptHeader);
                break;
            case asPost :
                requestBuilder = executeQueryPostBody(thisParams, reqAcceptHeader);
                break;
            default :
                // Should not happen!
                throw new InternalErrorException("Invalid value for 'actualSendMode' "+actualSendMode);
        }
        return requestBuilder.build();
    }

    private HttpResponse<InputStream> executeQuery(HttpRequest request) {
        logQuery(queryString, request);
        try {
            HttpResponse<InputStream> response = execute(httpClient, request);
            HttpLib.handleHttpStatusCode(response);
            return response;
        } catch (HttpException httpEx) {
            throw QueryExceptionHTTP.rewrap(httpEx);
        }
    }

    // Decide how to actually send the query: the configured mode, or - for the
    // "GET with limit" modes - GET when the encoded URL fits under the limit,
    // otherwise the corresponding POST flavour.
    private QuerySendMode actualSendMode() {
        int thisLengthLimit = urlLimit;
        switch(sendMode) {
            case asGetAlways :
            case asPostForm :
            case asPost :
                return sendMode;
            case asGetWithLimitBody :
            case asGetWithLimitForm :
                break;
        }
        // Only QuerySendMode.asGetWithLimitBody and QuerySendMode.asGetWithLimitForm here.
        // Other params (query= has not been added at this point)
        int paramsLength = params.httpString().length();
        int qEncodedLength = calcEncodeStringLength(queryString);

        // URL Length, including service (for safety)
        int length = service.length()
                + /* ?query= */ 1 + HttpParams.pQuery.length()
                + /* encoded query */ qEncodedLength
                + /* &other params*/ 1 + paramsLength;
        if ( length <= thisLengthLimit )
            return QuerySendMode.asGetAlways;
        return (sendMode==QuerySendMode.asGetWithLimitBody) ? QuerySendMode.asPost : QuerySendMode.asPostForm;
    }

    private static int calcEncodeStringLength(String str) {
        // Could approximate by counting non-queryString character and adding that *2 to the length of the string.
        String qs = HttpLib.urlEncodeQueryString(str);
        int encodedLength = qs.length();
        return encodedLength;
    }

    private HttpRequest.Builder executeQueryGet(Params thisParams, String acceptHeader) {
        thisParams.add(HttpParams.pQuery, queryString);
        String requestURL = requestURL(service, thisParams.httpString());
        HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit);
        acceptHeader(builder, acceptHeader);
        return builder.GET();
    }

    private HttpRequest.Builder executeQueryPostForm(Params thisParams, String acceptHeader) {
        thisParams.add(HttpParams.pQuery, queryString);
        String requestURL = service;
        String formBody = thisParams.httpString();
        HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit);
        acceptHeader(builder, acceptHeader);
        // Use an HTML form.
        contentTypeHeader(builder, WebContent.contentTypeHTMLForm);
        // Already UTF-8 encoded to ASCII.
        return builder.POST(BodyPublishers.ofString(formBody, StandardCharsets.US_ASCII));
    }

    // Use SPARQL query body and MIME type.
    private HttpRequest.Builder executeQueryPostBody(Params thisParams, String acceptHeader) {
        // Use thisParams (for default-graph-uri etc)
        String requestURL = requestURL(service, thisParams.httpString());
        HttpRequest.Builder builder = HttpLib.requestBuilder(requestURL, httpHeaders, readTimeout, readTimeoutUnit);
        contentTypeHeader(builder, WebContent.contentTypeSPARQLQuery);
        acceptHeader(builder, acceptHeader);
        return builder.POST(BodyPublishers.ofString(queryString));
    }

    private static void logQuery(String queryString, HttpRequest request) {}

    /**
     * Cancel query evaluation.
     * Only marks this execution closed; it does not abort an HTTP request
     * already in flight.
     */
    public void cancel() {
        closed = true;
    }

    @Override
    public void abort() {
        try {
            close();
        } catch (Exception ex) {
            Log.warn(this, "Error during abort", ex);
        }
    }

    @Override
    public void close() {
        closed = true;
        if (retainedConnection != null) {
            try {
                // This call may take a long time if the response has not been consumed
                // as HTTP client will consume the remaining response so it can re-use the
                // connection. If we're closing when we're not at the end of the stream then
                // issue a warning to the logs
                if (retainedConnection.read() != -1)
                    Log.warn(this, "HTTP response not fully consumed, if HTTP Client is reusing connections (its default behaviour) then it will consume the remaining response data which may take a long time and cause this application to become unresponsive");
                retainedConnection.close();
            } catch (RuntimeIOException | java.io.IOException e) {
                // If we are closing early and the underlying stream is chunk encoded
                // the close() can result in a IOException. TypedInputStream catches
                // and re-wraps that and we want to suppress both forms.
            } finally {
                retainedConnection = null;
            }
        }
    }

    @Override
    public boolean isClosed() { return closed; }
}
| apache/jena | jena-arq/src/main/java/org/apache/jena/sparql/exec/http/QueryExecHTTP.java | Java | apache-2.0 | 24,139 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.driver;
import io.netty.handler.codec.CodecException;
import org.apache.tinkerpop.gremlin.driver.exception.ConnectionException;
import org.apache.tinkerpop.gremlin.driver.message.RequestMessage;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelPromise;
import io.netty.channel.socket.nio.NioSocketChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URI;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
* A single connection to a Gremlin Server instance.
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
final class Connection {
private static final Logger logger = LoggerFactory.getLogger(Connection.class);
private final Channel channel;
private final URI uri;
private final ConcurrentMap<UUID, ResultQueue> pending = new ConcurrentHashMap<>();
private final Cluster cluster;
private final Client client;
private final ConnectionPool pool;
private final long keepAliveInterval;
public static final int MAX_IN_PROCESS = 4;
public static final int MIN_IN_PROCESS = 1;
public static final int MAX_WAIT_FOR_CONNECTION = 3000;
public static final int MAX_WAIT_FOR_SESSION_CLOSE = 3000;
public static final int MAX_CONTENT_LENGTH = 65536;
public static final int RECONNECT_INTERVAL = 1000;
public static final int RESULT_ITERATION_BATCH_SIZE = 64;
public static final long KEEP_ALIVE_INTERVAL = 1800000;
/**
* When a {@code Connection} is borrowed from the pool, this number is incremented to indicate the number of
* times it has been taken and is decremented when it is returned. This number is one indication as to how
* busy a particular {@code Connection} is.
*/
public final AtomicInteger borrowed = new AtomicInteger(0);
private final AtomicReference<Class<Channelizer>> channelizerClass = new AtomicReference<>(null);
private volatile boolean isDead = false;
private final int maxInProcess;
private final String connectionLabel;
private final Channelizer channelizer;
private final AtomicReference<CompletableFuture<Void>> closeFuture = new AtomicReference<>();
private final AtomicBoolean shutdownInitiated = new AtomicBoolean(false);
private final AtomicReference<ScheduledFuture> keepAliveFuture = new AtomicReference<>();
    /**
     * Opens a single Netty channel to the server at {@code uri}. The channelizer class
     * named in the connection pool settings is loaded reflectively once (cached in the
     * static {@code channelizerClass}) and a fresh instance is created per connection.
     *
     * @throws ConnectionException if the channel could not be opened
     */
    public Connection(final URI uri, final ConnectionPool pool, final int maxInProcess) throws ConnectionException {
        this.uri = uri;
        this.cluster = pool.getCluster();
        this.client = pool.getClient();
        this.pool = pool;
        this.maxInProcess = maxInProcess;
        this.keepAliveInterval = pool.settings().keepAliveInterval;
        connectionLabel = String.format("Connection{host=%s}", pool.host);
        if (cluster.isClosing()) throw new IllegalStateException("Cannot open a connection with the cluster after close() is called");
        final Bootstrap b = this.cluster.getFactory().createBootstrap();
        try {
            // Lazily resolve the configured channelizer class; compareAndSet makes the
            // cache race-safe if two connections initialize concurrently.
            if (channelizerClass.get() == null) {
                channelizerClass.compareAndSet(null, (Class<Channelizer>) Class.forName(cluster.connectionPoolSettings().channelizer));
            }
            channelizer = channelizerClass.get().newInstance();
            channelizer.init(this);
            b.channel(NioSocketChannel.class).handler(channelizer);
            // sync() blocks until the connect completes (or fails with an exception).
            channel = b.connect(uri.getHost(), uri.getPort()).sync().channel();
            channelizer.connected();
            logger.info("Created new connection for {}", uri);
        } catch (Exception ie) {
            logger.debug("Error opening connection on {}", uri);
            throw new ConnectionException(uri, "Could not open connection", ie);
        }
    }
/**
* A connection can only have so many things in process happening on it at once, where "in process" refers to
* the maximum number of in-process requests less the number of pending responses.
*/
public int availableInProcess() {
// no need for a negative available amount - not sure that the pending size can ever exceed maximum, but
// better to avoid the negatives that would ensue if it did
return Math.max(0, maxInProcess - pending.size());
}
    // True once a write has failed on this connection (set in write()'s promise listener).
    public boolean isDead() {
        return isDead;
    }
    // True once closeAsync() has been called (the close future is then non-null).
    boolean isClosing() {
        return closeFuture.get() != null;
    }
    // The server URI this connection was opened against.
    URI getUri() {
        return uri;
    }
    // The cluster that owns this connection's pool.
    Cluster getCluster() {
        return cluster;
    }
    // The client on whose behalf this connection was created.
    Client getClient() {
        return client;
    }
    // Requests written to the server whose responses have not yet completed,
    // keyed by request id.
    ConcurrentMap<UUID, ResultQueue> getPending() {
        return pending;
    }
    /**
     * Initiates an asynchronous close. Idempotent: repeat calls return the same future.
     * Keep-alive pings are cancelled immediately; the channel shutdown itself is
     * deferred until pending requests have drained.
     */
    public synchronized CompletableFuture<Void> closeAsync() {
        if (isClosing()) return closeFuture.get();
        final CompletableFuture<Void> future = new CompletableFuture<>();
        closeFuture.set(future);
        // stop any pings being sent at the server for keep-alive
        final ScheduledFuture keepAlive = keepAliveFuture.get();
        if (keepAlive != null) keepAlive.cancel(true);
        // make sure all requests in the queue are fully processed before killing. if they are then shutdown
        // can be immediate. if not this method will signal the readCompleted future defined in the write()
        // operation to check if it can close. in this way the connection no longer receives writes, but
        // can continue to read. If a request never comes back the future won't get fulfilled and the connection
        // will maintain a "pending" request, that won't quite ever go away. The build up of such a dead requests
        // on a connection in the connection pool will force the pool to replace the connection for a fresh one.
        if (isOkToClose()) {
            if (null == channel)
                future.complete(null);
            else
                shutdown(future);
        } else {
            // there may be some pending requests. schedule a job to wait for those to complete and then shutdown
            new CheckForPending(future).runUntilDone(cluster.executor(), 1000, TimeUnit.MILLISECONDS);
        }
        return future;
    }
public void close() {
try {
closeAsync().get();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
    /**
     * Writes the request to the channel. The returned promise tracks the network write
     * itself; {@code future} is completed (on the cluster executor) with a
     * {@link ResultSet} once the write succeeds, or completed exceptionally if it fails.
     * A successful write also registers a pending {@link ResultQueue} for the request id
     * and (re)schedules the keep-alive ping when the channelizer supports it.
     */
    public ChannelPromise write(final RequestMessage requestMessage, final CompletableFuture<ResultSet> future) {
        // once there is a completed write, then create a traverser for the result set and complete
        // the promise so that the client knows that that it can start checking for results.
        final Connection thisConnection = this;
        final ChannelPromise requestPromise = channel.newPromise()
                .addListener(f -> {
                    if (!f.isSuccess()) {
                        // A failed write means the host is unreachable: mark this
                        // connection dead and surface the cause to the caller.
                        if (logger.isDebugEnabled())
                            logger.debug(String.format("Write on connection %s failed", thisConnection.getConnectionInfo()), f.cause());
                        thisConnection.isDead = true;
                        thisConnection.returnToPool();
                        cluster.executor().submit(() -> future.completeExceptionally(f.cause()));
                    } else {
                        final LinkedBlockingQueue<Result> resultLinkedBlockingQueue = new LinkedBlockingQueue<>();
                        final CompletableFuture<Void> readCompleted = new CompletableFuture<>();
                        // the callback for when the read was successful, meaning that ResultQueue.markComplete()
                        // was called
                        readCompleted.thenAcceptAsync(v -> {
                            thisConnection.returnToPool();
                            tryShutdown();
                        }, cluster.executor());
                        // the callback for when the read failed. a failed read means the request went to the server
                        // and came back with a server-side error of some sort. it means the server is responsive
                        // so this isn't going to be like a dead host situation which is handled above on a failed
                        // write operation.
                        //
                        // in the event of an IOException (typically means that the Connection might have
                        // been closed from the server side - this is typical in situations like when a request is
                        // sent that exceeds maxContentLength and the server closes the channel on its side) or other
                        // exceptions that indicate a non-recoverable state for the Connection object
                        // (a netty CorruptedFrameException is a good example of that), the Connection cannot simply
                        // be returned to the pool as future uses will end with refusal from the server and make it
                        // appear as a dead host as the write will not succeed. instead, the Connection needs to be
                        // replaced in these scenarios which destroys the dead channel on the client and allows a new
                        // one to be reconstructed.
                        readCompleted.exceptionally(t -> {
                            if (t instanceof IOException || t instanceof CodecException) {
                                if (pool != null) pool.replaceConnection(thisConnection);
                            } else {
                                thisConnection.returnToPool();
                            }
                            // close was signaled in closeAsync() but there were pending messages at that time. attempt
                            // the shutdown if the returned result cleared up the last pending message
                            tryShutdown();
                            return null;
                        });
                        final ResultQueue handler = new ResultQueue(resultLinkedBlockingQueue, readCompleted);
                        pending.put(requestMessage.getRequestId(), handler);
                        cluster.executor().submit(() -> future.complete(
                                new ResultSet(handler, cluster.executor(), readCompleted, requestMessage, pool.host)));
                    }
                });
        channel.writeAndFlush(requestMessage, requestPromise);
        // try to keep the connection alive if the channel allows such things - websockets will
        if (channelizer.supportsKeepAlive() && keepAliveInterval > 0) {
            final ScheduledFuture oldKeepAliveFuture = keepAliveFuture.getAndSet(cluster.executor().scheduleAtFixedRate(() -> {
                logger.debug("Request sent to server to keep {} alive", thisConnection);
                try {
                    channel.writeAndFlush(channelizer.createKeepAliveMessage());
                } catch (Exception ex) {
                    // will just log this for now - a future real request can be responsible for the failure that
                    // marks the host as dead. this also may not mean the host is actually dead. more robust handling
                    // is in play for real requests, not this simple ping
                    logger.warn(String.format("Keep-alive did not succeed on %s", thisConnection), ex);
                }
            }, keepAliveInterval, keepAliveInterval, TimeUnit.MILLISECONDS));
            // try to cancel the old future if it's still un-executed - no need to ping since a new write has come
            // through on the connection
            if (oldKeepAliveFuture != null) oldKeepAliveFuture.cancel(true);
        }
        return requestPromise;
    }
/**
 * Returns this connection to its owning pool. A {@code ConnectionException}
 * raised during the return is logged at debug level rather than propagated.
 */
public void returnToPool() {
    if (pool == null) return;
    try {
        pool.returnConnection(this);
    } catch (ConnectionException ce) {
        if (logger.isDebugEnabled()) {
            logger.debug("Returned {} connection to {} but an error occurred - {}",
                    this.getConnectionInfo(), pool, ce.getMessage());
        }
    }
}
// Closing is permitted once no responses are outstanding, or the channel is
// already closed, or the host is no longer marked available.
private boolean isOkToClose() {
    if (pending.isEmpty()) return true;
    if (channel != null && !channel.isOpen()) return true;
    return !pool.host.isAvailable();
}
/**
 * A close was requested via closeAsync() while messages were still pending; attempt the
 * shutdown now in case the returned result cleared the last pending message.
 */
private void tryShutdown() {
    if (isClosing() && isOkToClose()) {
        shutdown(closeFuture.get());
    }
}
/**
 * Performs the actual connection shutdown exactly once (guarded by {@code shutdownInitiated}).
 * For sessioned clients it first sends a session close message and waits - bounded by
 * {@code maxWaitForSessionClose} - for the server's acknowledgement, then closes the
 * channelizer and the Netty channel, completing {@code future} with the channel-close outcome.
 *
 * @param future completed (normally or exceptionally) when the underlying channel has closed
 */
private synchronized void shutdown(final CompletableFuture<Void> future) {
    // shutdown can be called directly from closeAsync() or after write() and therefore this method should only
    // be called once. once shutdown is initiated, it shouldn't be executed a second time or else it sends more
    // messages at the server and leads to ugly log messages over there.
    if (shutdownInitiated.compareAndSet(false, true)) {
        // maybe this should be delegated back to the Client implementation??? kinda weird to instanceof here.....
        if (client instanceof Client.SessionedClient) {
            final boolean forceClose = client.getSettings().getSession().get().isForceClosed();
            final RequestMessage closeMessage = client.buildMessage(
                    RequestMessage.build(Tokens.OPS_CLOSE).addArg(Tokens.ARGS_FORCE, forceClose)).create();
            final CompletableFuture<ResultSet> closed = new CompletableFuture<>();
            write(closeMessage, closed);
            try {
                // make sure we get a response here to validate that things closed as expected. on error, we'll let
                // the server try to clean up on its own. the primary error here should probably be related to
                // protocol issues which should not be something a user has to fuss with.
                closed.join().all().get(cluster.connectionPoolSettings().maxWaitForSessionClose, TimeUnit.MILLISECONDS);
            } catch (TimeoutException ex) {
                final String msg = String.format(
                        "Timeout while trying to close connection on %s - force closing - server will close session on shutdown or expiration.",
                        ((Client.SessionedClient) client).getSessionId());
                logger.warn(msg, ex);
            } catch (Exception ex) {
                final String msg = String.format(
                        "Encountered an error trying to close connection on %s - force closing - server will close session on shutdown or expiration.",
                        ((Client.SessionedClient) client).getSessionId());
                logger.warn(msg, ex);
            }
        }
        // tear down the protocol layer first, then close the raw channel and propagate
        // the close outcome (success or cause) into the caller-supplied future.
        channelizer.close(channel);
        final ChannelPromise promise = channel.newPromise();
        promise.addListener(f -> {
            if (f.cause() != null)
                future.completeExceptionally(f.cause());
            else
                future.complete(null);
        });
        channel.close(promise);
    }
}
/** Builds a human-readable snapshot of this connection's state for logging. */
public String getConnectionInfo() {
    final String template = "Connection{host=%s, isDead=%s, borrowed=%s, pending=%s}";
    return String.format(template, pool.host, isDead, borrowed, pending.size());
}
@Override
public String toString() {
    // connectionLabel is a descriptive identifier for this connection, assigned elsewhere in the class.
    return connectionLabel;
}
/**
 * Self-cancelling task that periodically checks for the pending queue to clear before shutting down the
 * {@code Connection}. Once it does that, it cancels its own scheduled job in the executor.
 */
private final class CheckForPending implements Runnable {
    // Handle to this task's own scheduled job; published by runUntilDone() after scheduling.
    private volatile ScheduledFuture<?> self;
    // Completed by shutdown() once the connection has closed.
    private final CompletableFuture<Void> future;

    CheckForPending(final CompletableFuture<Void> future) {
        this.future = future;
    }

    @Override
    public void run() {
        // NOTE(review): "this" is the CheckForPending task (no toString override), so the log prints the
        // task's default identity - presumably the Connection was intended; confirm before changing.
        logger.info("Checking for pending messages to complete before close on {}", this);
        if (isOkToClose()) {
            shutdown(future);
            boolean interrupted = false;
            try {
                // Busy-wait until runUntilDone() has published the ScheduledFuture: scheduling and the
                // first execution of this task can race, so "self" may still be null here.
                while(null == self) {
                    try {
                        Thread.sleep(1);
                    } catch (InterruptedException e) {
                        interrupted = true;
                    }
                }
                self.cancel(false);
            } finally {
                // Restore the interrupt status swallowed while waiting.
                if(interrupted) {
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    // Schedules this task at a fixed rate; it cancels itself from run() once closing is possible.
    void runUntilDone(final ScheduledExecutorService executor, final long period, final TimeUnit unit) {
        self = executor.scheduleAtFixedRate(this, period, period, unit);
    }
}
}
| jorgebay/tinkerpop | gremlin-driver/src/main/java/org/apache/tinkerpop/gremlin/driver/Connection.java | Java | apache-2.0 | 18,349 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.test.integration.functions.compress;
import org.apache.sysml.runtime.compress.CompressedMatrixBlock;
import org.apache.sysml.runtime.matrix.data.MatrixBlock;
import org.apache.sysml.runtime.util.DataConverter;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.utils.TestUtils;
import org.junit.Assert;
import org.junit.Test;
/**
 * Compares cell-wise getValue() results between an uncompressed MatrixBlock and a
 * CompressedMatrixBlock over dense, sparse and empty inputs with random, rounded
 * random and constant values - both with and without actual compression applied.
 */
public class BasicGetValueTest extends AutomatedTestBase
{
    private static final int rows = 1023;
    private static final int cols = 20;
    private static final double sparsity1 = 0.9;
    private static final double sparsity2 = 0.1;
    private static final double sparsity3 = 0.0;

    /** Sparsity profile of the generated test matrix. */
    public enum SparsityType {
        DENSE,
        SPARSE,
        EMPTY,
    }

    /** Value distribution of the generated test matrix. */
    public enum ValueType {
        RAND,
        RAND_ROUND,
        CONST,
    }

    @Override
    public void setUp() {
        // no fixtures required for these tests
    }

    @Test
    public void testDenseRandDataCompression() {
        runGetValueTest(SparsityType.DENSE, ValueType.RAND, true);
    }

    @Test
    public void testSparseRandDataCompression() {
        runGetValueTest(SparsityType.SPARSE, ValueType.RAND, true);
    }

    @Test
    public void testEmptyCompression() {
        runGetValueTest(SparsityType.EMPTY, ValueType.RAND, true);
    }

    @Test
    public void testDenseRoundRandDataCompression() {
        runGetValueTest(SparsityType.DENSE, ValueType.RAND_ROUND, true);
    }

    @Test
    public void testSparseRoundRandDataCompression() {
        runGetValueTest(SparsityType.SPARSE, ValueType.RAND_ROUND, true);
    }

    @Test
    public void testDenseConstantDataCompression() {
        runGetValueTest(SparsityType.DENSE, ValueType.CONST, true);
    }

    @Test
    public void testSparseConstDataCompression() {
        runGetValueTest(SparsityType.SPARSE, ValueType.CONST, true);
    }

    @Test
    public void testDenseRandDataNoCompression() {
        runGetValueTest(SparsityType.DENSE, ValueType.RAND, false);
    }

    @Test
    public void testSparseRandDataNoCompression() {
        runGetValueTest(SparsityType.SPARSE, ValueType.RAND, false);
    }

    @Test
    public void testEmptyNoCompression() {
        runGetValueTest(SparsityType.EMPTY, ValueType.RAND, false);
    }

    @Test
    public void testDenseRoundRandDataNoCompression() {
        runGetValueTest(SparsityType.DENSE, ValueType.RAND_ROUND, false);
    }

    @Test
    public void testSparseRoundRandDataNoCompression() {
        runGetValueTest(SparsityType.SPARSE, ValueType.RAND_ROUND, false);
    }

    @Test
    public void testDenseConstDataNoCompression() {
        runGetValueTest(SparsityType.DENSE, ValueType.CONST, false);
    }

    @Test
    public void testSparseConstDataNoCompression() {
        runGetValueTest(SparsityType.SPARSE, ValueType.CONST, false);
    }

    /**
     * Generates a test matrix for the given sparsity/value profile, optionally compresses
     * it, and asserts that every cell read through the compressed block equals the value
     * read through the uncompressed block.
     *
     * @param sptype   sparsity profile of the generated input
     * @param vtype    value distribution of the generated input
     * @param compress whether to actually run compression on the CompressedMatrixBlock
     */
    private void runGetValueTest(SparsityType sptype, ValueType vtype, boolean compress)
    {
        try {
            // map the sparsity profile to a concrete sparsity value
            final double sparsity;
            switch (sptype) {
                case DENSE:  sparsity = sparsity1; break;
                case SPARSE: sparsity = sparsity2; break;
                case EMPTY:  sparsity = sparsity3; break;
                default:     sparsity = -1; break;
            }

            // generate input data (constant data uses min == max == 10)
            final double min = (vtype == ValueType.CONST) ? 10 : -10;
            double[][] input = TestUtils.generateTestMatrix(rows, cols, min, 10, sparsity, 7);
            if (vtype == ValueType.RAND_ROUND) {
                input = TestUtils.round(input);
            }
            final MatrixBlock mb = DataConverter.convertToMatrixBlock(input);

            // wrap the matrix block and optionally apply actual compression
            final CompressedMatrixBlock cmb = new CompressedMatrixBlock(mb);
            if (compress) {
                cmb.compress();
            }

            // iterate over all cells, get and compare
            for (int i = 0; i < rows; i++) {
                for (int j = 0; j < cols; j++) {
                    final double ulaVal = mb.getValue(i, j);  // calls quickGetValue internally
                    final double claVal = cmb.getValue(i, j); // calls quickGetValue internally
                    Assert.assertTrue("Get returned wrong value: "+claVal+" (expected: "+ulaVal+")", ulaVal==claVal);
                }
            }
        }
        catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
}
| asurve/arvind-sysml | src/test/java/org/apache/sysml/test/integration/functions/compress/BasicGetValueTest.java | Java | apache-2.0 | 4,624 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.milton.http.values;
import io.milton.http.XmlWriter;
import io.milton.http.XmlWriter.Element;
import io.milton.http.webdav.WebDavProtocol;
import java.util.Map;
/**
 * Supports HrefList objects, and writes them out as a list of
 * {@code <href>...</href>} elements inside the property element.
 *
 * Currently readonly, but should support writing.
 *
 * @author brad
 */
public class HrefListValueWriter implements ValueWriter {

    /**
     * Returns true when the property value class is {@code HrefList} (or a subclass),
     * which is the only type this writer can render.
     */
    @Override
    public boolean supports(String nsUri, String localName, Class c) {
        return HrefList.class.isAssignableFrom(c);
    }

    /**
     * Writes the HrefList as an outer element named by {@code prefix}/{@code localName},
     * containing one DAV {@code href} child element per list entry. A null value writes
     * nothing; a non-null value of any other type is a programming error.
     */
    @Override
    public void writeValue(XmlWriter writer, String nsUri, String prefix, String localName, Object val, String href, Map<String, String> nsPrefixes) {
        if (val instanceof HrefList) {
            // instanceof already guarantees val is non-null, so the former "list != null"
            // check was dead code and has been removed.
            Element outerEl = writer.begin(prefix, localName).open();
            HrefList list = (HrefList) val;
            for (String s : list) {
                Element hrefEl = writer.begin(WebDavProtocol.DAV_PREFIX + ":href").open(false);
                hrefEl.writeText(s);
                hrefEl.close();
            }
            outerEl.close();
        } else if (val != null) {
            throw new RuntimeException("Value is not correct type. Is a: " + val.getClass());
        }
    }

    /**
     * Parsing href lists back from XML is not supported yet.
     */
    @Override
    public Object parse(String namespaceURI, String localPart, String value) {
        throw new UnsupportedOperationException("Not supported yet.");
    }
}
| skoulouzis/lobcder | milton2/milton-server-ce/src/main/java/io/milton/http/values/HrefListValueWriter.java | Java | apache-2.0 | 2,213 |
/**
* This module sets default values and validates ortb2 first part data
* @module modules/firstPartyData
*/
import { config } from '../../src/config.js';
import * as utils from '../../src/utils.js';
import { ORTB_MAP } from './config.js';
import { submodule } from '../../src/hook.js';
import { getStorageManager } from '../../src/storageManager.js';
const STORAGE = getStorageManager();

// Truthy when the user has opted out via the `_pubcid_optout` cookie or
// localStorage entry; assigned in initSubmodule() and consulted during validation.
let optout;
/**
 * Determines whether a value should be treated as empty ortb2 data.
 * Non-empty objects, numbers and truthy primitives count as non-empty.
 * @param {*} data value to test against
 * @returns {Boolean} true when the value is considered empty
 */
function isEmptyData(data) {
  if (typeof data === 'object' && !utils.isEmpty(data)) {
    return false;
  }
  if (typeof data !== 'object' && (utils.isNumber(data) || data)) {
    return false;
  }
  return true;
}
/**
 * Verifies that every required key exists (and is non-empty) on a data object,
 * logging a warning for each missing one.
 * @param {Object} obj data object under validation
 * @param {Array} required keys that must exist
 * @param {String} parent object path (used in warning messages)
 * @param {Number} i index of the value in the containing data array (for warnings)
 * @returns {Boolean} true when all required keys are fulfilled
 */
function getRequiredData(obj, required, parent, i) {
  let fulfilled = true;
  for (const key of required) {
    if (!obj[key] || isEmptyData(obj[key])) {
      fulfilled = false;
      utils.logWarn(`Filtered ${parent}[] value at index ${i} in ortb2 data: missing required property ${key}`);
    }
  }
  return fulfilled;
}
/**
 * Checks a value against the type declared in an ORTB mapping entry.
 * For 'object' mappings the array-ness of the value must match mapping.isArray.
 * @param {*} data value to test against
 * @param {Object} mapping object containing type definition and if should be array bool
 * @returns {Boolean} true when the value satisfies the declared type
 */
function typeValidation(data, mapping) {
  switch (mapping.type) {
    case 'string':
      return typeof data === 'string';
    case 'number':
      // NaN/Infinity are rejected via isFinite
      return typeof data === 'number' && isFinite(data);
    case 'object':
      if (typeof data !== 'object') return false;
      return Array.isArray(data) === Boolean(mapping.isArray);
    default:
      return false;
  }
}
/**
 * Validates ortb2 data arrays and filters out invalid data, logging a warning
 * for each dropped entry. Runs three passes: type check, required-property
 * check, then a per-entry rebuild (recursing into object children).
 * @param {Array} arr ortb2 data array
 * @param {Object} child object defining child type and if array
 * @param {String} path config path of data array (into ORTB_MAP)
 * @param {String} parent parent path for logging warnings
 * @returns {Array} validated/filtered data
 */
export function filterArrayData(arr, child, path, parent) {
  // Pass 1: drop entries whose runtime type does not match the declared child type.
  arr = arr.filter((index, i) => {
    let check = typeValidation(index, {type: child.type, isArray: child.isArray});
    if (check && Array.isArray(index) === Boolean(child.isArray)) {
      return true;
    }
    utils.logWarn(`Filtered ${parent}[] value at index ${i} in ortb2 data: expected type ${child.type}`);
  }).filter((index, i) => {
    // Pass 2: drop entries missing any property that ORTB_MAP marks as required.
    let requiredCheck = true;
    let mapping = utils.deepAccess(ORTB_MAP, path);
    if (mapping && mapping.required) requiredCheck = getRequiredData(index, mapping.required, parent, i);
    if (requiredCheck) return true;
  }).reduce((result, value, i) => {
    // Pass 3: keep strings verbatim; recursively validate object children against
    // the mapping's child schema, re-checking required properties on the result.
    let typeBool = false;
    let mapping = utils.deepAccess(ORTB_MAP, path);
    switch (child.type) {
      case 'string':
        result.push(value);
        break;
      case 'object':
        if (mapping && mapping.children) {
          let validObject = validateFpd(value, path + '.children.', parent + '.');
          if (Object.keys(validObject).length) {
            let requiredCheck = getRequiredData(validObject, mapping.required, parent, i);
            if (requiredCheck) {
              result.push(validObject);
              typeBool = true;
            }
          }
        } else {
          result.push(value);
          typeBool = true;
        }
        break;
    }
    // typeBool stays false for string entries too, but the warning below only
    // fires for objects that failed recursion/required checks or unknown types.
    if (!typeBool) utils.logWarn(`Filtered ${parent}[] value at index ${i} in ortb2 data: expected type ${child.type}`);
    return result;
  }, []);
  return arr;
}
/**
 * Validates an ortb2 object against ORTB_MAP and filters out invalid data,
 * logging a warning for each dropped property. Properties without a mapping
 * entry are passed through unchanged.
 * @param {Object} fpd ortb2 object
 * @param {String} path config path of data array (into ORTB_MAP)
 * @param {String} parent parent path for logging warnings
 * @returns {Object} validated/filtered data
 */
export function validateFpd(fpd, path = '', parent = '') {
  if (!fpd) return {};

  // Three stages: drop properties flagged invalid by the mapping, drop
  // properties with the wrong type, then rebuild each surviving property
  // (recursing into nested objects and arrays).
  let validObject = Object.assign({}, Object.keys(fpd).filter(key => {
    let mapping = utils.deepAccess(ORTB_MAP, path + key);

    if (!mapping || !mapping.invalid) return key;

    utils.logWarn(`Filtered ${parent}${key} property in ortb2 data: invalid property`);
  }).filter(key => {
    let mapping = utils.deepAccess(ORTB_MAP, path + key);
    // unmapped keys skip type validation
    let typeBool = (mapping) ? typeValidation(fpd[key], {type: mapping.type, isArray: mapping.isArray}) : true;

    if (typeBool || !mapping) return key;

    utils.logWarn(`Filtered ${parent}${key} property in ortb2 data: expected type ${(mapping.isArray) ? 'array' : mapping.type}`);
  }).reduce((result, key) => {
    let mapping = utils.deepAccess(ORTB_MAP, path + key);
    let modified = {};

    if (mapping) {
      // user-related data marked optoutApplies is dropped entirely when a
      // pubcid optout was detected during init
      if (mapping.optoutApplies && optout) {
        utils.logWarn(`Filtered ${parent}${key} data: pubcid optout found`);
        return result;
      }

      // nested objects recurse; typed arrays are filtered element-wise;
      // everything else is kept as-is
      modified = (mapping.type === 'object' && !mapping.isArray)
        ? validateFpd(fpd[key], path + key + '.children.', parent + key + '.')
        : (mapping.isArray && mapping.childType)
          ? filterArrayData(fpd[key], { type: mapping.childType, isArray: mapping.childisArray }, path + key, parent + key) : fpd[key];

      // Check if modified data has data and return
      (!isEmptyData(modified)) ? result[key] = modified
        : utils.logWarn(`Filtered ${parent}${key} property in ortb2 data: empty data found`);
    } else {
      result[key] = fpd[key];
    }

    return result;
  }, {}));

  // Return validated data
  return validObject;
}
/**
 * Runs validation on the global ortb2 data and on every bidder-specific config,
 * re-setting each bidder config with its filtered ortb2 data.
 * @param {Object} data global ortb2 data
 * @returns {Object} validated global ortb2 data
 */
function runValidations(data) {
  const validated = validateFpd(data);

  const bidderConfigs = { ...config.getBidderConfig() };
  Object.keys(bidderConfigs).forEach(bidder => {
    const filtered = {};
    Object.keys(bidderConfigs[bidder]).forEach(key => {
      // only the ortb2 entry is validated; other keys pass through untouched
      const value = (key !== 'ortb2') ? bidderConfigs[bidder][key] : validateFpd(bidderConfigs[bidder][key]);
      if (value) filtered[key] = value;
    });
    if (Object.keys(filtered).length) config.setBidderConfig({ bidders: [bidder], config: filtered });
  });

  return validated;
}
/**
 * Initializes the validation submodule: detects a `_pubcid_optout` flag in
 * cookies or localStorage (which causes user data to be filtered during
 * validation), then runs validations unless explicitly skipped via config.
 * @param {Object} fpdConf module configuration (honors skipValidations)
 * @param {Object} data global ortb2 data
 * @returns {Object} validated (or untouched) ortb2 data
 */
export function initSubmodule(fpdConf, data) {
  const cookieOptout = STORAGE.cookiesAreEnabled() && STORAGE.getCookie('_pubcid_optout');
  const storageOptout = STORAGE.hasLocalStorage() && STORAGE.getDataFromLocalStorage('_pubcid_optout');
  optout = cookieOptout || storageOptout;

  return fpdConf.skipValidations ? data : runValidations(data);
}
/** @type {firstPartyDataSubmodule} */
export const validationSubmodule = {
  name: 'validation',
  // queue position within the firstPartyData pipeline - presumably lower values
  // run earlier; confirm against the firstPartyData hook implementation.
  queue: 1,
  init: initSubmodule
}

submodule('firstPartyData', validationSubmodule)
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.binaryauthorization.v1beta1.model;
/**
* Request message for ValidationHelperV1.ValidateAttestationOccurrence.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Binary Authorization API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class ValidateAttestationOccurrenceRequest extends com.google.api.client.json.GenericJson {

  // NOTE: generated API model class - fields map to JSON payload keys via @Key;
  // regenerating the client library will overwrite manual edits.

  /**
   * Required. An AttestationOccurrence to be checked that it can be verified by the Attestor. It
   * does not have to be an existing entity in Container Analysis. It must otherwise be a valid
   * AttestationOccurrence.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private AttestationOccurrence attestation;

  /**
   * Required. The resource name of the Note to which the containing Occurrence is associated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String occurrenceNote;

  /**
   * Required. The URI of the artifact (e.g. container image) that is the subject of the containing
   * Occurrence.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String occurrenceResourceUri;

  /**
   * Required. An AttestationOccurrence to be checked that it can be verified by the Attestor. It
   * does not have to be an existing entity in Container Analysis. It must otherwise be a valid
   * AttestationOccurrence.
   * @return value or {@code null} for none
   */
  public AttestationOccurrence getAttestation() {
    return attestation;
  }

  /**
   * Required. An AttestationOccurrence to be checked that it can be verified by the Attestor. It
   * does not have to be an existing entity in Container Analysis. It must otherwise be a valid
   * AttestationOccurrence.
   * @param attestation attestation or {@code null} for none
   */
  public ValidateAttestationOccurrenceRequest setAttestation(AttestationOccurrence attestation) {
    this.attestation = attestation;
    return this;
  }

  /**
   * Required. The resource name of the Note to which the containing Occurrence is associated.
   * @return value or {@code null} for none
   */
  public java.lang.String getOccurrenceNote() {
    return occurrenceNote;
  }

  /**
   * Required. The resource name of the Note to which the containing Occurrence is associated.
   * @param occurrenceNote occurrenceNote or {@code null} for none
   */
  public ValidateAttestationOccurrenceRequest setOccurrenceNote(java.lang.String occurrenceNote) {
    this.occurrenceNote = occurrenceNote;
    return this;
  }

  /**
   * Required. The URI of the artifact (e.g. container image) that is the subject of the containing
   * Occurrence.
   * @return value or {@code null} for none
   */
  public java.lang.String getOccurrenceResourceUri() {
    return occurrenceResourceUri;
  }

  /**
   * Required. The URI of the artifact (e.g. container image) that is the subject of the containing
   * Occurrence.
   * @param occurrenceResourceUri occurrenceResourceUri or {@code null} for none
   */
  public ValidateAttestationOccurrenceRequest setOccurrenceResourceUri(java.lang.String occurrenceResourceUri) {
    this.occurrenceResourceUri = occurrenceResourceUri;
    return this;
  }

  @Override
  public ValidateAttestationOccurrenceRequest set(String fieldName, Object value) {
    return (ValidateAttestationOccurrenceRequest) super.set(fieldName, value);
  }

  @Override
  public ValidateAttestationOccurrenceRequest clone() {
    return (ValidateAttestationOccurrenceRequest) super.clone();
  }

}
| googleapis/google-api-java-client-services | clients/google-api-services-binaryauthorization/v1beta1/1.31.0/com/google/api/services/binaryauthorization/v1beta1/model/ValidateAttestationOccurrenceRequest.java | Java | apache-2.0 | 4,580 |
/* Generated By:JavaCC: Do not edit this line. SelectorParserImpl.java */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.jms.selector.parser;
import java.io.*;
import java.util.*;
import org.apache.qpid.jms.selector.filter.*;
/**
* JMS Selector Parser generated by JavaCC
*
* Do not edit this .java file directly - it is generated from SelectorParserImpl.jj
* Edit SelectorParserImpl.jj and rebuild with the 'generate-selector-parser' profile.
*/
public class SelectorParserImpl implements SelectorParserImplConstants {
/**
 * Coerces a parsed Expression into a BooleanExpression: boolean expressions pass
 * through unchanged, property references are wrapped in a boolean cast, and any
 * other expression type is a selector error.
 *
 * @throws ParseException if the expression cannot yield a boolean result
 */
private BooleanExpression asBooleanExpression(Expression value) throws ParseException {
    if (value instanceof BooleanExpression) {
        return (BooleanExpression) value;
    }
    if (value instanceof PropertyExpression) {
        return UnaryExpression.createBooleanCast( value );
    }
    throw new ParseException("Expression will not result in a boolean value: " + value);
}
// ----------------------------------------------------------------------------
// Grammer
// ----------------------------------------------------------------------------
// Entry point of the generated grammar: a selector is a single OR-expression
// that must evaluate to a boolean.
final public BooleanExpression JmsSelector() throws ParseException {
    Expression left=null;
    left = orExpression();
    {if (true) return asBooleanExpression(left);}
    throw new Error("Missing return statement in function");
}
// orExpression := andExpression ( OR andExpression )*
// Left-associative: each OR folds the next operand into "left".
final public Expression orExpression() throws ParseException {
    Expression left;
    Expression right;
    left = andExpression();
    label_1:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case OR:
            ;
            break;
        default:
            break label_1;
        }
        jj_consume_token(OR);
        right = andExpression();
        left = LogicExpression.createOR(asBooleanExpression(left), asBooleanExpression(right));
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// andExpression := equalityExpression ( AND equalityExpression )*
// Left-associative, mirroring orExpression one precedence level down.
final public Expression andExpression() throws ParseException {
    Expression left;
    Expression right;
    left = equalityExpression();
    label_2:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case AND:
            ;
            break;
        default:
            break label_2;
        }
        jj_consume_token(AND);
        right = equalityExpression();
        left = LogicExpression.createAND(asBooleanExpression(left), asBooleanExpression(right));
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// equalityExpression := comparisonExpression ( ('=' | '<>' | IS [NOT] NULL) ... )*
// Token 27 is equality and 28 is inequality (per the createEqual/createNotEqual calls).
// jj_2_1(2) is a two-token lookahead distinguishing "IS NULL" from "IS NOT NULL".
final public Expression equalityExpression() throws ParseException {
    Expression left;
    Expression right;
    left = comparisonExpression();
    label_3:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case IS:
        case 27:
        case 28:
            ;
            break;
        default:
            break label_3;
        }
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case 27:
            jj_consume_token(27);
            right = comparisonExpression();
            left = ComparisonExpression.createEqual(left, right);
            break;
        case 28:
            jj_consume_token(28);
            right = comparisonExpression();
            left = ComparisonExpression.createNotEqual(left, right);
            break;
        default:
            if (jj_2_1(2)) {
                jj_consume_token(IS);
                jj_consume_token(NULL);
                left = ComparisonExpression.createIsNull(left);
            } else {
                switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                case IS:
                    jj_consume_token(IS);
                    jj_consume_token(NOT);
                    jj_consume_token(NULL);
                    left = ComparisonExpression.createIsNotNull(left);
                    break;
                default:
                    jj_consume_token(-1);
                    throw new ParseException();
                }
            }
        }
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// Parses the comparison level of the selector grammar: >, >=, <, <= (token codes
// 29-32, per the create* calls below), [NOT] LIKE ... [ESCAPE ...],
// [NOT] BETWEEN ... AND ..., and [NOT] IN ( string, ... ).
// Left-associative: the loop folds each operator application into "left".
// jj_2_2/jj_2_3/jj_2_4 are two-token lookaheads disambiguating the NOT-prefixed forms.
final public Expression comparisonExpression() throws ParseException {
    Expression left;
    Expression right;
    Expression low;
    Expression high;
    String t, u;
    boolean not;
    ArrayList list;
    left = addExpression();
    label_4:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case NOT:
        case BETWEEN:
        case LIKE:
        case IN:
        case 29:
        case 30:
        case 31:
        case 32:
            ;
            break;
        default:
            break label_4;
        }
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case 29:
            // '>'
            jj_consume_token(29);
            right = addExpression();
            left = ComparisonExpression.createGreaterThan(left, right);
            break;
        case 30:
            // '>='
            jj_consume_token(30);
            right = addExpression();
            left = ComparisonExpression.createGreaterThanEqual(left, right);
            break;
        case 31:
            // '<'
            jj_consume_token(31);
            right = addExpression();
            left = ComparisonExpression.createLessThan(left, right);
            break;
        case 32:
            // '<='
            jj_consume_token(32);
            right = addExpression();
            left = ComparisonExpression.createLessThanEqual(left, right);
            break;
        case LIKE:
            // LIKE pattern [ESCAPE escapeChar]
            u=null;
            jj_consume_token(LIKE);
            t = stringLitteral();
            switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
            case ESCAPE:
                jj_consume_token(ESCAPE);
                u = stringLitteral();
                break;
            default:
                ;
            }
            left = ComparisonExpression.createLike(left, t, u);
            break;
        default:
            if (jj_2_2(2)) {
                // NOT LIKE pattern [ESCAPE escapeChar]
                u=null;
                jj_consume_token(NOT);
                jj_consume_token(LIKE);
                t = stringLitteral();
                switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                case ESCAPE:
                    jj_consume_token(ESCAPE);
                    u = stringLitteral();
                    break;
                default:
                    ;
                }
                left = ComparisonExpression.createNotLike(left, t, u);
            } else {
                switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                case BETWEEN:
                    // BETWEEN low AND high
                    jj_consume_token(BETWEEN);
                    low = addExpression();
                    jj_consume_token(AND);
                    high = addExpression();
                    left = ComparisonExpression.createBetween(left, low, high);
                    break;
                default:
                    if (jj_2_3(2)) {
                        // NOT BETWEEN low AND high
                        jj_consume_token(NOT);
                        jj_consume_token(BETWEEN);
                        low = addExpression();
                        jj_consume_token(AND);
                        high = addExpression();
                        left = ComparisonExpression.createNotBetween(left, low, high);
                    } else {
                        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                        case IN:
                            // IN ( string [, string]* )  - tokens 33/34/35 are '(' ',' ')'
                            jj_consume_token(IN);
                            jj_consume_token(33);
                            t = stringLitteral();
                            list = new ArrayList();
                            list.add( t );
                            label_5:
                            while (true) {
                                switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                                case 34:
                                    ;
                                    break;
                                default:
                                    break label_5;
                                }
                                jj_consume_token(34);
                                t = stringLitteral();
                                list.add( t );
                            }
                            jj_consume_token(35);
                            left = ComparisonExpression.createInFilter(left, list);
                            break;
                        default:
                            if (jj_2_4(2)) {
                                // NOT IN ( string [, string]* )
                                jj_consume_token(NOT);
                                jj_consume_token(IN);
                                jj_consume_token(33);
                                t = stringLitteral();
                                list = new ArrayList();
                                list.add( t );
                                label_6:
                                while (true) {
                                    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
                                    case 34:
                                        ;
                                        break;
                                    default:
                                        break label_6;
                                    }
                                    jj_consume_token(34);
                                    t = stringLitteral();
                                    list.add( t );
                                }
                                jj_consume_token(35);
                                left = ComparisonExpression.createNotInFilter(left, list);
                            } else {
                                jj_consume_token(-1);
                                throw new ParseException();
                            }
                        }
                    }
                }
            }
        }
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// addExpression := multExpr ( ('+' | '-') multExpr )*
// Tokens 36/37 are '+'/'-' (per createPlus/createMinus); jj_2_5 is an unbounded
// lookahead deciding whether another additive operator follows.
final public Expression addExpression() throws ParseException {
    Expression left;
    Expression right;
    left = multExpr();
    label_7:
    while (true) {
        if (jj_2_5(2147483647)) {
            ;
        } else {
            break label_7;
        }
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case 36:
            jj_consume_token(36);
            right = multExpr();
            left = ArithmeticExpression.createPlus(left, right);
            break;
        case 37:
            jj_consume_token(37);
            right = multExpr();
            left = ArithmeticExpression.createMinus(left, right);
            break;
        default:
            jj_consume_token(-1);
            throw new ParseException();
        }
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// multExpr := unaryExpr ( ('*' | '/' | '%') unaryExpr )*
// Tokens 38/39/40 map to multiply/divide/mod (per the create* calls).
final public Expression multExpr() throws ParseException {
    Expression left;
    Expression right;
    left = unaryExpr();
    label_8:
    while (true) {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case 38:
        case 39:
        case 40:
            ;
            break;
        default:
            break label_8;
        }
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case 38:
            jj_consume_token(38);
            right = unaryExpr();
            left = ArithmeticExpression.createMultiply(left, right);
            break;
        case 39:
            jj_consume_token(39);
            right = unaryExpr();
            left = ArithmeticExpression.createDivide(left, right);
            break;
        case 40:
            jj_consume_token(40);
            right = unaryExpr();
            left = ArithmeticExpression.createMod(left, right);
            break;
        default:
            jj_consume_token(-1);
            throw new ParseException();
        }
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// unaryExpr := '+' unaryExpr | '-' unaryExpr | NOT unaryExpr | primaryExpr
// Unary '+' (token 36) is a no-op; '-' (token 37) negates; NOT requires the
// operand to be boolean-castable (enforced via asBooleanExpression).
final public Expression unaryExpr() throws ParseException {
    String s=null;
    Expression left=null;
    if (jj_2_6(2147483647)) {
        jj_consume_token(36);
        left = unaryExpr();
    } else {
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case 37:
            jj_consume_token(37);
            left = unaryExpr();
            left = UnaryExpression.createNegate(left);
            break;
        case NOT:
            jj_consume_token(NOT);
            left = unaryExpr();
            left = UnaryExpression.createNOT( asBooleanExpression(left) );
            break;
        case TRUE:
        case FALSE:
        case NULL:
        case DECIMAL_LITERAL:
        case HEX_LITERAL:
        case OCTAL_LITERAL:
        case FLOATING_POINT_LITERAL:
        case STRING_LITERAL:
        case ID:
        case QUOTED_ID:
        case 33:
            left = primaryExpr();
            break;
        default:
            jj_consume_token(-1);
            throw new ParseException();
        }
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// primaryExpr := literal | variable | '(' orExpression ')'
// Token 33/35 are the parentheses around a nested expression.
final public Expression primaryExpr() throws ParseException {
    Expression left=null;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case TRUE:
    case FALSE:
    case NULL:
    case DECIMAL_LITERAL:
    case HEX_LITERAL:
    case OCTAL_LITERAL:
    case FLOATING_POINT_LITERAL:
    case STRING_LITERAL:
        left = literal();
        break;
    case ID:
    case QUOTED_ID:
        left = variable();
        break;
    case 33:
        jj_consume_token(33);
        left = orExpression();
        jj_consume_token(35);
        break;
    default:
        jj_consume_token(-1);
        throw new ParseException();
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// literal := string | decimal | hex | octal | float | TRUE | FALSE | NULL
// Each numeric form is decoded by the corresponding ConstantExpression factory;
// the boolean/null keywords map to shared singleton constants.
final public ConstantExpression literal() throws ParseException {
    Token t;
    String s;
    ConstantExpression left=null;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case STRING_LITERAL:
        s = stringLitteral();
        left = new ConstantExpression(s);
        break;
    case DECIMAL_LITERAL:
        t = jj_consume_token(DECIMAL_LITERAL);
        left = ConstantExpression.createFromDecimal(t.image);
        break;
    case HEX_LITERAL:
        t = jj_consume_token(HEX_LITERAL);
        left = ConstantExpression.createFromHex(t.image);
        break;
    case OCTAL_LITERAL:
        t = jj_consume_token(OCTAL_LITERAL);
        left = ConstantExpression.createFromOctal(t.image);
        break;
    case FLOATING_POINT_LITERAL:
        t = jj_consume_token(FLOATING_POINT_LITERAL);
        left = ConstantExpression.createFloat(t.image);
        break;
    case TRUE:
        jj_consume_token(TRUE);
        left = ConstantExpression.TRUE;
        break;
    case FALSE:
        jj_consume_token(FALSE);
        left = ConstantExpression.FALSE;
        break;
    case NULL:
        jj_consume_token(NULL);
        left = ConstantExpression.NULL;
        break;
    default:
        jj_consume_token(-1);
        throw new ParseException();
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
}
// Decodes a STRING_LITERAL token: strips the surrounding quotes and collapses
// doubled quote characters (the '\u005c'' below is the quote char written with a
// unicode escape for the backslash, as JavaCC generates it).
// Note: "first" is unused - generated artifact, left untouched.
final public String stringLitteral() throws ParseException {
    Token t;
    StringBuffer rc = new StringBuffer();
    boolean first=true;
    t = jj_consume_token(STRING_LITERAL);
    // Decode the string value.
    String image = t.image;
    for( int i=1; i < image.length()-1; i++ ) {
        char c = image.charAt(i);
        if( c == '\u005c'' )
            i++;
        rc.append(c);
    }
    {if (true) return rc.toString();}
    throw new Error("Missing return statement in function");
}
  // Grammar production: variable — either a plain identifier or a quoted
  // identifier whose embedded doubled quotes ("") are collapsed to one.
  final public PropertyExpression variable() throws ParseException {
  Token t;
  PropertyExpression left=null;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case ID:
      t = jj_consume_token(ID);
      left = new PropertyExpression(t.image);
      break;
    case QUOTED_ID:
      t = jj_consume_token(QUOTED_ID);
      // Decode the string value: strip surrounding quotes and collapse
      // doubled double-quote characters.
      StringBuffer rc = new StringBuffer();
      String image = t.image;
      for( int i=1; i < image.length()-1; i++ ) {
          char c = image.charAt(i);
          if( c == '"' )
              i++;
              rc.append(c);
      }
      {if (true) return new PropertyExpression(rc.toString());}
      break;
    default:
      jj_consume_token(-1);
      throw new ParseException();
    }
    {if (true) return left;}
    throw new Error("Missing return statement in function");
  }
  // ---------------------------------------------------------------------
  // Machine-generated lookahead entry points (JavaCC). Each jj_2_<n>
  // speculatively scans up to xla tokens via the matching jj_3_<n> routine
  // without consuming them; LookaheadSuccess is thrown as a control-flow
  // signal meaning "matched within the lookahead limit".
  // ---------------------------------------------------------------------
  private boolean jj_2_1(int xla) {
    jj_la = xla; jj_lastpos = jj_scanpos = token;
    try { return !jj_3_1(); }
    catch(LookaheadSuccess ls) { return true; }
  }
  private boolean jj_2_2(int xla) {
    jj_la = xla; jj_lastpos = jj_scanpos = token;
    try { return !jj_3_2(); }
    catch(LookaheadSuccess ls) { return true; }
  }
  private boolean jj_2_3(int xla) {
    jj_la = xla; jj_lastpos = jj_scanpos = token;
    try { return !jj_3_3(); }
    catch(LookaheadSuccess ls) { return true; }
  }
  private boolean jj_2_4(int xla) {
    jj_la = xla; jj_lastpos = jj_scanpos = token;
    try { return !jj_3_4(); }
    catch(LookaheadSuccess ls) { return true; }
  }
  private boolean jj_2_5(int xla) {
    jj_la = xla; jj_lastpos = jj_scanpos = token;
    try { return !jj_3_5(); }
    catch(LookaheadSuccess ls) { return true; }
  }
  private boolean jj_2_6(int xla) {
    jj_la = xla; jj_lastpos = jj_scanpos = token;
    try { return !jj_3_6(); }
    catch(LookaheadSuccess ls) { return true; }
  }
  // ---------------------------------------------------------------------
  // Machine-generated syntactic lookahead routines (JavaCC). Each
  // jj_3R_<n> / jj_3_<n> method speculatively scans tokens for one grammar
  // expansion without consuming them; a return value of TRUE means the
  // expansion does NOT match at the current scan position. Do not edit by
  // hand — regenerate from the grammar instead.
  // ---------------------------------------------------------------------
  private boolean jj_3R_53() {
    if (jj_scan_token(LIKE)) return true;
    if (jj_3R_38()) return true;
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_58()) jj_scanpos = xsp;
    return false;
  }
  private boolean jj_3R_27() {
    if (jj_scan_token(DECIMAL_LITERAL)) return true;
    return false;
  }
  private boolean jj_3R_36() {
    if (jj_3R_39()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_40()) { jj_scanpos = xsp; break; }
    }
    return false;
  }
  private boolean jj_3R_52() {
    if (jj_scan_token(32)) return true;
    if (jj_3R_43()) return true;
    return false;
  }
  private boolean jj_3R_57() {
    if (jj_scan_token(37)) return true;
    if (jj_3R_9()) return true;
    return false;
  }
  private boolean jj_3_5() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_scan_token(36)) {
    jj_scanpos = xsp;
    if (jj_scan_token(37)) return true;
    }
    if (jj_3R_9()) return true;
    return false;
  }
  private boolean jj_3R_26() {
    if (jj_3R_38()) return true;
    return false;
  }
  private boolean jj_3R_51() {
    if (jj_scan_token(31)) return true;
    if (jj_3R_43()) return true;
    return false;
  }
  private boolean jj_3R_35() {
    if (jj_scan_token(QUOTED_ID)) return true;
    return false;
  }
  private boolean jj_3R_56() {
    if (jj_scan_token(36)) return true;
    if (jj_3R_9()) return true;
    return false;
  }
  private boolean jj_3R_37() {
    if (jj_scan_token(OR)) return true;
    if (jj_3R_36()) return true;
    return false;
  }
  private boolean jj_3R_23() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_26()) {
    jj_scanpos = xsp;
    if (jj_3R_27()) {
    jj_scanpos = xsp;
    if (jj_3R_28()) {
    jj_scanpos = xsp;
    if (jj_3R_29()) {
    jj_scanpos = xsp;
    if (jj_3R_30()) {
    jj_scanpos = xsp;
    if (jj_3R_31()) {
    jj_scanpos = xsp;
    if (jj_3R_32()) {
    jj_scanpos = xsp;
    if (jj_3R_33()) return true;
    }
    }
    }
    }
    }
    }
    }
    return false;
  }
  private boolean jj_3R_50() {
    if (jj_scan_token(30)) return true;
    if (jj_3R_43()) return true;
    return false;
  }
  private boolean jj_3R_34() {
    if (jj_scan_token(ID)) return true;
    return false;
  }
  private boolean jj_3R_48() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_56()) {
    jj_scanpos = xsp;
    if (jj_3R_57()) return true;
    }
    return false;
  }
  private boolean jj_3R_49() {
    if (jj_scan_token(29)) return true;
    if (jj_3R_43()) return true;
    return false;
  }
  private boolean jj_3R_44() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_49()) {
    jj_scanpos = xsp;
    if (jj_3R_50()) {
    jj_scanpos = xsp;
    if (jj_3R_51()) {
    jj_scanpos = xsp;
    if (jj_3R_52()) {
    jj_scanpos = xsp;
    if (jj_3R_53()) {
    jj_scanpos = xsp;
    if (jj_3_2()) {
    jj_scanpos = xsp;
    if (jj_3R_54()) {
    jj_scanpos = xsp;
    if (jj_3_3()) {
    jj_scanpos = xsp;
    if (jj_3R_55()) {
    jj_scanpos = xsp;
    if (jj_3_4()) return true;
    }
    }
    }
    }
    }
    }
    }
    }
    }
    return false;
  }
  private boolean jj_3R_24() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_34()) {
    jj_scanpos = xsp;
    if (jj_3R_35()) return true;
    }
    return false;
  }
  private boolean jj_3R_43() {
    if (jj_3R_9()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_48()) { jj_scanpos = xsp; break; }
    }
    return false;
  }
  private boolean jj_3R_25() {
    if (jj_3R_36()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_37()) { jj_scanpos = xsp; break; }
    }
    return false;
  }
  private boolean jj_3R_22() {
    if (jj_scan_token(33)) return true;
    if (jj_3R_25()) return true;
    if (jj_scan_token(35)) return true;
    return false;
  }
  private boolean jj_3R_21() {
    if (jj_3R_24()) return true;
    return false;
  }
  private boolean jj_3R_61() {
    if (jj_scan_token(34)) return true;
    if (jj_3R_38()) return true;
    return false;
  }
  private boolean jj_3R_20() {
    if (jj_3R_23()) return true;
    return false;
  }
  private boolean jj_3R_19() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_20()) {
    jj_scanpos = xsp;
    if (jj_3R_21()) {
    jj_scanpos = xsp;
    if (jj_3R_22()) return true;
    }
    }
    return false;
  }
  private boolean jj_3R_41() {
    if (jj_3R_43()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_44()) { jj_scanpos = xsp; break; }
    }
    return false;
  }
  private boolean jj_3R_38() {
    if (jj_scan_token(STRING_LITERAL)) return true;
    return false;
  }
  private boolean jj_3R_15() {
    if (jj_3R_19()) return true;
    return false;
  }
  private boolean jj_3R_59() {
    if (jj_scan_token(ESCAPE)) return true;
    if (jj_3R_38()) return true;
    return false;
  }
  private boolean jj_3_4() {
    if (jj_scan_token(NOT)) return true;
    if (jj_scan_token(IN)) return true;
    if (jj_scan_token(33)) return true;
    if (jj_3R_38()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_61()) { jj_scanpos = xsp; break; }
    }
    if (jj_scan_token(35)) return true;
    return false;
  }
  private boolean jj_3_6() {
    if (jj_scan_token(36)) return true;
    if (jj_3R_10()) return true;
    return false;
  }
  private boolean jj_3R_14() {
    if (jj_scan_token(NOT)) return true;
    if (jj_3R_10()) return true;
    return false;
  }
  private boolean jj_3R_60() {
    if (jj_scan_token(34)) return true;
    if (jj_3R_38()) return true;
    return false;
  }
  private boolean jj_3R_47() {
    if (jj_scan_token(IS)) return true;
    if (jj_scan_token(NOT)) return true;
    if (jj_scan_token(NULL)) return true;
    return false;
  }
  private boolean jj_3R_13() {
    if (jj_scan_token(37)) return true;
    if (jj_3R_10()) return true;
    return false;
  }
  private boolean jj_3R_33() {
    if (jj_scan_token(NULL)) return true;
    return false;
  }
  private boolean jj_3_1() {
    if (jj_scan_token(IS)) return true;
    if (jj_scan_token(NULL)) return true;
    return false;
  }
  private boolean jj_3R_12() {
    if (jj_scan_token(36)) return true;
    if (jj_3R_10()) return true;
    return false;
  }
  private boolean jj_3R_46() {
    if (jj_scan_token(28)) return true;
    if (jj_3R_41()) return true;
    return false;
  }
  private boolean jj_3R_10() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_12()) {
    jj_scanpos = xsp;
    if (jj_3R_13()) {
    jj_scanpos = xsp;
    if (jj_3R_14()) {
    jj_scanpos = xsp;
    if (jj_3R_15()) return true;
    }
    }
    }
    return false;
  }
  private boolean jj_3R_32() {
    if (jj_scan_token(FALSE)) return true;
    return false;
  }
  private boolean jj_3R_55() {
    if (jj_scan_token(IN)) return true;
    if (jj_scan_token(33)) return true;
    if (jj_3R_38()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_60()) { jj_scanpos = xsp; break; }
    }
    if (jj_scan_token(35)) return true;
    return false;
  }
  private boolean jj_3R_45() {
    if (jj_scan_token(27)) return true;
    if (jj_3R_41()) return true;
    return false;
  }
  private boolean jj_3R_42() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_45()) {
    jj_scanpos = xsp;
    if (jj_3R_46()) {
    jj_scanpos = xsp;
    if (jj_3_1()) {
    jj_scanpos = xsp;
    if (jj_3R_47()) return true;
    }
    }
    }
    return false;
  }
  private boolean jj_3R_31() {
    if (jj_scan_token(TRUE)) return true;
    return false;
  }
  private boolean jj_3_3() {
    if (jj_scan_token(NOT)) return true;
    if (jj_scan_token(BETWEEN)) return true;
    if (jj_3R_43()) return true;
    if (jj_scan_token(AND)) return true;
    if (jj_3R_43()) return true;
    return false;
  }
  private boolean jj_3R_18() {
    if (jj_scan_token(40)) return true;
    if (jj_3R_10()) return true;
    return false;
  }
  private boolean jj_3R_30() {
    if (jj_scan_token(FLOATING_POINT_LITERAL)) return true;
    return false;
  }
  private boolean jj_3R_54() {
    if (jj_scan_token(BETWEEN)) return true;
    if (jj_3R_43()) return true;
    if (jj_scan_token(AND)) return true;
    if (jj_3R_43()) return true;
    return false;
  }
  private boolean jj_3R_39() {
    if (jj_3R_41()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_42()) { jj_scanpos = xsp; break; }
    }
    return false;
  }
  private boolean jj_3R_17() {
    if (jj_scan_token(39)) return true;
    if (jj_3R_10()) return true;
    return false;
  }
  private boolean jj_3R_29() {
    if (jj_scan_token(OCTAL_LITERAL)) return true;
    return false;
  }
  private boolean jj_3R_58() {
    if (jj_scan_token(ESCAPE)) return true;
    if (jj_3R_38()) return true;
    return false;
  }
  private boolean jj_3_2() {
    if (jj_scan_token(NOT)) return true;
    if (jj_scan_token(LIKE)) return true;
    if (jj_3R_38()) return true;
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_59()) jj_scanpos = xsp;
    return false;
  }
  private boolean jj_3R_16() {
    if (jj_scan_token(38)) return true;
    if (jj_3R_10()) return true;
    return false;
  }
  private boolean jj_3R_11() {
    Token xsp;
    xsp = jj_scanpos;
    if (jj_3R_16()) {
    jj_scanpos = xsp;
    if (jj_3R_17()) {
    jj_scanpos = xsp;
    if (jj_3R_18()) return true;
    }
    }
    return false;
  }
  private boolean jj_3R_40() {
    if (jj_scan_token(AND)) return true;
    if (jj_3R_39()) return true;
    return false;
  }
  private boolean jj_3R_28() {
    if (jj_scan_token(HEX_LITERAL)) return true;
    return false;
  }
  private boolean jj_3R_9() {
    if (jj_3R_10()) return true;
    Token xsp;
    while (true) {
      xsp = jj_scanpos;
      if (jj_3R_11()) { jj_scanpos = xsp; break; }
    }
    return false;
  }
  /** Generated Token Manager. */
  public SelectorParserImplTokenManager token_source;
  // Character stream the token manager reads from.
  SimpleCharStream jj_input_stream;
  /** Current token. */
  public Token token;
  /** Next token. */
  public Token jj_nt;
  // Kind of the next token, or -1 when it has not been examined yet.
  private int jj_ntk;
  // Scan cursors used by the speculative lookahead (jj_2_*/jj_3*) routines.
  private Token jj_scanpos, jj_lastpos;
  // Remaining lookahead budget for the current speculative scan.
  private int jj_la;
  /** Constructor with InputStream. */
  public SelectorParserImpl(java.io.InputStream stream) {
     this(stream, null);
  }
  /** Constructor with InputStream and supplied encoding.
      An unsupported encoding is reported as an unchecked RuntimeException. */
  public SelectorParserImpl(java.io.InputStream stream, String encoding) {
    try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source = new SelectorParserImplTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream) {
     ReInit(stream, null);
  }
  /** Reinitialise with a supplied encoding; resets the token chain. */
  public void ReInit(java.io.InputStream stream, String encoding) {
    try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); }
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
  }
  /** Constructor. */
  public SelectorParserImpl(java.io.Reader stream) {
    jj_input_stream = new SimpleCharStream(stream, 1, 1);
    token_source = new SelectorParserImplTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
  }
  /** Reinitialise. */
  public void ReInit(java.io.Reader stream) {
    jj_input_stream.ReInit(stream, 1, 1);
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
  }
  /** Constructor with generated Token Manager. */
  public SelectorParserImpl(SelectorParserImplTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
  }
  /** Reinitialise. */
  public void ReInit(SelectorParserImplTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
  }
  // Consumes the next token if it has the expected kind; otherwise restores
  // the previous position and raises a ParseException.
  private Token jj_consume_token(int kind) throws ParseException {
    Token oldToken;
    if ((oldToken = token).next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    if (token.kind == kind) {
      return token;
    }
    token = oldToken;
    throw generateParseException();
  }
  // Control-flow signal thrown by jj_scan_token when a speculative scan
  // succeeds within the lookahead limit; caught in the jj_2_* entry points.
  static private final class LookaheadSuccess extends java.lang.Error { }
  final private LookaheadSuccess jj_ls = new LookaheadSuccess();
  // Advances the speculative scan cursor by one token, fetching from the
  // token manager on demand; returns true if the token kind does not match.
  private boolean jj_scan_token(int kind) {
    if (jj_scanpos == jj_lastpos) {
      jj_la--;
      if (jj_scanpos.next == null) {
        jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken();
      } else {
        jj_lastpos = jj_scanpos = jj_scanpos.next;
      }
    } else {
      jj_scanpos = jj_scanpos.next;
    }
    if (jj_scanpos.kind != kind) return true;
    if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls;
    return false;
  }
  /** Get the next Token. */
  final public Token getNextToken() {
    if (token.next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    return token;
  }
  /** Get the specific Token (index tokens ahead of the current one). */
  final public Token getToken(int index) {
    Token t = token;
    for (int i = 0; i < index; i++) {
      if (t.next != null) t = t.next;
      else t = t.next = token_source.getNextToken();
    }
    return t;
  }
  // Returns (and caches) the kind of the next token without consuming it.
  private int jj_ntk() {
    if ((jj_nt=token.next) == null)
      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
    else
      return (jj_ntk = jj_nt.kind);
  }
  /** Generate ParseException describing the offending token position. */
  public ParseException generateParseException() {
    Token errortok = token.next;
    int line = errortok.beginLine, column = errortok.beginColumn;
    String mess = (errortok.kind == 0) ? tokenImage[0] : errortok.image;
    return new ParseException("Parse error at line " + line + ", column " + column + ". Encountered: " + mess);
  }
  /** Enable tracing (no-op in this generated configuration). */
  final public void enable_tracing() {
  }
  /** Disable tracing (no-op in this generated configuration). */
  final public void disable_tracing() {
  }
}
| avranju/qpid-jms | qpid-jms-client/src/main/java/org/apache/qpid/jms/selector/parser/SelectorParserImpl.java | Java | apache-2.0 | 30,987 |
package com.mentor.nucleus.bp.model.compare.contentmergeviewer;
//=====================================================================
//
//File: $RCSfile: ModelMergeViewer.java,v $
//Version: $Revision: 1.2 $
//Modified: $Date: 2013/01/17 03:35:34 $
//
//(c) Copyright 2013-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import com.mentor.nucleus.bp.core.Ooaofooa;
/**
 * Base class for the viewers participating in a model compare/merge editor.
 * A concrete subclass supplies the SWT control and a title; this class holds
 * the per-side state: a client-assigned key, the side of the comparison the
 * viewer represents ({@link #LEFT}, {@link #RIGHT} or {@link #ANCESTOR}) and
 * the model root used as the compare input.
 */
public abstract class ModelMergeViewer extends Viewer {

	/** Identifies the left-hand side of the comparison. */
	public static final int LEFT = 0;
	/** Identifies the right-hand side of the comparison. */
	public static final int RIGHT = 1;
	/** Identifies the common-ancestor side of a three-way comparison. */
	public static final int ANCESTOR = 2;

	// Client-assigned key identifying this viewer instance.
	private Object key;
	// One of LEFT, RIGHT or ANCESTOR.
	private int type;
	// Model root used as the compare input for this viewer.
	private Ooaofooa compareRoot;

	/**
	 * Creates the SWT control for this viewer.
	 *
	 * @param parent the parent composite to create the control under
	 * @return the newly created control
	 */
	public abstract Control createControl(Composite parent);

	/** Returns the client-assigned key for this viewer, or null if unset. */
	public Object getKey() {
		return key;
	}

	/** Returns the side of the comparison this viewer represents. */
	public int getType() {
		return type;
	}

	/** Returns the model root used as this viewer's compare input. */
	public Ooaofooa getCompareRoot() {
		return compareRoot;
	}

	/** Sets the client-assigned key for this viewer. */
	public void setKey(Object key) {
		this.key = key;
	}

	/** Sets the comparison side; expected to be LEFT, RIGHT or ANCESTOR. */
	public void setType(int type) {
		this.type = type;
	}

	/** Sets the model root used as this viewer's compare input. */
	public void setCompareRoot(Ooaofooa compareRoot) {
		this.compareRoot = compareRoot;
	}

	/** Returns the human-readable title for this viewer. */
	public abstract String getTitle();
}
| HebaKhaled/bposs | src/com.mentor.nucleus.bp.model.compare/src/com/mentor/nucleus/bp/model/compare/contentmergeviewer/ModelMergeViewer.java | Java | apache-2.0 | 1,964 |
/**
* Created by Daniel Eaton on 12/11/2016.
*/
import
{Component} from "@angular/core";
import {Router} from "@angular/router";
/**
 * Settings page component; renders the settings view from a server-side
 * template. NOTE(review): Router is imported at the top of this file but
 * not used here — confirm before removing the import.
 */
@Component({
	templateUrl: "./templates/settings.php"
})
export class SettingsComponent { }
| DannyEaton/growify | app/components/settings-component.ts | TypeScript | apache-2.0 | 225 |
/*
* Orika - simpler, better and faster Java bean mapping
*
* Copyright (C) 2011-2013 Orika authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ma.glasnost.orika.test.community;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import java.util.ArrayList;
import java.util.List;
import ma.glasnost.orika.CustomConverter;
import ma.glasnost.orika.MapperFactory;
import ma.glasnost.orika.MappingContext;
import ma.glasnost.orika.impl.ConfigurableMapper;
import ma.glasnost.orika.metadata.Type;
import org.junit.Test;
/**
 * Verifies that converters can be applied to Lists (and other collections),
 * both as whole-collection converters and as per-element converters mapping
 * into collections or arrays.
 * <p>
 *
 * @see <a href="https://code.google.com/archive/p/orika/issues/44">https://code.google.com/archive/p/orika/</a>
 */
public class Issue44TestCase {

    /** Baseline: default by-name mapping of a nested collection, no converter. */
    @Test
    public void shouldMapCollection() {
        ConfigurableMapper mapper = new ConfigurableMapper() {
            @Override
            protected void configure(MapperFactory factory) {
                factory.classMap(Order.class, OrderDto.class).byDefault().register();
                factory.classMap(Product.class, ProductDto.class).byDefault().register();
            }
        };
        Order order = new Order();
        Product product = new Product();
        product.setName("myName");
        order.setProducts(asList(product));
        OrderDto orderDto = mapper.map(order, OrderDto.class);
        assertThat(orderDto.getProducts(), hasSize(1));
        assertThat(orderDto.getProducts().get(0).getName(), is(equalTo("myName")));
    }

    /** A converter registered for the whole List&lt;Product&gt; -&gt; List&lt;String&gt; mapping. */
    @Test
    public void shouldMapCollectionWithConverter() {
        ConfigurableMapper mapper = new ConfigurableMapper() {
            @Override
            protected void configure(MapperFactory factory) {
                factory.getConverterFactory().registerConverter("productToName", new CustomConverter<List<Product>, List<String>>() {
                    public List<String> convert(List<Product> source, Type<? extends List<String>> destinationType, MappingContext context) {
                        ArrayList<String> list = new ArrayList<String>(source.size());
                        for (Product product : source) {
                            list.add(product.getName());
                        }
                        return list;
                    }
                });
                factory.classMap(Order.class, OrderDto.class)
                        .fieldMap("products", "productNames")
                        .converter("productToName")
                        .add()
                        .register();
                factory.classMap(Product.class, ProductDto.class).byDefault().register();
            }
        };
        Order order = new Order();
        Product product = new Product();
        product.setName("myName");
        order.setProducts(asList(product));
        OrderDto orderDto = mapper.map(order, OrderDto.class);
        assertThat(orderDto.getProductNames(), hasSize(1));
        assertThat(orderDto.getProductNames().get(0), is(equalTo("myName")));
    }

    /** A per-element Product -&gt; String converter applied across a collection field. */
    @Test
    public void shouldMapCollectionWithElementConverter_ToCollection() {
        ConfigurableMapper mapper = new ConfigurableMapper() {
            @Override
            protected void configure(MapperFactory factory) {
                factory.getConverterFactory().registerConverter("productToName", new CustomConverter<Product, String>() {
                    public String convert(Product source, Type<? extends String> destinationType, MappingContext context) {
                        return source.getName();
                    }
                });
                factory.classMap(Order.class, OrderDto.class)
                        .fieldMap("products", "productNames")
                        .converter("productToName")
                        .add()
                        .register();
                factory.classMap(Product.class, ProductDto.class).byDefault().register();
            }
        };
        Order order = new Order();
        Product product = new Product();
        product.setName("myName");
        order.setProducts(asList(product));
        OrderDto orderDto = mapper.map(order, OrderDto.class);
        assertThat(orderDto.getProductNames(), hasSize(1));
        assertThat(orderDto.getProductNames().get(0), is(equalTo("myName")));
    }

    /** Same per-element converter, but the destination field is a String array. */
    @Test
    public void shouldMapCollectionWithElementConverter_ToArray() {
        ConfigurableMapper mapper = new ConfigurableMapper() {
            @Override
            protected void configure(MapperFactory factory) {
                factory.getConverterFactory().registerConverter("productToName", new CustomConverter<Product, String>() {
                    public String convert(Product source, Type<? extends String> destinationType, MappingContext context) {
                        return source.getName();
                    }
                });
                factory.classMap(Order.class, OrderDto2.class)
                        .fieldMap("products", "productNames")
                        .converter("productToName")
                        .add()
                        .register();
                factory.classMap(Product.class, ProductDto.class).byDefault().register();
            }
        };
        Order order = new Order();
        Product product = new Product();
        product.setName("myName");
        order.setProducts(asList(product));
        OrderDto2 orderDto = mapper.map(order, OrderDto2.class);
        assertThat(orderDto.getProductNames(), arrayWithSize(1));
        assertThat(orderDto.getProductNames()[0], is(equalTo("myName")));
    }

    /** Source-side bean with a single name property. */
    public static class Product {
        private String name;
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
    }

    /** Source-side bean holding a collection of products. */
    public static class Order {
        private List<Product> products;
        public List<Product> getProducts() {
            return products;
        }
        public void setProducts(List<Product> products) {
            this.products = products;
        }
    }

    /** Destination-side counterpart of {@link Product}. */
    public static class ProductDto {
        private String name;
        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
    }

    /** Destination bean exposing both a mapped collection and a name list. */
    public static class OrderDto {
        private List<ProductDto> products;
        private List<String> productNames;
        public List<ProductDto> getProducts() {
            return products;
        }
        public void setProducts(List<ProductDto> products) {
            this.products = products;
        }
        public List<String> getProductNames() {
            return productNames;
        }
        public void setProductNames(List<String> productNames) {
            this.productNames = productNames;
        }
    }

    /** Destination bean whose name field is an array rather than a List. */
    public static class OrderDto2 {
        private List<ProductDto> products;
        private String[] productNames;
        public List<ProductDto> getProducts() {
            return products;
        }
        public void setProducts(List<ProductDto> products) {
            this.products = products;
        }
        public String[] getProductNames() {
            return productNames;
        }
        public void setProductNames(String[] productNames) {
            this.productNames = productNames;
        }
    }
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Spanner.Admin.Database.V1.Snippets
{
// [START spanner_v1_generated_DatabaseAdmin_ListDatabases_sync]
using Google.Api.Gax;
using Google.Cloud.Spanner.Admin.Database.V1;
using Google.Cloud.Spanner.Common.V1;
using System;
    // Generated snippet class ("DO NOT EDIT"): demonstrates the three standard
    // ways of consuming a paged ListDatabases response.
    public sealed partial class GeneratedDatabaseAdminClientSnippets
    {
        /// <summary>Snippet for ListDatabases</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public void ListDatabasesRequestObject()
        {
            // Create client
            DatabaseAdminClient databaseAdminClient = DatabaseAdminClient.Create();
            // Initialize request argument(s)
            ListDatabasesRequest request = new ListDatabasesRequest
            {
                ParentAsInstanceName = InstanceName.FromProjectInstance("[PROJECT]", "[INSTANCE]"),
            };
            // Make the request
            PagedEnumerable<ListDatabasesResponse, Database> response = databaseAdminClient.ListDatabases(request);
            // Iterate over all response items, lazily performing RPCs as required
            foreach (Database item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (ListDatabasesResponse page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (Database item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }
            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<Database> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (Database item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
        }
    }
// [END spanner_v1_generated_DatabaseAdmin_ListDatabases_sync]
}
| googleapis/google-cloud-dotnet | apis/Google.Cloud.Spanner.Admin.Database.V1/Google.Cloud.Spanner.Admin.Database.V1.GeneratedSnippets/DatabaseAdminClient.ListDatabasesRequestObjectSnippet.g.cs | C# | apache-2.0 | 3,278 |
/**
* <copyright>
* </copyright>
*
*
*/
package eu.hyvar.mspl.manifest.resource.hymanifest.ui;
/**
 * The BracketHandler is responsible for handling the input of brackets. It
 * automatically adds the closing bracket when the opening counterpart is
 * entered in an editor, and ignores the input of a closing bracket when one
 * was automatically inserted immediately before.
 */
public interface IHymanifestBracketHandler {
	/**
	 * Returns true if a closing bracket was automatically added immediately
	 * before; used to decide whether a typed closing bracket should be ignored.
	 */
	public boolean addedClosingBracket();
	/**
	 * Returns the last closing bracket that was added automatically.
	 */
	public String getClosingBracket();
}
| HyVar/DarwinSPL | plugins/eu.hyvar.mspl.manifest.resource.hymanifest.ui/src-gen/eu/hyvar/mspl/manifest/resource/hymanifest/ui/IHymanifestBracketHandler.java | Java | apache-2.0 | 675 |
package org.apache.maven.plugins.site;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.lang.StringUtils;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Parameter;
/**
 * Common behaviour shared by the site staging mojos: resolution of the top
 * level distribution management site URL that relative staging directories
 * are calculated against.
 *
 * @author hboutemy
 * @since 3.3
 */
public abstract class AbstractStagingMojo
    extends AbstractDeployMojo
{
    /**
     * Top distribution management site url, for manual configuration when auto-calculated value
     * doesn't match expectations. Relative module directory will be calculated from this url.
     */
    @Parameter( property = "topSiteURL" )
    protected String topSiteURL;

    /**
     * The String "staging/".
     */
    protected static final String DEFAULT_STAGING_DIRECTORY = "staging/";

    /**
     * An explicitly configured <code>topSiteURL</code> always wins; when it is
     * absent, the URL of the topmost parent project sharing the same site is
     * used, which is a good heuristics in the common case.
     */
    @Override
    protected String determineTopDistributionManagementSiteUrl()
        throws MojoExecutionException
    {
        if ( StringUtils.isNotEmpty( topSiteURL ) )
        {
            return topSiteURL;
        }
        return getSite( getTopLevelProject( project ) ).getUrl();
    }
}
| khmarbaise/maven-plugins | maven-site-plugin/src/main/java/org/apache/maven/plugins/site/AbstractStagingMojo.java | Java | apache-2.0 | 2,132 |
'''
Copyright 2015 University of Auckland
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
class RespirationConstants(object):
    """Namespace for constants used by the respiration problem type."""

    class ExpirationType(object):
        """Integer identifiers distinguishing passive and active expiration."""
        PASSIVE = 0
        ACTIVE = 1
| OpenCMISS/neon | src/opencmiss/neon/core/problems/constants.py | Python | apache-2.0 | 714 |