| repo_name (string, lengths 4–116) | path (string, lengths 4–379) | size (string, lengths 1–7) | content (string, lengths 3–1.05M) | license (string, 15 classes) |
|---|---|---|---|---|
JaLandry/MeasureAuthoringTool_LatestSprint | mat/src/mat/model/clause/QDSAttributes.java | 1540 | package mat.model.clause;
import com.google.gwt.user.client.rpc.IsSerializable;
/**
* The Class QDSAttributes.
*/
/**
 * GWT-serializable model bean for a QDS attribute, carrying its identifier,
 * display name, owning data type id and attribute type. Implements
 * {@link IsSerializable} so instances can cross the GWT RPC wire.
 */
public class QDSAttributes implements IsSerializable {

	/** Unique identifier of this attribute. */
	private String id;

	/** Display name of this attribute. */
	private String name;

	/** Identifier of the data type this attribute is attached to. */
	private String dataTypeId;

	/** The QDS attribute type. */
	private String qDSAttributeType;

	/** @return the unique identifier */
	public String getId() {
		return id;
	}

	/** @param id the new unique identifier */
	public void setId(String id) {
		this.id = id;
	}

	/** @return the display name */
	public String getName() {
		return name;
	}

	/** @param name the new display name */
	public void setName(String name) {
		this.name = name;
	}

	/** @return the owning data type id */
	public String getDataTypeId() {
		return dataTypeId;
	}

	/** @param dataTypeId the new owning data type id */
	public void setDataTypeId(String dataTypeId) {
		this.dataTypeId = dataTypeId;
	}

	/** @return the QDS attribute type */
	public String getqDSAttributeType() {
		return qDSAttributeType;
	}

	/** @param qDSAttributeType the new QDS attribute type */
	public void setqDSAttributeType(String qDSAttributeType) {
		this.qDSAttributeType = qDSAttributeType;
	}
}
| apache-2.0 |
tfr42/j2ep | src/test/java/net/sf/j2ep/test/DirectoryRuleTest.java | 4706 | /*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.j2ep.test;
import junit.framework.TestCase;
import net.sf.j2ep.rules.DirectoryRule;
/**
 * Unit tests for {@link DirectoryRule}: directory normalization, request
 * matching, URI processing and URI reverting.
 */
public class DirectoryRuleTest extends TestCase {

	/** Rule under test, recreated before each test method. */
	private DirectoryRule rule;

	protected void setUp() throws Exception {
		rule = new DirectoryRule();
	}

	/** Builds a mock request whose servlet path is fixed to the given value. */
	private MockHttpServletRequest request(final String servletPath) {
		return new MockHttpServletRequest() {
			public String getServletPath() {
				return servletPath;
			}
		};
	}

	/** The directory setter must normalize missing slashes and reject null. */
	public void testSetDirectory() {
		rule.setDirectory("/test/");
		assertEquals("The directory didn't get saves properly", "/test/", rule.getDirectory());

		rule.setDirectory("/test");
		assertEquals("The DirectoryRule should add a slash at the end if there isn't one in the input", "/test/", rule.getDirectory());

		rule.setDirectory("test");
		assertEquals("The DirectoryRule should add slash at start and end", "/test/", rule.getDirectory());

		rule.setDirectory("");
		assertEquals("The DirectoryRule should make this '/'", "/", rule.getDirectory());

		try {
			rule.setDirectory(null);
			fail("Should throw exceptions, directory can't be null");
		} catch (IllegalArgumentException expected) {
			// Expected: null is not a valid directory.
		}
	}

	/** Requests are matched iff their servlet path starts with the directory. */
	public void testMatch() {
		rule.setDirectory("/");
		assertTrue("This URI should be matched", rule.matches(request("/test/hej.html")));

		rule.setDirectory("test/");
		assertTrue("This URI should be matched", rule.matches(request("/test/hej.html")));
		assertFalse("This URI shouldn't be matched", rule.matches(request("../test/hej.html")));
		assertFalse("This URI shouldn't be matched", rule.matches(request("test/.html")));
	}

	/** Processing strips the configured directory prefix from the URI. */
	public void testProcess() {
		/* In this test it is assumed that the input to process is indeed matched.
		 * This means that I will not send in illegal input to process.
		 */
		rule.setDirectory("/");
		assertEquals("The URI should be the same as before", "/test/hej.html", rule.process("/test/hej.html"));

		rule.setDirectory("/test/");
		assertEquals("The initial directory should be removed", "/hej.html", rule.process("/test/hej.html"));

		rule.setDirectory("/test/hej/");
		assertEquals("The initial directory should be removed", "/hej.html", rule.process("/test/hej/hej.html"));

		rule.setDirectory("/../test/hej/");
		assertEquals("The initial directory should be removed", "/hej.html", rule.process("/../test/hej/hej.html"));
	}

	/** Reverting re-adds the directory prefix to absolute proxied URIs only. */
	public void testRevert() {
		rule.setDirectory("/");
		assertEquals("The URI should be the same as before", "hej.html", rule.revert("hej.html"));
		assertEquals("The URI should be the same as before", "/test/hej.html", rule.revert("/test/hej.html"));

		rule.setDirectory("/test/");
		assertEquals("The URI should be the same as before", "http://www.hej.com", rule.revert("http://www.hej.com"));
		assertEquals("The URI should be the same as before", "C:/windows", rule.revert("C:/windows"));
		assertEquals("The URI should be the same as before", "hej.html", rule.revert("hej.html"));
		assertEquals("The URI should be the same as before", "../hej.html", rule.revert("../hej.html"));
		assertEquals("The URI should be rewritten", "/test/hej.html", rule.revert("/hej.html"));

		rule.setDirectory("/test/test2/");
		assertEquals("The URI should be rewritten", "/test/test2/hum/hej.html", rule.revert("/hum/hej.html"));
	}
}
| apache-2.0 |
jawadn/Tcheckit-android | src/com/fortutech/tcheckit/ejb/sessions/QuestionUniqueChoice.java | 1236 | package com.fortutech.tcheckit.ejb.sessions;
import java.util.Hashtable;
import org.ksoap2.serialization.PropertyInfo;
import org.ksoap2.serialization.SoapObject;
/**
 * SOAP-transferable model of a single-choice question: a {@link Question}
 * plus the array of choices the user may pick exactly one of.
 */
public final class QuestionUniqueChoice extends Question {

	/** The selectable choices, parsed from the "listChoice" SOAP property. */
	private com.fortutech.tcheckit.ejb.sessions.ChoiceQuestion[] listChoice;

	/** Creates an empty question; choices must be set afterwards. */
	public QuestionUniqueChoice() {
		super();
	}

	/**
	 * Builds the question from a SOAP response, delegating the common fields
	 * to {@link Question} and parsing the "listChoice" property.
	 *
	 * @param so the SOAP object received from the web service
	 */
	public QuestionUniqueChoice(SoapObject so) {
		super(so);
		this.setListChoice(ChoiceQuestion.parse(so, "listChoice"));
	}

	/**
	 * @param listChoice the choices to expose for this question
	 */
	public void setListChoice(com.fortutech.tcheckit.ejb.sessions.ChoiceQuestion[] listChoice) {
		this.listChoice = listChoice;
	}

	/**
	 * @return the choices for this question, or null if never set
	 */
	public com.fortutech.tcheckit.ejb.sessions.ChoiceQuestion[] getListChoice() {
		return this.listChoice;
	}

	/**
	 * Number of serializable properties declared by this class.
	 * NOTE(review): this ignores any properties declared by the
	 * {@link Question} superclass -- confirm that is intended.
	 *
	 * @return always 1 (the "listChoice" property)
	 */
	public int getPropertyCount() {
		return 1;
	}

	/**
	 * Returns the serializable property at the given index.
	 * Fix: previously returned null for every index even though
	 * {@link #getPropertyCount()} advertises one property, which left the
	 * declared property unreadable during serialization.
	 *
	 * @param __index the property index
	 * @return the property value, or null for an unknown index
	 */
	public Object getProperty(int __index) {
		switch(__index) {
		case 0:
			return this.listChoice;
		}
		return null;
	}

	/**
	 * Sets the serializable property at the given index. Deserialization is
	 * normally performed by the {@link #QuestionUniqueChoice(SoapObject)}
	 * constructor; a direct array assignment is accepted here so the property
	 * contract is symmetric with {@link #getProperty(int)}.
	 *
	 * @param __index the property index
	 * @param __obj the new property value
	 */
	public void setProperty(int __index, Object __obj) {
		switch(__index) {
		case 0:
			if (__obj instanceof com.fortutech.tcheckit.ejb.sessions.ChoiceQuestion[]) {
				this.listChoice = (com.fortutech.tcheckit.ejb.sessions.ChoiceQuestion[]) __obj;
			}
			break;
		}
	}

	/**
	 * Describes the property at the given index for the SOAP serializer.
	 *
	 * @param __index the property index
	 * @param __table property table (unused)
	 * @param __info the descriptor to fill in
	 */
	public void getPropertyInfo(int __index, Hashtable __table, PropertyInfo __info) {
		switch(__index) {
		case 0:
			__info.name = "listChoice";
			break;
		}
	}
}
| apache-2.0 |
DV8FromTheWorld/JDA | src/main/java/net/dv8tion/jda/api/interactions/components/buttons/ButtonStyle.java | 2531 | /*
* Copyright 2015 Austin Keener, Michael Ritter, Florian Spieß, and the JDA contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.dv8tion.jda.api.interactions.components.buttons;
import javax.annotation.Nonnull;
/**
* The available styles used for {@link Button Buttons}.
* <br>A button can have different styles to indicate its purpose.
*
* <p>To see what each button looks like here is an example cheatsheet:
* <br>
* <img alt="ButtonExample" src="https://raw.githubusercontent.com/DV8FromTheWorld/JDA/52377f69d1f3bfba909c51a449ac6b258f606956/assets/wiki/interactions/ButtonExamples.png">
*/
public enum ButtonStyle
{
    /** Fallback for style keys this enum does not know about. */
    UNKNOWN(-1),
    /** Blue "primary" style. Typical for accept, submit, or acknowledge actions; destructive confirmations should use {@link #DANGER} instead. */
    PRIMARY(1),
    /** Gray "secondary" style. Typical for cancel or less prominent options. */
    SECONDARY(2),
    /** Green success/approve style, indicating a positive action. */
    SUCCESS(3),
    /** Red danger/deny style, indicating a destructive action. */
    DANGER(4),
    /** Gray link style with a link attached. */
    LINK(5),
    ;

    private final int raw;

    ButtonStyle(int raw)
    {
        this.raw = raw;
    }

    /**
     * The raw style integer key.
     *
     * @return The raw style key
     */
    public int getKey()
    {
        return raw;
    }

    /**
     * Resolves a raw key back to its style constant.
     *
     * @param  key
     *         The key to convert
     *
     * @return The matching style, or {@link #UNKNOWN} when no constant uses that key
     */
    @Nonnull
    public static ButtonStyle fromKey(int key)
    {
        for (ButtonStyle candidate : values())
        {
            if (candidate.raw == key)
                return candidate;
        }
        return UNKNOWN;
    }
}
| apache-2.0 |
MienDev/IdentityServer4 | src/IdentityServer4/src/Models/DeviceFlowAuthorizationRequest.cs | 878 | // Copyright (c) Brock Allen & Dominick Baier. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using System.Collections.Generic;
namespace IdentityServer4.Models
{
/// <summary>
/// Represents contextual information about a device flow authorization
/// request: the client making the request and the scopes it asked for.
/// </summary>
public class DeviceFlowAuthorizationRequest
{
/// <summary>
/// Gets or sets the identifier of the client making the device flow request.
/// </summary>
/// <value>
/// The client identifier.
/// </value>
public string ClientId { get; set; }
/// <summary>
/// Gets or sets the scopes requested by the client.
/// </summary>
/// <value>
/// The scopes requested.
/// </value>
public IEnumerable<string> ScopesRequested { get; set; }
}
} | apache-2.0 |
emaskovsky/examples-other | Genetic Algorithms/stxFramework/src/com/styryx/gui/RootPanelHolder.java | 1163 | package com.styryx.gui;
import java.util.Locale;
import javax.swing.*;
/**
 * Implemented by objects that own a Swing root pane and can supply a
 * {@link PanelInitializer} to populate it.
 */
public interface RootPanelHolder {

	/**
	 * Template for building the holder's UI: installs the look-and-feel,
	 * creates the main panel, and optionally attaches a menu bar.
	 */
	public abstract class PanelInitializer {

		/**
		 * Obtains the holder's initializer, rejecting a null result.
		 *
		 * @param holder the holder to query
		 * @return the holder's initializer, never null
		 */
		public static PanelInitializer create(RootPanelHolder holder) {
			PanelInitializer result = holder.createPanelInitializer();
			if (result == null) {
				throw new NullPointerException("createPanelInitializer() must not return 'null'!");
			}
			return result;
		}

		/**
		 * Runs the full initialization sequence on the given root pane.
		 *
		 * @param rootPane the pane to populate
		 * @param locale the locale for UI texts
		 * @param closeHandler callback offered to the menu for closing the UI
		 * @throws Exception if any initialization step fails
		 */
		public void initialize(JRootPane rootPane, Locale locale, CloseHandler closeHandler) throws Exception {
			setupLookAndFeel();
			createMainPanel(rootPane, locale);
			JMenuBar menuBar = createMenu(closeHandler);
			if (menuBar != null) {
				rootPane.setJMenuBar(menuBar);
			}
		}

		/**
		 * Installs the look-and-feel; cross-platform by default.
		 *
		 * @throws Exception if the look-and-feel cannot be applied
		 */
		protected void setupLookAndFeel() throws Exception {
			UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName());
		}

		/**
		 * Builds the menu bar; null (no menu) by default.
		 *
		 * @param closeHandler callback a close menu item may invoke
		 * @return the menu bar to install, or null for none
		 */
		protected JMenuBar createMenu(CloseHandler closeHandler) {
			return null;
		}

		/**
		 * Creates and attaches the main panel to the root pane.
		 *
		 * @param rootPane the pane to attach to
		 * @param locale the locale for UI texts
		 * @throws Exception if the panel cannot be built
		 */
		protected abstract void createMainPanel(JRootPane rootPane, Locale locale) throws Exception;
	}

	PanelInitializer createPanelInitializer();
}
| apache-2.0 |
slvrgauthier/archives | Fac/Master/IN207/Warbot/src/trashier/LauncherGui.java | 9507 | /*
* LauncherGui.java -TurtleKit - A 'star logo' in MadKit
* Copyright (C) 2000-2007 Fabien Michel, Gregory Beurier
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package trashier;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.GridLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSlider;
import javax.swing.JTextField;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import edu.turtlekit2.Tk2Launcher;
import edu.turtlekit2.kernel.agents.SimulationRunner;
import edu.turtlekit2.ui.utils.GUIMessage;
import madkit.kernel.OPanel;
import madkit.system.property.PropertyAgent;
import madkit.utils.graphics.GraphicUtils;
/**
* this class defines the Graphics object to execute the Launcher method. It is included in tabbledLauncherPanel.
*
* @author Fabien MICHEL, Gregory BEURIER
*/
/**
 * Swing control panel for a TurtleKit {@link SimulationRunner}: builds the
 * start/stop/step/reset/wrap/python buttons and the speed slider, sends them
 * to the simulation UI via {@link GUIMessage}, and drives the runner in
 * response to user actions.
 *
 * @author Fabien MICHEL, Gregory BEURIER
 */
public class LauncherGui implements ActionListener {
    private static final long serialVersionUID = 1L;
    // Input fields (not wired up anywhere in this class).
    JTextField p, dD, pause, displayDelay;
    // The control buttons.
    public JButton startStop, wrapOnOff, addViewer, reset, step, python;
    // The simulation runner this GUI controls.
    SimulationRunner ll;
    public JPanel buttons, allbuttons, cycle;
    // Console output panel; see initConsole() (currently disabled).
    public OPanel textDisplay;
    public JButton bProp;
    public JPanel contentPane;
    // Lazily launched properties window agent; null until first requested.
    PropertyAgent prop;
    // Icons for the control buttons (iView and iProps are never loaded).
    ImageIcon iStart, iStep, iReset, iStop, iPythonEd, iView, iProps;

    /**
     * Creates the GUI controller for the given runner and preloads the button
     * icons from the classpath.
     *
     * @param l the simulation runner to control
     */
    public LauncherGui(SimulationRunner l) {
        ll = l;
        iStart = createImageIcon("images/Play16.gif", "Play");
        iStop = createImageIcon("images/Pause16.gif", "Pause");
        iStep = createImageIcon("images/StepForward16.gif", "Step");
        iReset = createImageIcon("images/Refresh16.gif","Reset");
        iPythonEd = createImageIcon("images/Edit16.gif","Python");
        // iView = createImageIcon("images/Zoom16.gif","New Viewer");
        // iProps = createImageIcon("images/Help16.gif","Properties");
    }

    /**
     * Adds the button to the panel and registers this object as its action
     * listener.
     *
     * @param b the button to register
     * @param p the panel to add it to
     */
    private void makebutton(JButton b, JPanel p) {
        p.add(b);
        b.addActionListener(this);
    }

    /**
     * Creates a button with the given action command, tooltip and optional
     * icon (falling back to a text label), registers this object as its
     * listener, and adds it to the panel when one is given.
     *
     * @param p the panel to add the button to, or null to skip adding
     * @param action the action command (also the label when no icon is given)
     * @param descr the tooltip text
     * @param i the icon, or null to use a text label
     * @return the configured button
     */
    JButton createButton(JPanel p, String action, String descr, ImageIcon i) {
        JButton b;
        if (i != null)
            b = new JButton(i);
        else
            b = new JButton(action);
        b.setToolTipText(descr);
        b.setMargin(new Insets(0, 0, 0, 0));
        b.setActionCommand(action);
        b.addActionListener(this);
        if (p != null)
            p.add(b);
        return b;
    }

    /**
     * Loads an icon from the classpath.
     *
     * @param path the classpath-relative resource path
     * @param description the icon's accessible description
     * @return the icon, or null (with a message on stderr) when the resource
     *         is missing
     */
    protected ImageIcon createImageIcon(String path,
    String description) {
        java.net.URL imgURL = getClass().getResource(path);
        if (imgURL != null) {
            return new ImageIcon(imgURL, description);
        } else {
            System.err.println("Couldn't find file: " + path);
            return null;
        }
    }

    /**
     * Loads an icon from the classpath without a description.
     * NOTE(review): unlike createImageIcon, this does not guard against a
     * missing resource -- new ImageIcon(null) would throw.
     *
     * @param path the classpath-relative resource path
     * @return the icon
     */
    ImageIcon makeIcon(String path) {
        java.net.URL url = this.getClass().getResource(path);
        return new ImageIcon(url);
        // if (path != null) {
        // ImageIcon i = null;
        // java.net.URL u = this.getClass().getResource(path);
        // if (u != null)
        // i = new ImageIcon(u);
        //
        // if ((i != null) && (i.getImage() != null))
        // return i;
        // }
        // return null;
    }

    /**
     * Updates a button's action command and, when given, its icon.
     *
     * @param b the button to update
     * @param action the new action command
     * @param icon the new icon, or null to keep the current one
     */
    void setButtonState(JButton b, String action, ImageIcon icon) {
        b.setActionCommand(action);
        if (icon != null)
            b.setIcon(icon);
    }

    /**
     * Builds the button row (start/step/reset/wrap/python), initializing the
     * wrap toggle from the environment's current wrap flag, and ships it to
     * the simulation UI's button zone.
     */
    public void initButtons(){
        if (ll.environment.wrap)
            wrapOnOff = new JButton("Wrap On");
        else
            wrapOnOff = new JButton("Wrap Off");
        addViewer = new JButton("Add Viewer");
        allbuttons = new JPanel(new GridLayout(1, 6));
        startStop = createButton(allbuttons, "start", "Run and stop the simulation", iStart);
        step = createButton(allbuttons, "Step", "Step the simulation", iStep);
        reset = createButton(allbuttons, "Reset", "Reset the simulation", iReset);
        makebutton(wrapOnOff, allbuttons);
        // addViewer = createButton(allbuttons, "Add Viewer", "Add a viewer", iView);
        python = createButton(allbuttons, "Python", "Launch a python editor", iPythonEd);
        ll.sendMessage(Tk2Launcher.COMMUNITY, ll.simulationGroup, "UIManager",
        new GUIMessage<JComponent>(allbuttons, SimulationUI.BUTTON_ZONE, ""));
    }

    /**
     * Builds the simulation-speed slider (range 0..500, initial 490; higher
     * values mean a shorter scheduler delay, see SliderListener) and ships it
     * to the simulation UI's button zone.
     */
    public void initSliders(){
        // Create the slider and its label.
        JLabel sliderLabel = new JLabel("Simulation speed", JLabel.CENTER);
        sliderLabel.setAlignmentX(Component.CENTER_ALIGNMENT);
        JSlider simulationSpeed = new JSlider(JSlider.HORIZONTAL, 0, 500, 490);
        simulationSpeed.addChangeListener(new SliderListener());
        simulationSpeed.setMajorTickSpacing(250);
        simulationSpeed.setMinorTickSpacing(10);
        simulationSpeed.setPaintTicks(true);
        simulationSpeed.setPaintLabels(false);
        simulationSpeed.setBorder(BorderFactory.createEmptyBorder(0, 0, 15, 0));
        contentPane = new JPanel(new BorderLayout());
        contentPane.add(sliderLabel, BorderLayout.WEST);
        contentPane.add(simulationSpeed, BorderLayout.CENTER);
        // bProp = createButton(null, "Properties", "Shows the simulation parameters", iProps);
        // contentPane.add(bProp, BorderLayout.EAST);
        ll.sendMessage(Tk2Launcher.COMMUNITY, ll.simulationGroup, "UIManager",
        new GUIMessage<JComponent>(contentPane, SimulationUI.BUTTON_ZONE, ""));
    }

    /**
     * Console setup, currently disabled: the whole body is commented out, so
     * textDisplay stays null (reset handling in actionPerformed dereferences
     * it -- see the NOTE there).
     */
    public void initConsole(){
        // textDisplay = new OPanel();//JTextArea();
        // textDisplay.jscrollpane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
        // textDisplay.jscrollpane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED);
        // ll.setOutputWriter(textDisplay.getOut());
        //
        // ll.sendMessage(Tk2Launcher.COMMUNITY, ll.simulationGroup, "UIManager",
        // new GUIMessage<JComponent>(textDisplay, SimulationUI.CONSOLE_ZONE, "Console"));
    }

    /** Builds the complete control UI: buttons, console (disabled), slider. */
    public void initialisation() {
        initButtons();
        initConsole();
        initSliders();
    }

    /**
     * Dispatches all button clicks by comparing the event source against the
     * button fields, toggling the runner's start/run/wrap state accordingly.
     *
     * @param e the button click event
     */
    public void actionPerformed(ActionEvent e) {
        Object s = e.getSource();
        if (s == startStop) {
            // First click ever: flip to "running" and let the runner start.
            if (startStop.getActionCommand().equalsIgnoreCase("Start")) {
                startStop.setBackground(Color.green);
                setButtonState(startStop, "Stop", iStop);
                //startStop.setText("Stop");
                ll.start = true;
                return;
            }
            // Running -> paused.
            if (ll.run && ll.start) {
                startStop.setBackground(Color.red);
                //startStop.setText("Run");
                setButtonState(startStop, "Run", iStart);
                ll.setStop();
                return;
            } else if (ll.start) {
                // Paused -> running again.
                startStop.setBackground(Color.green);
                setButtonState(startStop, "Stop", iStop);
                //startStop.setText("Stop");
                ll.setStop();
            }
        } else if (s == addViewer && ll.start)
            // NOTE(review): viewer creation is commented out; this branch
            // currently only prints a debug marker.
            // ll.addViewer();
            System.err.println("TEST");
        else if (s == reset && ll.start) {
            // NOTE(review): textDisplay is only assigned in the disabled
            // initConsole() body, so this dereference looks like it would
            // NPE -- confirm against the runtime wiring.
            textDisplay.clearOutput();
            ll.setReset();
            ll.run = true;
            startStop.setBackground(Color.green);
            //startStop.setText("Stop");
            setButtonState(startStop, "Stop", iStop);
        } else if (s == wrapOnOff) {
            // Toggle the environment's wrap mode and the button label.
            if (wrapOnOff.getText().equalsIgnoreCase("Wrap On")) {
                ll.setWrapModeOn(false);
                wrapOnOff.setText("Wrap Off");
            } else {
                ll.setWrapModeOn(true);
                wrapOnOff.setText("Wrap On");
            }
        }
        //if (s==p) ll.setCyclePause(Integer.parseInt(p.getText()));
        //if (s==dD) ll.setCycleDisplayEvery(Integer.parseInt(dD.getText()));
        else if (s == step) {
            // Stepping while running: pause first, then advance one step.
            if (ll.start && ll.run) {
                startStop.setBackground(Color.red);
                // startStop.setText("Run");
                setButtonState(startStop, "Run", iStart);
                ll.setStop();
                ll.stepByStep();
                return;
            }
            if (ll.start) {
                ll.stepByStep();
                return;
            }
        } else if (s == bProp) {
            // Launch the properties window once, then just re-show it.
            if (prop == null) {
                prop = new PropertyAgent(ll);
                ll.launchAgent(prop,"Properties of " + ll.simulationGroup, true);
            } else { // check
                GraphicUtils.getFrameParent((JComponent) prop.getGUIObject()).setVisible(true);
            }
        } else if (s == python) {
            // Launch the python editor, pausing a running simulation; both
            // catch branches report the same applet-mode limitation.
            try {
                ll.println("launching python. Please wait...");
                ll.launchPython();
                if (ll.run) {
                    startStop.setBackground(Color.red);
                    //startStop.setText("Run");
                    setButtonState(startStop, "Run", iStart);
                    ll.setStop();
                    ll.stepByStep();
                    return;
                }
            } catch (NoClassDefFoundError ex) {
                ll.println("can't launch python in applet mode");
            } catch (Exception ex) {
                ll.println("can't launch python in applet mode");
            }
        }
    }

    /** Kills the properties window agent, if one was ever launched. */
    void removePropertyWindows() {
        if (prop != null) {
            ll.killAgent(prop);
        }
    }

    /**
     * Translates slider movements into the scheduler delay: a higher slider
     * value yields a smaller delay (faster simulation). Only applied once the
     * user releases the slider.
     */
    class SliderListener implements ChangeListener {
        public void stateChanged(ChangeEvent e) {
            JSlider source = (JSlider) e.getSource();
            if (!source.getValueIsAdjusting()) {
                ll.sch.delay = (500 - (int) source.getValue());
            }
        }
    }
}
| apache-2.0 |
tadayosi/camel | core/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KafkaComponentBuilderFactory.java | 96349 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.kafka.KafkaComponent;
/**
* Sent and receive messages to/from an Apache Kafka broker.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface KafkaComponentBuilderFactory {
/**
* Kafka (camel-kafka)
* Sent and receive messages to/from an Apache Kafka broker.
*
* Category: messaging
* Since: 2.13
* Maven coordinates: org.apache.camel:camel-kafka
*
* @return the dsl builder
*/
static KafkaComponentBuilder kafka() {
return new KafkaComponentBuilderImpl();
}
/**
* Builder for the Kafka component.
*/
interface KafkaComponentBuilder extends ComponentBuilder<KafkaComponent> {
/**
* Sets additional properties for either kafka consumer or kafka
* producer in case they can't be set directly on the camel
* configurations (e.g: new Kafka properties that are not reflected yet
* in Camel configurations), the properties have to be prefixed with
* additionalProperties.. E.g:
* additionalProperties.transactional.id=12345&amp;additionalProperties.schema.registry.url=http://localhost:8811/avro.
*
* The option is a: <code>java.util.Map&lt;java.lang.String,
* java.lang.Object&gt;</code> type.
*
* Group: common
*
* @param additionalProperties the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder additionalProperties(
java.util.Map<java.lang.String, java.lang.Object> additionalProperties) {
doSetProperty("additionalProperties", additionalProperties);
return this;
}
/**
* URL of the Kafka brokers to use. The format is
* host1:port1,host2:port2, and the list can be a subset of brokers or a
* VIP pointing to a subset of brokers. This option is known as
* bootstrap.servers in the Kafka documentation.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param brokers the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder brokers(java.lang.String brokers) {
doSetProperty("brokers", brokers);
return this;
}
/**
* The client id is a user-specified string sent in each request to help
* trace calls. It should logically identify the application making the
* request.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param clientId the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder clientId(java.lang.String clientId) {
doSetProperty("clientId", clientId);
return this;
}
/**
* Allows to pre-configure the Kafka component with common options that
* the endpoints will reuse.
*
* The option is a:
* <code>org.apache.camel.component.kafka.KafkaConfiguration</code> type.
*
* Group: common
*
* @param configuration the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder configuration(
org.apache.camel.component.kafka.KafkaConfiguration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* To use a custom HeaderFilterStrategy to filter header to and from
* Camel message.
*
* The option is a:
* <code>org.apache.camel.spi.HeaderFilterStrategy</code>
* type.
*
* Group: common
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder headerFilterStrategy(
org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* The maximum amount of time in milliseconds to wait when reconnecting
* to a broker that has repeatedly failed to connect. If provided, the
* backoff per host will increase exponentially for each consecutive
* connection failure, up to this maximum. After calculating the backoff
* increase, 20% random jitter is added to avoid connection storms.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 1000
* Group: common
*
* @param reconnectBackoffMaxMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder reconnectBackoffMaxMs(
java.lang.Integer reconnectBackoffMaxMs) {
doSetProperty("reconnectBackoffMaxMs", reconnectBackoffMaxMs);
return this;
}
/**
* Timeout in milli seconds to wait gracefully for the consumer or
* producer to shutdown and terminate its worker threads.
*
* The option is a: <code>int</code> type.
*
* Default: 30000
* Group: common
*
* @param shutdownTimeout the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder shutdownTimeout(int shutdownTimeout) {
doSetProperty("shutdownTimeout", shutdownTimeout);
return this;
}
/**
* Whether to allow doing manual commits via KafkaManualCommit. If this
* option is enabled then an instance of KafkaManualCommit is stored on
* the Exchange message header, which allows end users to access this
* API and perform manual offset commits via the Kafka consumer.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param allowManualCommit the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder allowManualCommit(
boolean allowManualCommit) {
doSetProperty("allowManualCommit", allowManualCommit);
return this;
}
/**
* If true, periodically commit to ZooKeeper the offset of messages
* already fetched by the consumer. This committed offset will be used
* when the process fails as the position from which the new consumer
* will begin.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param autoCommitEnable the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder autoCommitEnable(
java.lang.Boolean autoCommitEnable) {
doSetProperty("autoCommitEnable", autoCommitEnable);
return this;
}
/**
* The frequency in ms that the consumer offsets are committed to
* zookeeper.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 5000
* Group: consumer
*
* @param autoCommitIntervalMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder autoCommitIntervalMs(
java.lang.Integer autoCommitIntervalMs) {
doSetProperty("autoCommitIntervalMs", autoCommitIntervalMs);
return this;
}
/**
* Whether to perform an explicit auto commit when the consumer stops to
* ensure the broker has a commit from the last consumed message. This
* requires the option autoCommitEnable is turned on. The possible
* values are: sync, async, or none. And sync is the default value.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: sync
* Group: consumer
*
* @param autoCommitOnStop the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder autoCommitOnStop(
java.lang.String autoCommitOnStop) {
doSetProperty("autoCommitOnStop", autoCommitOnStop);
return this;
}
/**
* What to do when there is no initial offset in ZooKeeper or if an
* offset is out of range: earliest : automatically reset the offset to
* the earliest offset latest : automatically reset the offset to the
* latest offset fail: throw exception to the consumer.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: latest
* Group: consumer
*
* @param autoOffsetReset the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder autoOffsetReset(
java.lang.String autoOffsetReset) {
doSetProperty("autoOffsetReset", autoOffsetReset);
return this;
}
/**
* This options controls what happens when a consumer is processing an
* exchange and it fails. If the option is false then the consumer
* continues to the next message and processes it. If the option is true
* then the consumer breaks out, and will seek back to offset of the
* message that caused a failure, and then re-attempt to process this
* message. However this can lead to endless processing of the same
* message if its bound to fail every time, eg a poison message.
* Therefore its recommended to deal with that for example by using
* Camel's error handler.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param breakOnFirstError the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder breakOnFirstError(
boolean breakOnFirstError) {
doSetProperty("breakOnFirstError", breakOnFirstError);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Automatically check the CRC32 of the records consumed. This ensures
* no on-the-wire or on-disk corruption to the messages occurred. This
* check adds some overhead, so it may be disabled in cases seeking
* extreme performance.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param checkCrcs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder checkCrcs(java.lang.Boolean checkCrcs) {
doSetProperty("checkCrcs", checkCrcs);
return this;
}
/**
* The configuration controls the maximum amount of time the client will
* wait for the response of a request. If the response is not received
* before the timeout elapses the client will resend the request if
* necessary or fail the request if retries are exhausted.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 40000
* Group: consumer
*
* @param consumerRequestTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder consumerRequestTimeoutMs(
java.lang.Integer consumerRequestTimeoutMs) {
doSetProperty("consumerRequestTimeoutMs", consumerRequestTimeoutMs);
return this;
}
/**
* The number of consumers that connect to kafka server.
*
* The option is a: <code>int</code> type.
*
* Default: 1
* Group: consumer
*
* @param consumersCount the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder consumersCount(int consumersCount) {
doSetProperty("consumersCount", consumersCount);
return this;
}
/**
* Number of concurrent consumers on the consumer.
*
* The option is a: <code>int</code> type.
*
* Default: 10
* Group: consumer
*
* @param consumerStreams the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder consumerStreams(int consumerStreams) {
doSetProperty("consumerStreams", consumerStreams);
return this;
}
/**
* The maximum amount of data the server should return for a fetch
* request This is not an absolute maximum, if the first message in the
* first non-empty partition of the fetch is larger than this value, the
* message will still be returned to ensure that the consumer can make
* progress. The maximum message size accepted by the broker is defined
* via message.max.bytes (broker config) or max.message.bytes (topic
* config). Note that the consumer performs multiple fetches in
* parallel.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 52428800
* Group: consumer
*
* @param fetchMaxBytes the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder fetchMaxBytes(
java.lang.Integer fetchMaxBytes) {
doSetProperty("fetchMaxBytes", fetchMaxBytes);
return this;
}
/**
* The minimum amount of data the server should return for a fetch
* request. If insufficient data is available the request will wait for
* that much data to accumulate before answering the request.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 1
* Group: consumer
*
* @param fetchMinBytes the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder fetchMinBytes(
java.lang.Integer fetchMinBytes) {
doSetProperty("fetchMinBytes", fetchMinBytes);
return this;
}
/**
* The maximum amount of time the server will block before answering the
* fetch request if there isn't sufficient data to immediately satisfy
* fetch.min.bytes.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 500
* Group: consumer
*
* @param fetchWaitMaxMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder fetchWaitMaxMs(
java.lang.Integer fetchWaitMaxMs) {
doSetProperty("fetchWaitMaxMs", fetchWaitMaxMs);
return this;
}
/**
* A string that uniquely identifies the group of consumer processes to
* which this consumer belongs. By setting the same group id multiple
* processes indicate that they are all part of the same consumer group.
* This option is required for consumers.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param groupId the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder groupId(java.lang.String groupId) {
doSetProperty("groupId", groupId);
return this;
}
/**
* A unique identifier of the consumer instance provided by the end
* user. Only non-empty strings are permitted. If set, the consumer is
* treated as a static member, which means that only one instance with
* this ID is allowed in the consumer group at any time. This can be
* used in combination with a larger session timeout to avoid group
* rebalances caused by transient unavailability (e.g. process
* restarts). If not set, the consumer will join the group as a dynamic
* member, which is the traditional behavior.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param groupInstanceId the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder groupInstanceId(
java.lang.String groupInstanceId) {
doSetProperty("groupInstanceId", groupInstanceId);
return this;
}
/**
* To use a custom KafkaHeaderDeserializer to deserialize kafka headers
* values.
*
* The option is a:
* <code>org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer</code> type.
*
* Group: consumer
*
* @param headerDeserializer the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder headerDeserializer(
org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer headerDeserializer) {
doSetProperty("headerDeserializer", headerDeserializer);
return this;
}
/**
* The expected time between heartbeats to the consumer coordinator when
* using Kafka's group management facilities. Heartbeats are used to
* ensure that the consumer's session stays active and to facilitate
* rebalancing when new consumers join or leave the group. The value
* must be set lower than session.timeout.ms, but typically should be
* set no higher than 1/3 of that value. It can be adjusted even lower
* to control the expected time for normal rebalances.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 3000
* Group: consumer
*
* @param heartbeatIntervalMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder heartbeatIntervalMs(
java.lang.Integer heartbeatIntervalMs) {
doSetProperty("heartbeatIntervalMs", heartbeatIntervalMs);
return this;
}
/**
* Deserializer class for key that implements the Deserializer
* interface.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.common.serialization.StringDeserializer
* Group: consumer
*
* @param keyDeserializer the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder keyDeserializer(
java.lang.String keyDeserializer) {
doSetProperty("keyDeserializer", keyDeserializer);
return this;
}
/**
* The maximum amount of data per-partition the server will return. The
* maximum total memory used for a request will be #partitions
* max.partition.fetch.bytes. This size must be at least as large as the
* maximum message size the server allows or else it is possible for the
* producer to send messages larger than the consumer can fetch. If that
* happens, the consumer can get stuck trying to fetch a large message
* on a certain partition.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 1048576
* Group: consumer
*
* @param maxPartitionFetchBytes the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder maxPartitionFetchBytes(
java.lang.Integer maxPartitionFetchBytes) {
doSetProperty("maxPartitionFetchBytes", maxPartitionFetchBytes);
return this;
}
/**
* The maximum delay between invocations of poll() when using consumer
* group management. This places an upper bound on the amount of time
* that the consumer can be idle before fetching more records. If poll()
* is not called before expiration of this timeout, then the consumer is
* considered failed and the group will rebalance in order to reassign
* the partitions to another member.
*
* The option is a: <code>java.lang.Long</code> type.
*
* Group: consumer
*
* @param maxPollIntervalMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder maxPollIntervalMs(
java.lang.Long maxPollIntervalMs) {
doSetProperty("maxPollIntervalMs", maxPollIntervalMs);
return this;
}
/**
* The maximum number of records returned in a single call to poll().
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 500
* Group: consumer
*
* @param maxPollRecords the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder maxPollRecords(
java.lang.Integer maxPollRecords) {
doSetProperty("maxPollRecords", maxPollRecords);
return this;
}
/**
* The offset repository to use in order to locally store the offset of
* each partition of the topic. Defining one will disable the
* autocommit.
*
* The option is a:
* <code>org.apache.camel.spi.StateRepository&lt;java.lang.String, java.lang.String&gt;</code> type.
*
* Group: consumer
*
* @param offsetRepository the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder offsetRepository(
org.apache.camel.spi.StateRepository<java.lang.String, java.lang.String> offsetRepository) {
doSetProperty("offsetRepository", offsetRepository);
return this;
}
/**
* The class name of the partition assignment strategy that the client
* will use to distribute partition ownership amongst consumer instances
* when group management is used.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.clients.consumer.RangeAssignor
* Group: consumer
*
* @param partitionAssignor the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder partitionAssignor(
java.lang.String partitionAssignor) {
doSetProperty("partitionAssignor", partitionAssignor);
return this;
}
/**
* What to do if kafka threw an exception while polling for new
* messages. Will by default use the value from the component
* configuration unless an explicit value has been configured on the
* endpoint level. DISCARD will discard the message and continue to poll
* next message. ERROR_HANDLER will use Camel's error handler to process
* the exception, and afterwards continue to poll next message.
* RECONNECT will re-connect the consumer and try poll the message again
* RETRY will let the consumer retry polling the same message again STOP
* will stop the consumer (have to be manually started/restarted if the
* consumer should be able to consume messages again).
*
* The option is a:
* <code>org.apache.camel.component.kafka.PollOnError</code>
* type.
*
* Default: ERROR_HANDLER
* Group: consumer
*
* @param pollOnError the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder pollOnError(
org.apache.camel.component.kafka.PollOnError pollOnError) {
doSetProperty("pollOnError", pollOnError);
return this;
}
/**
* The timeout used when polling the KafkaConsumer.
*
* The option is a: <code>java.lang.Long</code> type.
*
* Default: 5000
* Group: consumer
*
* @param pollTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder pollTimeoutMs(java.lang.Long pollTimeoutMs) {
doSetProperty("pollTimeoutMs", pollTimeoutMs);
return this;
}
/**
* Set if KafkaConsumer will read from beginning or end on startup:
* beginning : read from beginning end : read from end This is replacing
* the earlier property seekToBeginning.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param seekTo the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder seekTo(java.lang.String seekTo) {
doSetProperty("seekTo", seekTo);
return this;
}
/**
* The timeout used to detect failures when using Kafka's group
* management facilities.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 10000
* Group: consumer
*
* @param sessionTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sessionTimeoutMs(
java.lang.Integer sessionTimeoutMs) {
doSetProperty("sessionTimeoutMs", sessionTimeoutMs);
return this;
}
/**
* This enables the use of a specific Avro reader for use with the
* Confluent Platform schema registry and the
* io.confluent.kafka.serializers.KafkaAvroDeserializer. This option is
* only available in the Confluent Platform (not standard Apache Kafka).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param specificAvroReader the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder specificAvroReader(
boolean specificAvroReader) {
doSetProperty("specificAvroReader", specificAvroReader);
return this;
}
/**
* Whether the topic is a pattern (regular expression). This can be used
* to subscribe to dynamic number of topics matching the pattern.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param topicIsPattern the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder topicIsPattern(boolean topicIsPattern) {
doSetProperty("topicIsPattern", topicIsPattern);
return this;
}
/**
* Deserializer class for value that implements the Deserializer
* interface.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.common.serialization.StringDeserializer
* Group: consumer
*
* @param valueDeserializer the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder valueDeserializer(
java.lang.String valueDeserializer) {
doSetProperty("valueDeserializer", valueDeserializer);
return this;
}
/**
* Factory to use for creating KafkaManualCommit instances. This allows
* to plugin a custom factory to create custom KafkaManualCommit
* instances in case special logic is needed when doing manual commits
* that deviates from the default implementation that comes out of the
* box.
*
* The option is a:
* <code>org.apache.camel.component.kafka.KafkaManualCommitFactory</code> type.
*
* Group: consumer (advanced)
*
* @param kafkaManualCommitFactory the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder kafkaManualCommitFactory(
org.apache.camel.component.kafka.KafkaManualCommitFactory kafkaManualCommitFactory) {
doSetProperty("kafkaManualCommitFactory", kafkaManualCommitFactory);
return this;
}
/**
* To use a custom strategy with the consumer to control how to handle
* exceptions thrown from the Kafka broker while pooling messages.
*
* The option is a:
* <code>org.apache.camel.component.kafka.PollExceptionStrategy</code> type.
*
* Group: consumer (advanced)
*
* @param pollExceptionStrategy the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder pollExceptionStrategy(
org.apache.camel.component.kafka.PollExceptionStrategy pollExceptionStrategy) {
doSetProperty("pollExceptionStrategy", pollExceptionStrategy);
return this;
}
/**
* The total bytes of memory the producer can use to buffer records
* waiting to be sent to the server. If records are sent faster than
* they can be delivered to the server the producer will either block or
* throw an exception based on the preference specified by
* block.on.buffer.full.This setting should correspond roughly to the
* total memory the producer will use, but is not a hard bound since not
* all memory the producer uses is used for buffering. Some additional
* memory will be used for compression (if compression is enabled) as
* well as for maintaining in-flight requests.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 33554432
* Group: producer
*
* @param bufferMemorySize the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder bufferMemorySize(
java.lang.Integer bufferMemorySize) {
doSetProperty("bufferMemorySize", bufferMemorySize);
return this;
}
/**
* This parameter allows you to specify the compression codec for all
* data generated by this producer. Valid values are none, gzip and
* snappy.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: none
* Group: producer
*
* @param compressionCodec the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder compressionCodec(
java.lang.String compressionCodec) {
doSetProperty("compressionCodec", compressionCodec);
return this;
}
/**
* Close idle connections after the number of milliseconds specified by
* this config.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 540000
* Group: producer
*
* @param connectionMaxIdleMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder connectionMaxIdleMs(
java.lang.Integer connectionMaxIdleMs) {
doSetProperty("connectionMaxIdleMs", connectionMaxIdleMs);
return this;
}
/**
* An upper bound on the time to report success or failure after a call
* to send() returns. This limits the total time that a record will be
* delayed prior to sending, the time to await acknowledgement from the
* broker (if expected), and the time allowed for retriable send
* failures.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 120000
* Group: producer
*
* @param deliveryTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder deliveryTimeoutMs(
java.lang.Integer deliveryTimeoutMs) {
doSetProperty("deliveryTimeoutMs", deliveryTimeoutMs);
return this;
}
/**
* If set to 'true' the producer will ensure that exactly one copy of
* each message is written in the stream. If 'false', producer retries
* may write duplicates of the retried message in the stream. If set to
* true this option will require max.in.flight.requests.per.connection
* to be set to 1 and retries cannot be zero and additionally acks must
* be set to 'all'.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param enableIdempotence the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder enableIdempotence(
boolean enableIdempotence) {
doSetProperty("enableIdempotence", enableIdempotence);
return this;
}
/**
* To use a custom KafkaHeaderSerializer to serialize kafka headers
* values.
*
* The option is a:
* <code>org.apache.camel.component.kafka.serde.KafkaHeaderSerializer</code> type.
*
* Group: producer
*
* @param headerSerializer the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder headerSerializer(
org.apache.camel.component.kafka.serde.KafkaHeaderSerializer headerSerializer) {
doSetProperty("headerSerializer", headerSerializer);
return this;
}
/**
* The record key (or null if no key is specified). If this option has
* been configured then it take precedence over header
* KafkaConstants#KEY.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param key the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder key(java.lang.String key) {
doSetProperty("key", key);
return this;
}
/**
* The serializer class for keys (defaults to the same as for messages
* if nothing is given).
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.common.serialization.StringSerializer
* Group: producer
*
* @param keySerializer the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder keySerializer(
java.lang.String keySerializer) {
doSetProperty("keySerializer", keySerializer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* The producer groups together any records that arrive in between
* request transmissions into a single batched request. Normally this
* occurs only under load when records arrive faster than they can be
* sent out. However in some circumstances the client may want to reduce
* the number of requests even under moderate load. This setting
* accomplishes this by adding a small amount of artificial delay that
* is, rather than immediately sending out a record the producer will
* wait for up to the given delay to allow other records to be sent so
* that the sends can be batched together. This can be thought of as
* analogous to Nagle's algorithm in TCP. This setting gives the upper
* bound on the delay for batching: once we get batch.size worth of
* records for a partition it will be sent immediately regardless of
* this setting, however if we have fewer than this many bytes
* accumulated for this partition we will 'linger' for the specified
* time waiting for more records to show up. This setting defaults to 0
* (i.e. no delay). Setting linger.ms=5, for example, would have the
* effect of reducing the number of requests sent but would add up to
* 5ms of latency to records sent in the absense of load.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 0
* Group: producer
*
* @param lingerMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder lingerMs(java.lang.Integer lingerMs) {
doSetProperty("lingerMs", lingerMs);
return this;
}
/**
* The configuration controls how long sending to kafka will block.
* These methods can be blocked for multiple reasons. For e.g: buffer
* full, metadata unavailable.This configuration imposes maximum limit
* on the total time spent in fetching metadata, serialization of key
* and value, partitioning and allocation of buffer memory when doing a
* send(). In case of partitionsFor(), this configuration imposes a
* maximum time threshold on waiting for metadata.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 60000
* Group: producer
*
* @param maxBlockMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder maxBlockMs(java.lang.Integer maxBlockMs) {
doSetProperty("maxBlockMs", maxBlockMs);
return this;
}
/**
* The maximum number of unacknowledged requests the client will send on
* a single connection before blocking. Note that if this setting is set
* to be greater than 1 and there are failed sends, there is a risk of
* message re-ordering due to retries (i.e., if retries are enabled).
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 5
* Group: producer
*
* @param maxInFlightRequest the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder maxInFlightRequest(
java.lang.Integer maxInFlightRequest) {
doSetProperty("maxInFlightRequest", maxInFlightRequest);
return this;
}
/**
* The maximum size of a request. This is also effectively a cap on the
* maximum record size. Note that the server has its own cap on record
* size which may be different from this. This setting will limit the
* number of record batches the producer will send in a single request
* to avoid sending huge requests.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 1048576
* Group: producer
*
* @param maxRequestSize the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder maxRequestSize(
java.lang.Integer maxRequestSize) {
doSetProperty("maxRequestSize", maxRequestSize);
return this;
}
/**
* The period of time in milliseconds after which we force a refresh of
* metadata even if we haven't seen any partition leadership changes to
* proactively discover any new brokers or partitions.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 300000
* Group: producer
*
* @param metadataMaxAgeMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder metadataMaxAgeMs(
java.lang.Integer metadataMaxAgeMs) {
doSetProperty("metadataMaxAgeMs", metadataMaxAgeMs);
return this;
}
/**
* A list of classes to use as metrics reporters. Implementing the
* MetricReporter interface allows plugging in classes that will be
* notified of new metric creation. The JmxReporter is always included
* to register JMX statistics.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param metricReporters the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder metricReporters(
java.lang.String metricReporters) {
doSetProperty("metricReporters", metricReporters);
return this;
}
/**
* The number of samples maintained to compute metrics.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 30000
* Group: producer
*
* @param metricsSampleWindowMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder metricsSampleWindowMs(
java.lang.Integer metricsSampleWindowMs) {
doSetProperty("metricsSampleWindowMs", metricsSampleWindowMs);
return this;
}
/**
* The number of samples maintained to compute metrics.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 2
* Group: producer
*
* @param noOfMetricsSample the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder noOfMetricsSample(
java.lang.Integer noOfMetricsSample) {
doSetProperty("noOfMetricsSample", noOfMetricsSample);
return this;
}
/**
* The partitioner class for partitioning messages amongst sub-topics.
* The default partitioner is based on the hash of the key.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default:
* org.apache.kafka.clients.producer.internals.DefaultPartitioner
* Group: producer
*
* @param partitioner the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder partitioner(java.lang.String partitioner) {
doSetProperty("partitioner", partitioner);
return this;
}
/**
* The partition to which the record will be sent (or null if no
* partition was specified). If this option has been configured then it
* take precedence over header KafkaConstants#PARTITION_KEY.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Group: producer
*
* @param partitionKey the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder partitionKey(
java.lang.Integer partitionKey) {
doSetProperty("partitionKey", partitionKey);
return this;
}
/**
* The producer will attempt to batch records together into fewer
* requests whenever multiple records are being sent to the same
* partition. This helps performance on both the client and the server.
* This configuration controls the default batch size in bytes. No
* attempt will be made to batch records larger than this size.Requests
* sent to brokers will contain multiple batches, one for each partition
* with data available to be sent.A small batch size will make batching
* less common and may reduce throughput (a batch size of zero will
* disable batching entirely). A very large batch size may use memory a
* bit more wastefully as we will always allocate a buffer of the
* specified batch size in anticipation of additional records.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 16384
* Group: producer
*
* @param producerBatchSize the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder producerBatchSize(
java.lang.Integer producerBatchSize) {
doSetProperty("producerBatchSize", producerBatchSize);
return this;
}
/**
* The maximum number of unsent messages that can be queued up the
* producer when using async mode before either the producer must be
* blocked or data must be dropped.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 10000
* Group: producer
*
* @param queueBufferingMaxMessages the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder queueBufferingMaxMessages(
java.lang.Integer queueBufferingMaxMessages) {
doSetProperty("queueBufferingMaxMessages", queueBufferingMaxMessages);
return this;
}
/**
* The size of the TCP receive buffer (SO_RCVBUF) to use when reading
* data.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 65536
* Group: producer
*
* @param receiveBufferBytes the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder receiveBufferBytes(
java.lang.Integer receiveBufferBytes) {
doSetProperty("receiveBufferBytes", receiveBufferBytes);
return this;
}
/**
* The amount of time to wait before attempting to reconnect to a given
* host. This avoids repeatedly connecting to a host in a tight loop.
* This backoff applies to all requests sent by the consumer to the
* broker.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 50
* Group: producer
*
* @param reconnectBackoffMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder reconnectBackoffMs(
java.lang.Integer reconnectBackoffMs) {
doSetProperty("reconnectBackoffMs", reconnectBackoffMs);
return this;
}
/**
* Whether the producer should store the RecordMetadata results from
* sending to Kafka. The results are stored in a List containing the
* RecordMetadata metadata's. The list is stored on a header with the
* key KafkaConstants#KAFKA_RECORDMETA.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer
*
* @param recordMetadata the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder recordMetadata(boolean recordMetadata) {
doSetProperty("recordMetadata", recordMetadata);
return this;
}
/**
* The number of acknowledgments the producer requires the leader to
* have received before considering a request complete. This controls
* the durability of records that are sent. The following settings are
* common: acks=0 If set to zero then the producer will not wait for any
* acknowledgment from the server at all. The record will be immediately
* added to the socket buffer and considered sent. No guarantee can be
* made that the server has received the record in this case, and the
* retries configuration will not take effect (as the client won't
* generally know of any failures). The offset given back for each
* record will always be set to -1. acks=1 This will mean the leader
* will write the record to its local log but will respond without
* awaiting full acknowledgement from all followers. In this case should
* the leader fail immediately after acknowledging the record but before
* the followers have replicated it then the record will be lost.
* acks=all This means the leader will wait for the full set of in-sync
* replicas to acknowledge the record. This guarantees that the record
* will not be lost as long as at least one in-sync replica remains
* alive. This is the strongest available guarantee.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: 1
* Group: producer
*
* @param requestRequiredAcks the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder requestRequiredAcks(
java.lang.String requestRequiredAcks) {
doSetProperty("requestRequiredAcks", requestRequiredAcks);
return this;
}
/**
* The amount of time the broker will wait trying to meet the
* request.required.acks requirement before sending back an error to the
* client.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 30000
* Group: producer
*
* @param requestTimeoutMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder requestTimeoutMs(
java.lang.Integer requestTimeoutMs) {
doSetProperty("requestTimeoutMs", requestTimeoutMs);
return this;
}
/**
* Setting a value greater than zero will cause the client to resend any
* record whose send fails with a potentially transient error. Note that
* this retry is no different than if the client resent the record upon
* receiving the error. Allowing retries will potentially change the
* ordering of records because if two records are sent to a single
* partition, and the first fails and is retried but the second
* succeeds, then the second record may appear first.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 0
* Group: producer
*
* @param retries the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder retries(java.lang.Integer retries) {
doSetProperty("retries", retries);
return this;
}
/**
* Before each retry, the producer refreshes the metadata of relevant
* topics to see if a new leader has been elected. Since leader election
* takes a bit of time, this property specifies the amount of time that
* the producer waits before refreshing the metadata.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 100
* Group: producer
*
* @param retryBackoffMs the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder retryBackoffMs(
java.lang.Integer retryBackoffMs) {
doSetProperty("retryBackoffMs", retryBackoffMs);
return this;
}
/**
* Socket write buffer size.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 131072
* Group: producer
*
* @param sendBufferBytes the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sendBufferBytes(
java.lang.Integer sendBufferBytes) {
doSetProperty("sendBufferBytes", sendBufferBytes);
return this;
}
/**
* The serializer class for messages.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: org.apache.kafka.common.serialization.StringSerializer
* Group: producer
*
* @param valueSerializer the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder valueSerializer(
java.lang.String valueSerializer) {
doSetProperty("valueSerializer", valueSerializer);
return this;
}
/**
* To use a custom worker pool for continue routing Exchange after kafka
* server has acknowledge the message that was sent to it from
* KafkaProducer using asynchronous non-blocking processing. If using
* this option then you must handle the lifecycle of the thread pool to
* shut the pool down when no longer needed.
*
* The option is a:
* <code>java.util.concurrent.ExecutorService</code> type.
*
* Group: producer
*
* @param workerPool the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder workerPool(
java.util.concurrent.ExecutorService workerPool) {
doSetProperty("workerPool", workerPool);
return this;
}
/**
* Number of core threads for the worker pool for continue routing
* Exchange after kafka server has acknowledge the message that was sent
* to it from KafkaProducer using asynchronous non-blocking processing.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 10
* Group: producer
*
* @param workerPoolCoreSize the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder workerPoolCoreSize(
java.lang.Integer workerPoolCoreSize) {
doSetProperty("workerPoolCoreSize", workerPoolCoreSize);
return this;
}
/**
* Maximum number of threads for the worker pool for continue routing
* Exchange after kafka server has acknowledge the message that was sent
* to it from KafkaProducer using asynchronous non-blocking processing.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 20
* Group: producer
*
* @param workerPoolMaxSize the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder workerPoolMaxSize(
java.lang.Integer workerPoolMaxSize) {
doSetProperty("workerPoolMaxSize", workerPoolMaxSize);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Factory to use for creating
* org.apache.kafka.clients.consumer.KafkaConsumer and
* org.apache.kafka.clients.producer.KafkaProducer instances. This
* allows to configure a custom factory to create instances with logic
* that extends the vanilla Kafka clients.
*
* The option is a:
* <code>org.apache.camel.component.kafka.KafkaClientFactory</code> type.
*
* Group: advanced
*
* @param kafkaClientFactory the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder kafkaClientFactory(
org.apache.camel.component.kafka.KafkaClientFactory kafkaClientFactory) {
doSetProperty("kafkaClientFactory", kafkaClientFactory);
return this;
}
/**
* Sets whether synchronous processing should be strictly used.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param synchronous the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder synchronous(boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* URL of the Confluent Platform schema registry servers to use. The
* format is host1:port1,host2:port2. This is known as
* schema.registry.url in the Confluent Platform documentation. This
* option is only available in the Confluent Platform (not standard
* Apache Kafka).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: confluent
*
* @param schemaRegistryURL the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder schemaRegistryURL(
java.lang.String schemaRegistryURL) {
doSetProperty("schemaRegistryURL", schemaRegistryURL);
return this;
}
/**
* Sets interceptors for producer or consumers. Producer interceptors
* have to be classes implementing
* org.apache.kafka.clients.producer.ProducerInterceptor Consumer
* interceptors have to be classes implementing
* org.apache.kafka.clients.consumer.ConsumerInterceptor Note that if
* you use Producer interceptor on a consumer it will throw a class cast
* exception in runtime.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: monitoring
*
* @param interceptorClasses the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder interceptorClasses(
java.lang.String interceptorClasses) {
doSetProperty("interceptorClasses", interceptorClasses);
return this;
}
/**
* Login thread sleep time between refresh attempts.
*
* The option is a: <code>java.lang.Integer</code> type.
*
* Default: 60000
* Group: security
*
* @param kerberosBeforeReloginMinTime the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder kerberosBeforeReloginMinTime(
java.lang.Integer kerberosBeforeReloginMinTime) {
doSetProperty("kerberosBeforeReloginMinTime", kerberosBeforeReloginMinTime);
return this;
}
/**
* Kerberos kinit command path. Default is /usr/bin/kinit.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: /usr/bin/kinit
* Group: security
*
* @param kerberosInitCmd the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder kerberosInitCmd(
java.lang.String kerberosInitCmd) {
doSetProperty("kerberosInitCmd", kerberosInitCmd);
return this;
}
/**
* A list of rules for mapping from principal names to short names
* (typically operating system usernames). The rules are evaluated in
* order and the first rule that matches a principal name is used to map
* it to a short name. Any later rules in the list are ignored. By
* default, principal names of the form {username}/{hostname}{REALM} are
* mapped to {username}. For more details on the format please see the
* security authorization and acls documentation.. Multiple values can
* be separated by comma.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: DEFAULT
* Group: security
*
* @param kerberosPrincipalToLocalRules the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder kerberosPrincipalToLocalRules(
java.lang.String kerberosPrincipalToLocalRules) {
doSetProperty("kerberosPrincipalToLocalRules", kerberosPrincipalToLocalRules);
return this;
}
/**
* Percentage of random jitter added to the renewal time.
*
* The option is a: <code>java.lang.Double</code> type.
*
* Default: 0.05
* Group: security
*
* @param kerberosRenewJitter the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder kerberosRenewJitter(
java.lang.Double kerberosRenewJitter) {
doSetProperty("kerberosRenewJitter", kerberosRenewJitter);
return this;
}
/**
* Login thread will sleep until the specified window factor of time
* from last refresh to ticket's expiry has been reached, at which time
* it will try to renew the ticket.
*
* The option is a: <code>java.lang.Double</code> type.
*
* Default: 0.8
* Group: security
*
* @param kerberosRenewWindowFactor the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder kerberosRenewWindowFactor(
java.lang.Double kerberosRenewWindowFactor) {
doSetProperty("kerberosRenewWindowFactor", kerberosRenewWindowFactor);
return this;
}
/**
* Expose the kafka sasl.jaas.config parameter Example:
* org.apache.kafka.common.security.plain.PlainLoginModule required
* username=USERNAME password=PASSWORD;.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param saslJaasConfig the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder saslJaasConfig(
java.lang.String saslJaasConfig) {
doSetProperty("saslJaasConfig", saslJaasConfig);
return this;
}
/**
* The Kerberos principal name that Kafka runs as. This can be defined
* either in Kafka's JAAS config or in Kafka's config.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param saslKerberosServiceName the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder saslKerberosServiceName(
java.lang.String saslKerberosServiceName) {
doSetProperty("saslKerberosServiceName", saslKerberosServiceName);
return this;
}
/**
* The Simple Authentication and Security Layer (SASL) Mechanism used.
* For the valid values see
* http://www.iana.org/assignments/sasl-mechanisms/sasl-mechanisms.xhtml.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: GSSAPI
* Group: security
*
* @param saslMechanism the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder saslMechanism(
java.lang.String saslMechanism) {
doSetProperty("saslMechanism", saslMechanism);
return this;
}
/**
* Protocol used to communicate with brokers. SASL_PLAINTEXT, PLAINTEXT
* and SSL are supported.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: PLAINTEXT
* Group: security
*
* @param securityProtocol the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder securityProtocol(
java.lang.String securityProtocol) {
doSetProperty("securityProtocol", securityProtocol);
return this;
}
/**
* A list of cipher suites. This is a named combination of
* authentication, encryption, MAC and key exchange algorithm used to
* negotiate the security settings for a network connection using TLS or
* SSL network protocol.By default all the available cipher suites are
* supported.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslCipherSuites the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslCipherSuites(
java.lang.String sslCipherSuites) {
doSetProperty("sslCipherSuites", sslCipherSuites);
return this;
}
/**
* SSL configuration using a Camel SSLContextParameters object. If
* configured it's applied before the other SSL endpoint parameters.
* NOTE: Kafka only supports loading keystore from file locations, so
* prefix the location with file: in the KeyStoreParameters.resource
* option.
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: security
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslContextParameters(
org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* The list of protocols enabled for SSL connections. TLSv1.2, TLSv1.1
* and TLSv1 are enabled by default.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslEnabledProtocols the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslEnabledProtocols(
java.lang.String sslEnabledProtocols) {
doSetProperty("sslEnabledProtocols", sslEnabledProtocols);
return this;
}
/**
* The endpoint identification algorithm to validate server hostname
* using server certificate.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: https
* Group: security
*
* @param sslEndpointAlgorithm the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslEndpointAlgorithm(
java.lang.String sslEndpointAlgorithm) {
doSetProperty("sslEndpointAlgorithm", sslEndpointAlgorithm);
return this;
}
/**
* The algorithm used by key manager factory for SSL connections.
* Default value is the key manager factory algorithm configured for the
* Java Virtual Machine.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: SunX509
* Group: security
*
* @param sslKeymanagerAlgorithm the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslKeymanagerAlgorithm(
java.lang.String sslKeymanagerAlgorithm) {
doSetProperty("sslKeymanagerAlgorithm", sslKeymanagerAlgorithm);
return this;
}
/**
* The password of the private key in the key store file. This is
* optional for client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslKeyPassword the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslKeyPassword(
java.lang.String sslKeyPassword) {
doSetProperty("sslKeyPassword", sslKeyPassword);
return this;
}
/**
* The location of the key store file. This is optional for client and
* can be used for two-way authentication for client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslKeystoreLocation the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslKeystoreLocation(
java.lang.String sslKeystoreLocation) {
doSetProperty("sslKeystoreLocation", sslKeystoreLocation);
return this;
}
/**
* The store password for the key store file.This is optional for client
* and only needed if ssl.keystore.location is configured.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslKeystorePassword the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslKeystorePassword(
java.lang.String sslKeystorePassword) {
doSetProperty("sslKeystorePassword", sslKeystorePassword);
return this;
}
/**
* The file format of the key store file. This is optional for client.
* Default value is JKS.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: JKS
* Group: security
*
* @param sslKeystoreType the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslKeystoreType(
java.lang.String sslKeystoreType) {
doSetProperty("sslKeystoreType", sslKeystoreType);
return this;
}
/**
* The SSL protocol used to generate the SSLContext. Default setting is
* TLS, which is fine for most cases. Allowed values in recent JVMs are
* TLS, TLSv1.1 and TLSv1.2. SSL, SSLv2 and SSLv3 may be supported in
* older JVMs, but their usage is discouraged due to known security
* vulnerabilities.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslProtocol the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslProtocol(java.lang.String sslProtocol) {
doSetProperty("sslProtocol", sslProtocol);
return this;
}
/**
* The name of the security provider used for SSL connections. Default
* value is the default security provider of the JVM.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslProvider the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslProvider(java.lang.String sslProvider) {
doSetProperty("sslProvider", sslProvider);
return this;
}
/**
* The algorithm used by trust manager factory for SSL connections.
* Default value is the trust manager factory algorithm configured for
* the Java Virtual Machine.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: PKIX
* Group: security
*
* @param sslTrustmanagerAlgorithm the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslTrustmanagerAlgorithm(
java.lang.String sslTrustmanagerAlgorithm) {
doSetProperty("sslTrustmanagerAlgorithm", sslTrustmanagerAlgorithm);
return this;
}
/**
* The location of the trust store file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslTruststoreLocation the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslTruststoreLocation(
java.lang.String sslTruststoreLocation) {
doSetProperty("sslTruststoreLocation", sslTruststoreLocation);
return this;
}
/**
* The password for the trust store file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param sslTruststorePassword the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslTruststorePassword(
java.lang.String sslTruststorePassword) {
doSetProperty("sslTruststorePassword", sslTruststorePassword);
return this;
}
/**
* The file format of the trust store file. Default value is JKS.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: JKS
* Group: security
*
* @param sslTruststoreType the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder sslTruststoreType(
java.lang.String sslTruststoreType) {
doSetProperty("sslTruststoreType", sslTruststoreType);
return this;
}
/**
* Enable usage of global SSL context parameters.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param useGlobalSslContextParameters the value to set
* @return the dsl builder
*/
default KafkaComponentBuilder useGlobalSslContextParameters(
boolean useGlobalSslContextParameters) {
doSetProperty("useGlobalSslContextParameters", useGlobalSslContextParameters);
return this;
}
}
/**
 * Concrete builder for the Kafka component (generated code).
 * <p>
 * Creates the {@code KafkaComponent} instance and translates each DSL option
 * name recorded via {@code doSetProperty} into the corresponding setter call,
 * either directly on the component or on its nested
 * {@code KafkaConfiguration}.
 */
class KafkaComponentBuilderImpl
        extends
            AbstractComponentBuilder<KafkaComponent>
        implements
            KafkaComponentBuilder {
    /** Instantiates a fresh, unconfigured Kafka component. */
    @Override
    protected KafkaComponent buildConcreteComponent() {
        return new KafkaComponent();
    }
    /**
     * Returns the component's {@code KafkaConfiguration}, lazily creating and
     * attaching one the first time a configuration-backed option is applied.
     */
    private org.apache.camel.component.kafka.KafkaConfiguration getOrCreateConfiguration(
            org.apache.camel.component.kafka.KafkaComponent component) {
        if (component.getConfiguration() == null) {
            component.setConfiguration(new org.apache.camel.component.kafka.KafkaConfiguration());
        }
        return component.getConfiguration();
    }
    /**
     * Applies one named option to the component. Options are routed either to
     * the component itself or to its nested configuration object; returns
     * {@code false} for unknown option names so the caller can handle them.
     */
    @Override
    protected boolean setPropertyOnComponent(
            Component component,
            String name,
            Object value) {
        switch (name) {
        case "additionalProperties": getOrCreateConfiguration((KafkaComponent) component).setAdditionalProperties((java.util.Map) value); return true;
        case "brokers": getOrCreateConfiguration((KafkaComponent) component).setBrokers((java.lang.String) value); return true;
        case "clientId": getOrCreateConfiguration((KafkaComponent) component).setClientId((java.lang.String) value); return true;
        case "configuration": ((KafkaComponent) component).setConfiguration((org.apache.camel.component.kafka.KafkaConfiguration) value); return true;
        case "headerFilterStrategy": getOrCreateConfiguration((KafkaComponent) component).setHeaderFilterStrategy((org.apache.camel.spi.HeaderFilterStrategy) value); return true;
        case "reconnectBackoffMaxMs": getOrCreateConfiguration((KafkaComponent) component).setReconnectBackoffMaxMs((java.lang.Integer) value); return true;
        case "shutdownTimeout": getOrCreateConfiguration((KafkaComponent) component).setShutdownTimeout((int) value); return true;
        case "allowManualCommit": getOrCreateConfiguration((KafkaComponent) component).setAllowManualCommit((boolean) value); return true;
        case "autoCommitEnable": getOrCreateConfiguration((KafkaComponent) component).setAutoCommitEnable((java.lang.Boolean) value); return true;
        case "autoCommitIntervalMs": getOrCreateConfiguration((KafkaComponent) component).setAutoCommitIntervalMs((java.lang.Integer) value); return true;
        case "autoCommitOnStop": getOrCreateConfiguration((KafkaComponent) component).setAutoCommitOnStop((java.lang.String) value); return true;
        case "autoOffsetReset": getOrCreateConfiguration((KafkaComponent) component).setAutoOffsetReset((java.lang.String) value); return true;
        case "breakOnFirstError": getOrCreateConfiguration((KafkaComponent) component).setBreakOnFirstError((boolean) value); return true;
        case "bridgeErrorHandler": ((KafkaComponent) component).setBridgeErrorHandler((boolean) value); return true;
        case "checkCrcs": getOrCreateConfiguration((KafkaComponent) component).setCheckCrcs((java.lang.Boolean) value); return true;
        case "consumerRequestTimeoutMs": getOrCreateConfiguration((KafkaComponent) component).setConsumerRequestTimeoutMs((java.lang.Integer) value); return true;
        case "consumersCount": getOrCreateConfiguration((KafkaComponent) component).setConsumersCount((int) value); return true;
        case "consumerStreams": getOrCreateConfiguration((KafkaComponent) component).setConsumerStreams((int) value); return true;
        case "fetchMaxBytes": getOrCreateConfiguration((KafkaComponent) component).setFetchMaxBytes((java.lang.Integer) value); return true;
        case "fetchMinBytes": getOrCreateConfiguration((KafkaComponent) component).setFetchMinBytes((java.lang.Integer) value); return true;
        case "fetchWaitMaxMs": getOrCreateConfiguration((KafkaComponent) component).setFetchWaitMaxMs((java.lang.Integer) value); return true;
        case "groupId": getOrCreateConfiguration((KafkaComponent) component).setGroupId((java.lang.String) value); return true;
        case "groupInstanceId": getOrCreateConfiguration((KafkaComponent) component).setGroupInstanceId((java.lang.String) value); return true;
        case "headerDeserializer": getOrCreateConfiguration((KafkaComponent) component).setHeaderDeserializer((org.apache.camel.component.kafka.serde.KafkaHeaderDeserializer) value); return true;
        case "heartbeatIntervalMs": getOrCreateConfiguration((KafkaComponent) component).setHeartbeatIntervalMs((java.lang.Integer) value); return true;
        case "keyDeserializer": getOrCreateConfiguration((KafkaComponent) component).setKeyDeserializer((java.lang.String) value); return true;
        case "maxPartitionFetchBytes": getOrCreateConfiguration((KafkaComponent) component).setMaxPartitionFetchBytes((java.lang.Integer) value); return true;
        case "maxPollIntervalMs": getOrCreateConfiguration((KafkaComponent) component).setMaxPollIntervalMs((java.lang.Long) value); return true;
        case "maxPollRecords": getOrCreateConfiguration((KafkaComponent) component).setMaxPollRecords((java.lang.Integer) value); return true;
        case "offsetRepository": getOrCreateConfiguration((KafkaComponent) component).setOffsetRepository((org.apache.camel.spi.StateRepository) value); return true;
        case "partitionAssignor": getOrCreateConfiguration((KafkaComponent) component).setPartitionAssignor((java.lang.String) value); return true;
        case "pollOnError": getOrCreateConfiguration((KafkaComponent) component).setPollOnError((org.apache.camel.component.kafka.PollOnError) value); return true;
        case "pollTimeoutMs": getOrCreateConfiguration((KafkaComponent) component).setPollTimeoutMs((java.lang.Long) value); return true;
        case "seekTo": getOrCreateConfiguration((KafkaComponent) component).setSeekTo((java.lang.String) value); return true;
        case "sessionTimeoutMs": getOrCreateConfiguration((KafkaComponent) component).setSessionTimeoutMs((java.lang.Integer) value); return true;
        case "specificAvroReader": getOrCreateConfiguration((KafkaComponent) component).setSpecificAvroReader((boolean) value); return true;
        case "topicIsPattern": getOrCreateConfiguration((KafkaComponent) component).setTopicIsPattern((boolean) value); return true;
        case "valueDeserializer": getOrCreateConfiguration((KafkaComponent) component).setValueDeserializer((java.lang.String) value); return true;
        case "kafkaManualCommitFactory": ((KafkaComponent) component).setKafkaManualCommitFactory((org.apache.camel.component.kafka.KafkaManualCommitFactory) value); return true;
        case "pollExceptionStrategy": ((KafkaComponent) component).setPollExceptionStrategy((org.apache.camel.component.kafka.PollExceptionStrategy) value); return true;
        case "bufferMemorySize": getOrCreateConfiguration((KafkaComponent) component).setBufferMemorySize((java.lang.Integer) value); return true;
        case "compressionCodec": getOrCreateConfiguration((KafkaComponent) component).setCompressionCodec((java.lang.String) value); return true;
        case "connectionMaxIdleMs": getOrCreateConfiguration((KafkaComponent) component).setConnectionMaxIdleMs((java.lang.Integer) value); return true;
        case "deliveryTimeoutMs": getOrCreateConfiguration((KafkaComponent) component).setDeliveryTimeoutMs((java.lang.Integer) value); return true;
        case "enableIdempotence": getOrCreateConfiguration((KafkaComponent) component).setEnableIdempotence((boolean) value); return true;
        case "headerSerializer": getOrCreateConfiguration((KafkaComponent) component).setHeaderSerializer((org.apache.camel.component.kafka.serde.KafkaHeaderSerializer) value); return true;
        case "key": getOrCreateConfiguration((KafkaComponent) component).setKey((java.lang.String) value); return true;
        case "keySerializer": getOrCreateConfiguration((KafkaComponent) component).setKeySerializer((java.lang.String) value); return true;
        case "lazyStartProducer": ((KafkaComponent) component).setLazyStartProducer((boolean) value); return true;
        case "lingerMs": getOrCreateConfiguration((KafkaComponent) component).setLingerMs((java.lang.Integer) value); return true;
        case "maxBlockMs": getOrCreateConfiguration((KafkaComponent) component).setMaxBlockMs((java.lang.Integer) value); return true;
        case "maxInFlightRequest": getOrCreateConfiguration((KafkaComponent) component).setMaxInFlightRequest((java.lang.Integer) value); return true;
        case "maxRequestSize": getOrCreateConfiguration((KafkaComponent) component).setMaxRequestSize((java.lang.Integer) value); return true;
        case "metadataMaxAgeMs": getOrCreateConfiguration((KafkaComponent) component).setMetadataMaxAgeMs((java.lang.Integer) value); return true;
        case "metricReporters": getOrCreateConfiguration((KafkaComponent) component).setMetricReporters((java.lang.String) value); return true;
        case "metricsSampleWindowMs": getOrCreateConfiguration((KafkaComponent) component).setMetricsSampleWindowMs((java.lang.Integer) value); return true;
        case "noOfMetricsSample": getOrCreateConfiguration((KafkaComponent) component).setNoOfMetricsSample((java.lang.Integer) value); return true;
        case "partitioner": getOrCreateConfiguration((KafkaComponent) component).setPartitioner((java.lang.String) value); return true;
        case "partitionKey": getOrCreateConfiguration((KafkaComponent) component).setPartitionKey((java.lang.Integer) value); return true;
        case "producerBatchSize": getOrCreateConfiguration((KafkaComponent) component).setProducerBatchSize((java.lang.Integer) value); return true;
        case "queueBufferingMaxMessages": getOrCreateConfiguration((KafkaComponent) component).setQueueBufferingMaxMessages((java.lang.Integer) value); return true;
        case "receiveBufferBytes": getOrCreateConfiguration((KafkaComponent) component).setReceiveBufferBytes((java.lang.Integer) value); return true;
        case "reconnectBackoffMs": getOrCreateConfiguration((KafkaComponent) component).setReconnectBackoffMs((java.lang.Integer) value); return true;
        case "recordMetadata": getOrCreateConfiguration((KafkaComponent) component).setRecordMetadata((boolean) value); return true;
        case "requestRequiredAcks": getOrCreateConfiguration((KafkaComponent) component).setRequestRequiredAcks((java.lang.String) value); return true;
        case "requestTimeoutMs": getOrCreateConfiguration((KafkaComponent) component).setRequestTimeoutMs((java.lang.Integer) value); return true;
        case "retries": getOrCreateConfiguration((KafkaComponent) component).setRetries((java.lang.Integer) value); return true;
        case "retryBackoffMs": getOrCreateConfiguration((KafkaComponent) component).setRetryBackoffMs((java.lang.Integer) value); return true;
        case "sendBufferBytes": getOrCreateConfiguration((KafkaComponent) component).setSendBufferBytes((java.lang.Integer) value); return true;
        case "valueSerializer": getOrCreateConfiguration((KafkaComponent) component).setValueSerializer((java.lang.String) value); return true;
        case "workerPool": getOrCreateConfiguration((KafkaComponent) component).setWorkerPool((java.util.concurrent.ExecutorService) value); return true;
        case "workerPoolCoreSize": getOrCreateConfiguration((KafkaComponent) component).setWorkerPoolCoreSize((java.lang.Integer) value); return true;
        case "workerPoolMaxSize": getOrCreateConfiguration((KafkaComponent) component).setWorkerPoolMaxSize((java.lang.Integer) value); return true;
        case "autowiredEnabled": ((KafkaComponent) component).setAutowiredEnabled((boolean) value); return true;
        case "kafkaClientFactory": ((KafkaComponent) component).setKafkaClientFactory((org.apache.camel.component.kafka.KafkaClientFactory) value); return true;
        case "synchronous": getOrCreateConfiguration((KafkaComponent) component).setSynchronous((boolean) value); return true;
        case "schemaRegistryURL": getOrCreateConfiguration((KafkaComponent) component).setSchemaRegistryURL((java.lang.String) value); return true;
        case "interceptorClasses": getOrCreateConfiguration((KafkaComponent) component).setInterceptorClasses((java.lang.String) value); return true;
        case "kerberosBeforeReloginMinTime": getOrCreateConfiguration((KafkaComponent) component).setKerberosBeforeReloginMinTime((java.lang.Integer) value); return true;
        case "kerberosInitCmd": getOrCreateConfiguration((KafkaComponent) component).setKerberosInitCmd((java.lang.String) value); return true;
        case "kerberosPrincipalToLocalRules": getOrCreateConfiguration((KafkaComponent) component).setKerberosPrincipalToLocalRules((java.lang.String) value); return true;
        case "kerberosRenewJitter": getOrCreateConfiguration((KafkaComponent) component).setKerberosRenewJitter((java.lang.Double) value); return true;
        case "kerberosRenewWindowFactor": getOrCreateConfiguration((KafkaComponent) component).setKerberosRenewWindowFactor((java.lang.Double) value); return true;
        case "saslJaasConfig": getOrCreateConfiguration((KafkaComponent) component).setSaslJaasConfig((java.lang.String) value); return true;
        case "saslKerberosServiceName": getOrCreateConfiguration((KafkaComponent) component).setSaslKerberosServiceName((java.lang.String) value); return true;
        case "saslMechanism": getOrCreateConfiguration((KafkaComponent) component).setSaslMechanism((java.lang.String) value); return true;
        case "securityProtocol": getOrCreateConfiguration((KafkaComponent) component).setSecurityProtocol((java.lang.String) value); return true;
        case "sslCipherSuites": getOrCreateConfiguration((KafkaComponent) component).setSslCipherSuites((java.lang.String) value); return true;
        case "sslContextParameters": getOrCreateConfiguration((KafkaComponent) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
        case "sslEnabledProtocols": getOrCreateConfiguration((KafkaComponent) component).setSslEnabledProtocols((java.lang.String) value); return true;
        case "sslEndpointAlgorithm": getOrCreateConfiguration((KafkaComponent) component).setSslEndpointAlgorithm((java.lang.String) value); return true;
        case "sslKeymanagerAlgorithm": getOrCreateConfiguration((KafkaComponent) component).setSslKeymanagerAlgorithm((java.lang.String) value); return true;
        case "sslKeyPassword": getOrCreateConfiguration((KafkaComponent) component).setSslKeyPassword((java.lang.String) value); return true;
        case "sslKeystoreLocation": getOrCreateConfiguration((KafkaComponent) component).setSslKeystoreLocation((java.lang.String) value); return true;
        case "sslKeystorePassword": getOrCreateConfiguration((KafkaComponent) component).setSslKeystorePassword((java.lang.String) value); return true;
        case "sslKeystoreType": getOrCreateConfiguration((KafkaComponent) component).setSslKeystoreType((java.lang.String) value); return true;
        case "sslProtocol": getOrCreateConfiguration((KafkaComponent) component).setSslProtocol((java.lang.String) value); return true;
        case "sslProvider": getOrCreateConfiguration((KafkaComponent) component).setSslProvider((java.lang.String) value); return true;
        case "sslTrustmanagerAlgorithm": getOrCreateConfiguration((KafkaComponent) component).setSslTrustmanagerAlgorithm((java.lang.String) value); return true;
        case "sslTruststoreLocation": getOrCreateConfiguration((KafkaComponent) component).setSslTruststoreLocation((java.lang.String) value); return true;
        case "sslTruststorePassword": getOrCreateConfiguration((KafkaComponent) component).setSslTruststorePassword((java.lang.String) value); return true;
        case "sslTruststoreType": getOrCreateConfiguration((KafkaComponent) component).setSslTruststoreType((java.lang.String) value); return true;
        case "useGlobalSslContextParameters": ((KafkaComponent) component).setUseGlobalSslContextParameters((boolean) value); return true;
        default: return false;
        }
    }
}
} | apache-2.0 |
wates/aws-sdk-cpp | aws-cpp-sdk-sqs/source/model/ListQueuesResult.cpp | 1918 | /*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/sqs/model/ListQueuesResult.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <utility>
using namespace Aws::SQS::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils;
using namespace Aws;
// Default-construct an empty result (no queue URLs, empty metadata).
ListQueuesResult::ListQueuesResult()
{
}
// Construct directly from a service response by delegating to operator=.
ListQueuesResult::ListQueuesResult(const AmazonWebServiceResult<XmlDocument>& result)
{
  *this = result;
}
// Populate this result from the XML payload of a ListQueues response.
// The <QueueUrl> elements may sit either under a <ListQueuesResult> child
// node or directly under the document root (depending on how the response
// was unwrapped), so both layouts are handled.
ListQueuesResult& ListQueuesResult::operator =(const AmazonWebServiceResult<XmlDocument>& result)
{
  const XmlDocument& xmlDocument = result.GetPayload();
  XmlNode rootNode = xmlDocument.GetRootElement();
  XmlNode resultNode = rootNode;
  if (rootNode.GetName() != "ListQueuesResult")
  {
    resultNode = rootNode.FirstChild("ListQueuesResult");
  }

  if(!resultNode.IsNull())
  {
    XmlNode queueUrlsNode = resultNode.FirstChild("QueueUrl");
    if(!queueUrlsNode.IsNull())
    {
      // Walk the sibling <QueueUrl> elements, trimming surrounding whitespace
      // from each URL before appending it to m_queueUrls.
      XmlNode queueUrlMember = queueUrlsNode;
      while(!queueUrlMember.IsNull())
      {
        m_queueUrls.push_back(StringUtils::Trim(queueUrlMember.GetText().c_str()));
        queueUrlMember = queueUrlMember.NextNode("QueueUrl");
      }
    }
  }

  // ResponseMetadata (request id etc.) always hangs off the document root.
  XmlNode responseMetadataNode = rootNode.FirstChild("ResponseMetadata");
  m_responseMetadata = responseMetadataNode;

  return *this;
}
| apache-2.0 |
a-manumohan/finagle | finagle-mux/src/test/scala/com/twitter/finagle/mux/ServerTest.scala | 10980 | package com.twitter.finagle.mux
import com.twitter.concurrent.AsyncQueue
import com.twitter.conversions.time._
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.mux.lease.exp.{Lessor, nackOnExpiredLease}
import com.twitter.finagle.mux.transport.Message
import com.twitter.finagle.netty3.BufChannelBuffer
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.tracing.NullTracer
import com.twitter.finagle.transport.{QueueTransport, Transport}
import com.twitter.finagle.{Dtab, Failure, Path, Service}
import com.twitter.io.Buf.Utf8
import com.twitter.io.{Buf, Charsets}
import com.twitter.util.{Await, Duration, Future, Promise, Return, Throw, Time}
import java.security.cert.X509Certificate
import org.jboss.netty.buffer.ChannelBuffers
import org.junit.runner.RunWith
import org.mockito.Matchers.any
import org.mockito.Mockito.{never, verify, when}
import org.scalatest.FunSuite
import org.scalatest.junit.{AssertionsForJUnit, JUnitRunner}
import org.scalatest.mock.MockitoSugar
@RunWith(classOf[JUnitRunner])
/**
 * Unit tests for the mux ServerDispatcher: lessor registration, lease
 * propagation and nacking, pending-request accounting, draining behaviour
 * on close, and peer-certificate propagation into the request context.
 */
class ServerTest extends FunSuite with MockitoSugar with AssertionsForJUnit {

  // Fixture: a dispatcher wired to in-memory message queues plus a mocked
  // service and lessor, with helpers to issue leases and to demonstrate
  // whether the server currently nacks incoming dispatches.
  private class LeaseCtx {
    val clientToServer = new AsyncQueue[Message]
    val serverToClient = new AsyncQueue[Message]
    val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)
    val service = mock[Service[Request, Response]]
    val lessor = mock[Lessor]
    val server = ServerDispatcher.newRequestResponse(
      transport, service, lessor, NullTracer, NullStatsReceiver)

    // Issue a lease and assert the corresponding Tlease control message is
    // written to the client.
    def issue(lease: Duration) {
      val m = serverToClient.poll()
      assert(!m.isDefined)
      server.issue(lease)
      assert(m.isDefined)
      checkFuture(m, Message.Tlease(lease))
    }

    // A dispatch sent now must be answered with an immediate RdispatchNack.
    def demonstrateNack() {
      val m = serverToClient.poll()
      assert(!m.isDefined)
      clientToServer.offer(
        Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))
      assert(m.isDefined)
      checkFuture(m, Message.RdispatchNack(0, Seq.empty))
    }

    // A dispatch sent now must NOT be answered synchronously (no nack).
    def demonstrateNoNack() {
      val m = serverToClient.poll()
      assert(!m.isDefined)
      clientToServer.offer(
        Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))
      assert(!m.isDefined)
    }
  }

  // Assert that `actual` is already satisfied with exactly `expected`.
  private[this] def checkFuture(actual: Future[Message], expected: Message) {
    actual.poll match {
      case Some(Return(msg)) => assert(msg == expected)
      case _ => fail()
    }
  }

  test("register/unregister with lessor") {
    val ctx = new LeaseCtx
    import ctx._

    verify(lessor).register(server)
    verify(lessor, never()).unregister(server)
    // Transport failure must trigger unregistration.
    clientToServer.fail(new Exception)
    verify(lessor).unregister(server)
  }

  test("propagate leases") {
    val ctx = new LeaseCtx
    import ctx._

    val m = serverToClient.poll()
    assert(!m.isDefined)
    server.issue(123.milliseconds)
    assert(m.isDefined)
    assert(Await.result(m) == Message.Tlease(123.milliseconds))
  }

  test("nack on 0 leases") {
    val ctx = new LeaseCtx
    import ctx._

    nackOnExpiredLease.parse("true")
    issue(Duration.Zero)
    demonstrateNack()
  }

  test("don't nack on > 0 leases") {
    val ctx = new LeaseCtx
    import ctx._

    nackOnExpiredLease.parse("true")
    issue(1.millisecond)
    demonstrateNoNack()
  }

  test("unnack again after a > 0 lease") {
    Time.withCurrentTimeFrozen { ctl =>
      val ctx = new LeaseCtx
      import ctx._

      nackOnExpiredLease.parse("true")
      issue(Duration.Zero)
      demonstrateNack()

      // A fresh non-zero lease re-enables dispatching.
      ctl.advance(2.seconds)
      issue(1.second)
      demonstrateNoNack()
    }
  }

  test("does not leak pending on failures") {
    val p = new Promise[Response]
    val svc = Service.mk[Request, Response](_ => p)
    val msg = Message.Treq(tag = 9, traceId = None, ChannelBuffers.EMPTY_BUFFER)

    val trans = mock[Transport[Message, Message]]
    when(trans.onClose)
      .thenReturn(new Promise[Throwable])
    when(trans.read())
      .thenReturn(Future.value(msg))
      .thenReturn(Future.never)
    when(trans.write(any[Message]))
      .thenReturn(Future.Done)
    when(trans.peerCertificate)
      .thenReturn(None)

    val dispatcher = ServerDispatcher.newRequestResponse(
      trans, svc, Lessor.nil, NullTracer, NullStatsReceiver)
    assert(dispatcher.npending() == 1)

    // Failing the in-flight request must decrement the pending count.
    p.updateIfEmpty(Throw(new RuntimeException("welp")))
    assert(dispatcher.npending() == 0)
  }

  test("nack on restartable failures") {
    val svc = new Service[Request, Response] {
      def apply(req: Request) = Future.exception(Failure.rejected("overloaded!"))
    }

    val clientToServer = new AsyncQueue[Message]
    val serverToClient = new AsyncQueue[Message]
    val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)
    val server = ServerDispatcher.newRequestResponse(
      transport, svc, Lessor.nil, NullTracer, NullStatsReceiver)

    clientToServer.offer(
      Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))

    val reply = serverToClient.poll()
    assert(reply.isDefined)
    // A rejected (restartable) failure surfaces to the client as a nack.
    assert(Await.result(reply).isInstanceOf[Message.RdispatchNack])
  }

  test("drains properly before closing the socket") {
    Time.withCurrentTimeFrozen { ctl =>
      val buf = ChannelBuffers.copiedBuffer("OK", Charsets.Utf8)
      val serverToClient = new AsyncQueue[Message]
      val clientToServer = new AsyncQueue[Message]
      val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)

      val p = Promise[Response]
      var req: Request = null
      val server = ServerDispatcher.newRequestResponse(
        transport,
        Service.mk { _req: Request =>
          req = _req
          p
        }
      )

      clientToServer.offer(Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, buf))
      // one outstanding request

      val drain = server.close(Time.Top) // synchronously sends drain request to client
      clientToServer.offer(Message.Rdrain(1)) // client draining

      assert(!drain.isDefined) // one outstanding request
      p.setValue(Response(Buf.Utf8("KO")))
      assert(drain.isDefined) // zero outstanding requests
    }
  }

  test("drains properly before closing the socket with two outstanding") {
    Time.withCurrentTimeFrozen { ctl =>
      val serverToClient = new AsyncQueue[Message]
      val clientToServer = new AsyncQueue[Message]
      val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)

      var promises: List[Promise[Response]] = Nil
      val server = ServerDispatcher.newRequestResponse(
        transport, Service.mk { _: Request =>
          val p = Promise[Response]()
          promises ::= p
          p
        })

      clientToServer.offer(
        Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))
      // one outstanding request

      clientToServer.offer(
        Message.Tdispatch(1, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))
      // two outstanding requests

      val drain = server.close(Time.Top) // synchronously sends drain request to client
      clientToServer.offer(Message.Rdrain(1)) // client draining

      assert(!drain.isDefined) // two outstanding request
      assert(server.npending() == 2) // two outstanding request

      promises(0).setValue(Response.empty)

      assert(server.npending() == 1) // one outstanding request
      assert(!drain.isDefined) // one outstanding request

      promises(1).setValue(Response.empty)

      assert(server.npending() == 0) // zero outstanding request
      assert(drain.isDefined) // zero outstanding requests
    }
  }

  test("closes properly without outstanding requests") {
    Time.withCurrentTimeFrozen { ctl =>
      val serverToClient = new AsyncQueue[Message]
      val clientToServer = new AsyncQueue[Message]
      val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer)

      val server = ServerDispatcher.newRequestResponse(
        transport, Service.mk(req => Future.???))

      val drain = server.close(Time.Top) // synchronously sends drain request to client

      val Some(Return(tdrain)) = serverToClient.poll.poll
      val Message.Tdrain(tag) = tdrain

      assert(!drain.isDefined) // client hasn't acked
      clientToServer.offer(Message.Rdrain(tag)) // client draining
      assert(drain.isDefined) // safe to shut down
    }
  }

  // Lightweight fixture exposing the raw message queues and an optional
  // peer certificate on the transport.
  private[this] class Server(svc: Service[Request, Response], peerCert: Option[X509Certificate] = None) {
    val serverToClient = new AsyncQueue[Message]
    val clientToServer = new AsyncQueue[Message]
    val transport = new QueueTransport(writeq=serverToClient, readq=clientToServer) {
      override def peerCertificate = peerCert
    }
    def ping() = Future.Done
    val server = ServerDispatcher.newRequestResponse(transport, svc)
    def request(msg: Message): Unit = clientToServer.offer(msg)
    def read(): Future[Message] = serverToClient.poll
  }

  test("starts nacking only after receiving an rdrain") {
    Time.withCurrentTimeFrozen { ctl =>
      import Message._

      val server = new Server(Service.mk { req: Request =>
        Future.value(Response.empty)
      })

      server.request( // request before closing
        Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))
      assert(server.read().isDefined)

      val drain = server.server.close(Time.Top) // synchronously sends drain request to client

      val Some(Return(tdrain)) = server.read().poll
      val Tdrain(tag) = tdrain

      server.request( // request after sending tdrain, before getting rdrain
        Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))
      assert(server.read().isDefined)

      assert(!drain.isDefined) // client hasn't acked
      server.request(Rdrain(tag)) // client draining
      assert(drain.isDefined) // safe to shut down
      server.request( // request after closing down
        Message.Tdispatch(0, Seq.empty, Path.empty, Dtab.empty, ChannelBuffers.EMPTY_BUFFER))
      val Some(Return(rdrain)) = server.read().poll
      assert(rdrain.isInstanceOf[RdispatchNack])
    }
  }

  test("propagates peer certificates") {
    val mockCert = mock[X509Certificate]
    val okResponse = Response(Utf8("ok"))
    val failResponse = Response(Utf8("fail"))

    // Responds "ok" only when the transport's peer certificate is visible
    // through the local context during dispatch.
    val testService = new Service[Request, Response] {
      override def apply(request: Request): Future[Response] = Future.value {
        if (Contexts.local.get(Transport.peerCertCtx) == Some(mockCert)) okResponse else failResponse
      }
    }

    val tag = 3
    val server = new Server(testService, Some(mockCert))

    val req = Message.Treq(tag, None, BufChannelBuffer(Request.empty.body))
    server.request(req)
    val Some(Return(res)) = server.read().poll
    assert(res == Message.RreqOk(tag, BufChannelBuffer(okResponse.body)))
  }
}
| apache-2.0 |
helight/helight_code | go_code/ichat/ichat.go | 2383 | package main
import (
"fmt"
"context"
)
// GetNews scrapes the GoCN "daily news" category page, locates today's news
// post, extracts the linked items from it and forwards them as a markdown
// message to enterprise WeChat (WeChat Work).
//
// NOTE(review): this function references several identifiers that are neither
// declared in this file nor imported above (ctx, goquery, time, strings,
// handler, notify_data, os), and the imported "context" package is otherwise
// unused — the file does not compile as-is; confirm the intended imports and
// where ctx is supposed to come from.
func GetNews() {
	// First open the GoCN daily-news listing page.
	doc, err := goquery.NewDocument("https://gocn.vip/explore/category-14")
	if err != nil {
		ctx.Error("New index document fail, err :%v\n", err)
		return
	}
	list := doc.Find("a:contains(GoCN)")
	if list == nil {
		fmt.Printf("list is empty")
		return
	}
	// Pick the first entry in the daily-news list whose title contains
	// today's date.
	var newUrl string
	var title string
	list.Each(func(i int, selection *goquery.Selection) {
		if len(newUrl) != 0 {
			return
		}
		val, _ := selection.Attr("href")
		title = selection.Text()
		todayDate := time.Now().Format("2006-01-02")
		if strings.Contains(val, "gocn.vip/question") == true && strings.Contains(title, todayDate){
			newUrl = val
		}
	})
	ctx.Info("newUrl :%s\n", newUrl)
	// Check whether newUrl has already been processed (currently disabled).
	/*used, err := isUrlUsed(ctx, newUrl)
	if err != nil {
		ctx.Error("Check url if_used fail, err :%v, url :%s", err, newUrl)
		//todo monitor
		return
	}
	if used {
		ctx.Info("Url is used, url :%s", newUrl)
		return
	}*/
	// Fetch the selected post and extract its content list.
	newDoc, err := goquery.NewDocument(newUrl)
	if err != nil {
		ctx.Error("open new url fail, err :%v, newUrl :%s", err, newUrl)
		return
	}
	modClass := newDoc.Find("div.content").Find("li")
	if modClass == nil {
		ctx.Error("mod class is nil, newUrl :%s", newUrl)
		return
	}
	contentBuf := []byte{}
	counter := 1 // running index for each extracted item
	modClass.Each(func(i int, selection * goquery.Selection) {
		urlIndex := strings.Index(selection.Text(), "http")
		aTag := selection.Find("a")
		if aTag == nil {
			ctx.Error("A tag not found, selection Text :%s", selection.Text())
			return
		}
		urlStr, exist := aTag.Attr("href") // take the href of the <a> tag
		if exist {
			contentStr := selection.Text()[0:urlIndex]
			contentBuf = append(contentBuf, []byte(fmt.Sprintf(">%d. [%s](%s)\n", counter,strings.TrimSpace(contentStr), urlStr))...)
		}
		/*else {
			contentBuf = append(contentBuf, []byte(fmt.Sprintf(">%d. %s\n", counter, selection.Text()))...)
		}*/
		counter ++
	})
	// Send the enterprise WeChat notification with the assembled markdown.
	err = notify_data.NotifyData(handler.NewContext(context.Background(), os.Stdout), notify_data.KEY_TYPE_BINGO, title+"\n"+string(contentBuf))
	if err != nil {
		ctx.Error("Notify wx fail, err :%v", err)
		//todo monitor
	}
}
// main is the program entry point.
//
// Fix: Go's main function must not declare a result — "func main() int"
// does not compile. Use os.Exit explicitly if a non-zero exit status is
// ever needed.
func main() {
}
msgilligan/bitcoinj-addons | consensusj-jsonrpc-daemon/src/main/java/org/consensusj/jsonrpc/daemon/EchoJsonRpcService.java | 880 | package org.consensusj.jsonrpc.daemon;
import org.consensusj.jsonrpc.introspection.AbstractJsonRpcService;
import org.consensusj.jsonrpc.introspection.JsonRpcServiceWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Singleton;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.Method;
import java.util.Map;
/**
* Simple Echo JSON-RPC Service
*/
@Singleton
public class EchoJsonRpcService extends AbstractJsonRpcService {
private static Logger log = LoggerFactory.getLogger(EchoJsonRpcService.class);
private static final Map<String, Method> methods = JsonRpcServiceWrapper.reflect(MethodHandles.lookup().lookupClass());
public EchoJsonRpcService() {
super(methods);
}
public String echo(String message) {
log.debug("EchoJsonRpcService: echo {}",message);
return message;
}
}
| apache-2.0 |
MICommunity/psi-jami | jami-bridges/jami-ontology-manager/src/main/java/psidev/psi/mi/jami/bridges/ontologymanager/MIOntologyTermI.java | 1006 | package psidev.psi.mi.jami.bridges.ontologymanager;
import psidev.psi.mi.jami.model.OntologyTerm;
import psidev.psi.tools.ontology_manager.interfaces.OntologyTermI;
import java.util.Set;
/**
 * Extension of {@link OntologyTermI} used by JAMI to expose extra metadata
 * about an ontology term: obsolescence information, remapping suggestions and
 * access to the backing JAMI {@link psidev.psi.mi.jami.model.OntologyTerm}.
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>01/11/11</pre>
 */
public interface MIOntologyTermI extends OntologyTermI {

    /**
     * The message attached to this term's obsolete annotation, if any.
     *
     * @return the obsolete message for this term (implementations may return
     *         null when the term is not obsolete — not enforced here)
     */
    public String getObsoleteMessage();

    /**
     * The single term this obsolete term has been remapped to, if one exists.
     *
     * @return identifier of the replacement term
     */
    public String getRemappedTerm();

    /**
     * Candidate terms this obsolete term could be remapped to when no single
     * replacement is defined.
     *
     * @return set of candidate replacement term identifiers
     */
    public Set<String> getPossibleTermsToRemapTo();

    /**
     * The underlying JAMI ontology term this object wraps.
     *
     * @return the delegate {@link OntologyTerm}
     */
    public OntologyTerm getDelegate();
}
| apache-2.0 |
Redvvolf/SecureSharedPref | app/src/main/java/com/nurisezgin/securesharedpreferences/pref/crypto/AESCrypto.java | 1767 | package com.nurisezgin.securesharedpreferences.pref.crypto;
import android.util.Base64;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;
/**
* Created by nurisezgin on 08/02/2017.
* This is sample class for how to implementation "Cryptography" for your own SharedPreferences
* Provider.
*/
public class AESCrypto implements ICryptography {

    // NOTE(review): a hard-coded key and AES/ECB are weak choices — ECB leaks
    // repeating-block patterns and the key ships inside the APK. Kept as-is so
    // previously stored preferences remain decodable; consider migrating to
    // AES/GCM with a key held in the Android Keystore.
    private final String KEY = "?!sampleKey4580dsd";

    /** Cipher transformation shared by both directions. */
    private static final String TRANSFORMATION = "AES/ECB/PKCS5Padding";

    /**
     * Encrypts {@code raw} with AES and returns the result Base64-encoded.
     *
     * @param raw plain text to protect
     * @return Base64-encoded cipher text, or null if encryption fails
     */
    @Override
    public String encode(String raw) {
        try {
            byte[] encoded = encrypt(raw);
            byte[] base64Encoded = Base64.encode(encoded, Base64.NO_WRAP);
            return new String(base64Encoded);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Decodes a Base64 string produced by {@link #encode(String)} and
     * decrypts it back to plain text.
     *
     * @param encoded Base64-encoded cipher text
     * @return the original plain text, or null if decryption fails
     */
    @Override
    public String decode(String encoded) {
        try {
            byte[] base64Decoded = Base64.decode(encoded, Base64.NO_WRAP);
            byte[] decoded = decrypt(base64Decoded);
            return new String(decoded);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Builds the AES key spec from the shared key material.
     * Only the first 16 bytes (AES-128) are used, matching the original
     * behaviour; previously this construction was duplicated in both
     * encrypt and decrypt.
     */
    private SecretKeySpec keySpec() {
        return new SecretKeySpec(KEY.getBytes(), 0, 16, "AES");
    }

    private byte[] encrypt(String raw) throws Exception {
        Cipher cipher = Cipher.getInstance(TRANSFORMATION);
        cipher.init(Cipher.ENCRYPT_MODE, keySpec());
        return cipher.doFinal(raw.getBytes());
    }

    private byte[] decrypt(byte[] encoded) throws Exception {
        Cipher cipher = Cipher.getInstance(TRANSFORMATION);
        cipher.init(Cipher.DECRYPT_MODE, keySpec());
        return cipher.doFinal(encoded);
    }
}
| apache-2.0 |
8090boy/gomall.la | legendshop/src/java/com/legendshop/business/controller/MessageController.java | 3734 | /*
*
* LegendShop 多用户商城系统
*
* 版权所有,并保留所有权利。
*
*/
package com.legendshop.business.controller;
import java.util.ResourceBundle;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import com.legendshop.core.UserManager;
import com.legendshop.core.base.AdminController;
import com.legendshop.core.base.BaseController;
import com.legendshop.core.constant.SysParameterEnum;
import com.legendshop.core.dao.support.CriteriaQuery;
import com.legendshop.core.dao.support.PageSupport;
import com.legendshop.core.helper.PropertiesUtil;
import com.legendshop.model.entity.Message;
import com.legendshop.spi.service.MessageService;
/**
 * Admin MVC controller exposing CRUD-style handlers for {@link Message}
 * entities (list, save, delete, load, update), with per-user privilege
 * checks on anything that touches an existing message.
 */
@Controller
@RequestMapping("/message")
public class MessageController extends BaseController implements AdminController<Message, String> {

	@Autowired
	private MessageService messageService;

	/**
	 * Looks up a localized string from the shared application bundle.
	 * Previously the bundle lookup was duplicated inline in every handler.
	 */
	private static String i18n(String key) {
		return ResourceBundle.getBundle("i18n/ApplicationResources").getString(key);
	}

	/** Lists messages, paged and scoped to the current user's data rights. */
	@RequestMapping("/query")
	public String query(HttpServletRequest request, HttpServletResponse response, String curPageNO, Message message) {
		CriteriaQuery cq = new CriteriaQuery(Message.class, curPageNO);
		cq.setPageSize(PropertiesUtil.getObject(SysParameterEnum.PAGE_SIZE, Integer.class));
		cq = hasAllDataFunction(cq, request, StringUtils.trim(message.getSender()));
		// NOTE(review): cq.add() is invoked with no criterion — apparently a
		// code-generation leftover ("add your condition"); confirm whether a
		// filter was intended here or the call can be dropped.
		cq.add();
		PageSupport ps = messageService.getMessage(cq);
		ps.savePage(request);
		request.setAttribute("message", message);
		return "/message/messageList";
	}

	/** Persists a message and redisplays the list. */
	@RequestMapping(value = "/save")
	public String save(HttpServletRequest request, HttpServletResponse response, Message message) {
		messageService.saveMessage(message);
		saveMessage(request, i18n("operation.successful"));
		return "forward:/admin/message/query.htm";
	}

	/** Deletes a message after verifying the caller may act on its sender. */
	@RequestMapping(value = "/delete/{id}")
	public String delete(HttpServletRequest request, HttpServletResponse response, @PathVariable String id) {
		Message message = messageService.getMessage(id);
		String result = checkPrivilege(request, UserManager.getUserName(request.getSession()), message.getSender());
		if (result != null) {
			return result;
		}
		messageService.deleteMessage(message);
		saveMessage(request, i18n("entity.deleted"));
		return "forward:/admin/message/query.htm";
	}

	/** Loads a single message into the request for the detail view. */
	@RequestMapping(value = "/load/{id}")
	public String load(HttpServletRequest request, HttpServletResponse response, @PathVariable String id) {
		Message message = messageService.getMessage(id);
		String result = checkPrivilege(request, UserManager.getUserName(request.getSession()), message.getSender());
		if (result != null) {
			return result;
		}
		// Fix: was request.setAttribute("#entityClassInstance", message) — an
		// unexpanded code-generation placeholder. The sibling handlers and the
		// view expose the entity under the "message" attribute.
		request.setAttribute("message", message);
		return "/message/message";
	}

	/** Shows an empty message form. */
	@RequestMapping(value = "/load")
	public String load(HttpServletRequest request, HttpServletResponse response) {
		return "/message/message";
	}

	/** Reloads a message (privilege-checked) and returns to the list view. */
	@RequestMapping(value = "/update")
	public String update(HttpServletRequest request, HttpServletResponse response, @PathVariable String id) {
		Message message = messageService.getMessage(id);
		String result = checkPrivilege(request, UserManager.getUserName(request.getSession()), message.getSender());
		if (result != null) {
			return result;
		}
		request.setAttribute("message", message);
		return "forward:/admin/message/query.htm";
	}
}
| apache-2.0 |
torrances/swtk-commons | commons-dict-wiktionary/src/main/java/org/swtk/commons/dict/wiktionary/generated/c/e/v/WiktionaryCEV000.java | 990 | package org.swtk.commons.dict.wiktionary.generated.c.e.v; import java.util.Collection; import java.util.HashMap; import java.util.Map; import org.swtk.common.dict.dto.wiktionary.Entry; import com.trimc.blogger.commons.utils.GsonUtils; public class WiktionaryCEV000 { private static Map<String, Entry> map = new HashMap<String, Entry>(); static { add("cevapcici", "{\"term\":\"cevapcici\", \"etymology\":{\"influencers\":[], \"languages\":[], \"text\":\"Anglicised spelling of \u0027ćevapčići\u0027\"}, \"definitions\":{\"list\":[{\"upperType\":\"NOUN\", \"text\":\"A Balkan dish of grilled minced meat\", \"priority\":1}]}, \"synonyms\":{}}");
} private static void add(String term, String json) { map.put(term, GsonUtils.toObject(json, Entry.class)); } public static Entry get(String term) { return map.get(term); } public static boolean has(String term) { return null != get(term); } public static Collection<String> terms() { return map.keySet(); } } | apache-2.0 |
EsriUK/OnlineServicesAddin | DataHubServicesAddin/Properties/AssemblyInfo.cs | 1434 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("DataHubServicesAddin")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("DataHubServicesAddin")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("61f6d0d7-dfac-4c50-8b2d-f6da58c4f51d")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| apache-2.0 |
LegNeato/buck | src/com/facebook/buck/apple/SceneKitAssets.java | 5574 | /*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import com.facebook.buck.apple.toolchain.AppleCxxPlatform;
import com.facebook.buck.core.build.buildable.context.BuildableContext;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.description.BuildRuleParams;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.InternalFlavor;
import com.facebook.buck.core.rulekey.AddToRuleKey;
import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.core.toolchain.tool.Tool;
import com.facebook.buck.io.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.rules.AbstractBuildRuleWithDeclaredAndExtraDeps;
import com.facebook.buck.shell.ShellStep;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.CopyStep;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
/**
 * Buck build rule that compiles SceneKit asset catalogs for an Apple
 * platform. When the toolchain provides a {@code copySceneKitAssets} tool the
 * assets are compiled for the target SDK/OS version; otherwise they are
 * copied into the output directory verbatim.
 */
public class SceneKitAssets extends AbstractBuildRuleWithDeclaredAndExtraDeps {

  public static final Flavor FLAVOR = InternalFlavor.of("scenekit-assets");

  // Tool is absent on toolchains without SceneKit support; then we fall back
  // to a plain directory copy in getBuildSteps().
  @AddToRuleKey private final Optional<Tool> copySceneKitAssets;

  @AddToRuleKey private final ImmutableSet<SourcePath> sceneKitAssetsPaths;

  @AddToRuleKey private final String sdkName;

  @AddToRuleKey private final String minOSVersion;

  // Rule output directory, derived from the build target's gen path.
  private final Path outputDir;

  SceneKitAssets(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      BuildRuleParams params,
      AppleCxxPlatform appleCxxPlatform,
      ImmutableSet<SourcePath> sceneKitAssetsPaths) {
    super(buildTarget, projectFilesystem, params);
    this.sceneKitAssetsPaths = sceneKitAssetsPaths;
    String outputDirString =
        BuildTargets.getGenPath(getProjectFilesystem(), buildTarget, "%s").toString();
    this.outputDir = Paths.get(outputDirString);
    this.sdkName = appleCxxPlatform.getAppleSdk().getName();
    this.minOSVersion = appleCxxPlatform.getMinVersion();
    this.copySceneKitAssets = appleCxxPlatform.getCopySceneKitAssets();
  }

  /**
   * Cleans the output directory, then either invokes copySceneKitAssets for
   * each input catalog (targeting this rule's SDK and minimum OS version) or
   * copies the catalog contents directly when the tool is unavailable.
   */
  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context, BuildableContext buildableContext) {
    ImmutableList.Builder<Step> stepsBuilder = ImmutableList.builder();
    stepsBuilder.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                context.getBuildCellRootPath(), getProjectFilesystem(), outputDir)));
    for (SourcePath inputPath : sceneKitAssetsPaths) {
      Path absoluteInputPath = context.getSourcePathResolver().getAbsolutePath(inputPath);
      if (copySceneKitAssets.isPresent()) {
        stepsBuilder.add(
            new ShellStep(Optional.of(getBuildTarget()), getProjectFilesystem().getRootPath()) {
              @Override
              protected ImmutableList<String> getShellCommandInternal(
                  ExecutionContext executionContext) {
                // copySceneKitAssets <input> -o <output>
                //   --target-platform=<sdk> --target-version=<minOS>
                ImmutableList.Builder<String> commandBuilder = ImmutableList.builder();
                commandBuilder.addAll(
                    copySceneKitAssets.get().getCommandPrefix(context.getSourcePathResolver()));
                commandBuilder.add(
                    absoluteInputPath.toString(),
                    "-o",
                    getProjectFilesystem()
                        .resolve(outputDir)
                        .resolve(absoluteInputPath.getFileName())
                        .toString(),
                    "--target-platform=" + sdkName,
                    "--target-version=" + minOSVersion);

                return commandBuilder.build();
              }

              @Override
              public ImmutableMap<String, String> getEnvironmentVariables(
                  ExecutionContext executionContext) {
                return copySceneKitAssets.get().getEnvironment(context.getSourcePathResolver());
              }

              @Override
              public String getShortName() {
                return "copy-scenekit-assets";
              }
            });
      } else {
        // No compiler available: copy the catalog contents verbatim.
        stepsBuilder.add(
            CopyStep.forDirectory(
                getProjectFilesystem(),
                absoluteInputPath,
                outputDir,
                CopyStep.DirectoryMode.CONTENTS_ONLY));
      }
    }
    buildableContext.recordArtifact(
        context.getSourcePathResolver().getRelativePath(getSourcePathToOutput()));
    return stepsBuilder.build();
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return ExplicitBuildTargetSourcePath.of(getBuildTarget(), outputDir);
  }
}
| apache-2.0 |
icemagno/mclm | src/main/webapp/app/store/LayerTree.js | 2307 | Ext.define('MCLM.store.LayerTree', {
extend: 'Ext.data.TreeStore',
requires: [
'MCLM.model.LayerTreeModel'
],
storeId: 'store.layerTree',
autoSync: false,
model: 'MCLM.model.LayerTreeModel',
remoteFilter: true,
proxy: {
type: 'ajax',
reader: {
type: 'json'
},
api: {
read: 'getLayersCatalogoTreeNode',
update: 'updateLayersCatalogoTreeNode'
},
writer: {
type: 'json',
allowSingle: false,
writeAllFields: true,
encode: true,
rootProperty: 'data'
}
},
root: {
text: 'APOLO',
id: 0,
index: 0,
description: 'Raiz'
},
listeners: {
load: function (store, records) {
// Ao recarregar os filhos de um no, verificar se algum deles ja
// estava selecionado (a camada ja existia no mapa)
// pois ao recarregar eles vem desmarcados.
// Se existir, marca novamente
for (var x = 0; x < records.length; x++) {
var serial = records[x].data.serialId;
var layer = MCLM.Map.findLayerBySerialId(serial);
if (layer) {
records[x].data.checked = true;
records[x].data.selected = true;
}
records[x].data.originalIconCls = records[x].data.iconCls;
records[x].data.errorIconCls = 'error-icon';
records[x].data.loadingIconCls = 'loading-icon';
}
if (store.isFiltered())
store.getRoot().expand(true);
},
nodemove: function (movedNode, oldParentNode, newParentNode, newIndex) {
//Apenas se o usuário for um Admin...
if (MCLM.Globals.isAdmin()) {
var layerTree = Ext.getCmp("layerTree");
var rootNode = layerTree.getRootNode();
movedNode.set('parentNodeId', newParentNode.data.nodeDataId);
rootNode.cascadeBy(node => {
node.set('indexOrder', node.data.index);
node.dirty = true;
});
Ext.getStore('store.layerTree').sync();
}
}
}
}); | apache-2.0 |
gchq/stroom | stroom-ui/src/components/Authentication/usePasswordPolicy.ts | 674 | // Get token config
import { useEffect, useState } from "react";
import { PasswordPolicyConfig } from "api/stroom";
import { useStroomApi } from "lib/useStroomApi";
/**
 * React hook that fetches the password policy configuration once on mount
 * and returns it. Returns undefined until the fetch resolves.
 */
export const usePasswordPolicy = (): PasswordPolicyConfig => {
  const [passwordPolicyConfig, setPasswordPolicyConfig] =
    useState<PasswordPolicyConfig>(undefined);

  const { exec } = useStroomApi();

  useEffect(() => {
    // Fix: removed leftover console.log debug statement.
    exec(
      (api) => api.authentication.fetchPasswordPolicy(),
      (passwordPolicyConfig: PasswordPolicyConfig) =>
        setPasswordPolicyConfig(passwordPolicyConfig),
    );
  }, [exec]);

  return passwordPolicyConfig;
};
| apache-2.0 |
mscook/pyParaTools | ParaUtils.py | 8999 | """Utility methods for paramagnetic observables """
import math
from numpy import *
def ZXZRot(A, B, G, scal=1.0):
    """Build the ZXZ-convention Euler rotation matrix.

    See: http://mathworld.wolfram.com/EulerAngles.html

    @param A:    The (A)lpha angle, in degrees
    @type A :    float
    @param B:    The (B)eta angle, in degrees
    @type B :    float
    @param G:    The (G)amma angle, in degrees
    @type G :    float
    @param scal: (OPTIONAL) scale factor applied to every element, used for
                 the X-tensor frame determination
    @type scal : float
    """
    ca, sa = math.cos(math.radians(A)), math.sin(math.radians(A))
    cb, sb = math.cos(math.radians(B)), math.sin(math.radians(B))
    cg, sg = math.cos(math.radians(G)), math.sin(math.radians(G))
    rows = [
        [cg * ca - cb * sa * sg,  cg * sa + cb * ca * sg,  sg * sb],
        [-sg * ca - cb * sa * cg, -sg * sa + cb * ca * cg, cg * sb],
        [sb * sa,                 -sb * ca,                cb],
    ]
    return array(rows) * scal
def ZYZRot(A, B, G, scal=1.0):
    """Build the ZYZ-convention Euler rotation matrix.

    See: http://mathworld.wolfram.com/EulerAngles.html

    @param A:    The (A)lpha angle, in degrees
    @type A :    float
    @param B:    The (B)eta angle, in degrees
    @type B :    float
    @param G:    The (G)amma angle, in degrees
    @type G :    float
    @param scal: (OPTIONAL) scale factor applied to every element, used for
                 the X-tensor frame determination
    @type scal : float
    """
    ca, sa = math.cos(math.radians(A)), math.sin(math.radians(A))
    cb, sb = math.cos(math.radians(B)), math.sin(math.radians(B))
    cg, sg = math.cos(math.radians(G)), math.sin(math.radians(G))
    rows = [
        [-sg * sa + cb * ca * cg,  sg * ca + cb * sa * cg, -cg * sb],
        [-cg * sa - cb * ca * sg,  cg * ca - cb * sa * sg,  sg * sb],
        [sb * ca,                  sb * sa,                 cb],
    ]
    return array(rows) * scal
def RotX90():
    """
    Returns the rotation matrix for a 90 degree rotation about the X axis:
    1, 0, 0, 0, 0, 1, 0, -1, 0 (row major)
    """
    m = zeros((3, 3))
    m[0, 0] = 1.0
    m[1, 2] = 1.0
    m[2, 1] = -1.0
    return m
def RotY90():
    """
    Returns the rotation matrix for a 90 degree rotation about the Y axis:
    0, 0, -1, 0, 1, 0, 1, 0, 0 (row major)
    """
    m = zeros((3, 3))
    m[0, 2] = -1.0
    m[1, 1] = 1.0
    m[2, 0] = 1.0
    return m
def RotZ90():
    """
    Returns the rotation matrix for a 90 degree rotation about the Z axis:
    0, 1, 0, -1, 0, 0, 0, 0, 1 (row major)
    """
    m = zeros((3, 3))
    m[0, 1] = 1.0
    m[1, 0] = -1.0
    m[2, 2] = 1.0
    return m
def correctRofAngles(cosv, sinv):
    # Combine an acos-derived value (cosv, range [0, pi]) and an asin-derived
    # value (sinv, range [-pi/2, pi/2]) of the same angle into one value in
    # [0, 2*pi), using the pair to resolve the quadrant ambiguity.
    #TODO: Check that this is correct
    if (cosv <= math.pi/2.0):
        # Quadrant I or IV: the asin branch carries the correct sign.
        if (sinv < 0.0):
            # Negative asin result -> quadrant IV; wrap into [0, 2*pi).
            sinv = sinv + 2*math.pi
            return sinv
        else:
            return sinv
    else:
        # Quadrant II or III: the acos branch is used instead.
        if(sinv > 0.0):
            return cosv
        else:
            # sin <= 0 -> quadrant III; mirror the acos value about 2*pi.
            return -1*(cosv) +2*math.pi
def ABGFromRotMatrixZYZ(rotMat):
    # Recover the (alpha, beta, gamma) ZYZ Euler angles, in radians, from a
    # 3x3 rotation matrix -- the inverse of ZYZRot (up to degree/radian units).
    # alpha and gamma are each derived twice (via acos and asin) and the two
    # candidates are reconciled by correctRofAngles to fix the quadrant.
    # NOTE(review): divides by sin(beta); a gimbal-locked matrix
    # (rotMat[2,2] == +/-1) raises here -- confirm callers avoid that case.
    #TODO: Check these are correct!
    #TODO: Add the corresponding ZXZ method
    b_c = math.acos(rotMat[2,2])
    a_c = math.acos(rotMat[2,0]/math.sin(b_c))
    g_c = math.acos(-1*rotMat[0,2]/math.sin(b_c))
    a_s = math.asin(rotMat[2,1]/math.sin(b_c))
    g_s = math.asin(rotMat[1,2]/math.sin(b_c))
    aE = correctRofAngles(a_c, a_s)
    bE = b_c
    gE = correctRofAngles(g_c, g_s)
    return aE, bE, gE
def FromVVU(AxorRh):
    """
    Converts an axial or rhombic component out of van Vleck units
    (vvu = m3/3.77 10-35).
    @param AxorRh: Axial or Rhombic component, in vvu
    @type AxorRh : float
    """
    scale = (1. / (12 * math.pi)) * 10000
    return AxorRh / scale
def ToVVU(AxorRh):
    """
    Converts an axial or rhombic component into van Vleck units
    (vvu = m3/3.77 10-35).
    @param AxorRh: Axial or Rhombic component
    @type AxorRh : float
    """
    scale = (1. / (12 * math.pi)) * 10000
    return AxorRh * scale
def FixAngle(angle):
    """
    Normalizes an Euler angle into [0, 360) degrees. Optimizers return
    angles unbounded in either direction; this maps any finite angle back
    onto the principal range.
    @param angle: An Euler angle (degrees) determined from the optimization
    @type angle: float
    @return: the equivalent angle in [0, 360)
    """
    # Single float modulo replaces the original repeated +/-360 loops:
    # O(1), and no accumulated floating-point error for far-out angles.
    # (Python's % takes the sign of the divisor, so the result is >= 0.)
    return angle % 360.0
def SwapVals(val1, val2):
    """
    Returns the two values with their order exchanged.
    @param val1: first value
    @param val2: second value
    @return: the tuple (val2, val1)
    """
    # Tuple packing replaces the redundant temp-variable shuffle.
    return val2, val1
def lookupMGR(spin_type):
    """
    Returns the gyromagnetic ratio(s), in rad/s/T, for the given nucleus.
    See: http://nmrwiki.org/wiki/index.php?title=Gyromagnetic_ratio
    @param spin_type: one of 'H', 'C', 'N' or 'O'; for 'N' a two-element
                      list [14N ratio, 15N ratio] is returned
    """
    #TODO: These need to be checked
    two_pi = 2 * math.pi
    ratios = {
        'H': two_pi * 42.576 * 1e6,
        'C': two_pi * 10.705 * 1e6,
        'N': [two_pi * 3.0766 * 1e6, two_pi * -4.315 * 1e6],
        'O': two_pi * -5.7716 * 1e6,
    }
    return ratios[spin_type]
def rdcScal(S, g1, g2, B0, temp):
    """
    Scaling constant for RDC calculations.
    @param S   : order parameter
    @param g1  : gyromagnetic ratio of spin 1
    @param g2  : gyromagnetic ratio of spin 2
    @param B0  : magnetic field strength
    @param temp: temperature
    """
    #TODO: These need to be checked
    hbar = 1.05457148e-34    # reduced Planck constant
    kboltz = 1.3806503e-23   # Boltzmann constant
    numerator = -S * g1 * g2 * hbar * B0 * B0
    denominator = 8 * 15 * math.pi * math.pi * kboltz * temp
    return (numerator / denominator) * 0.01
def FitSummary(soln,cov,info,mesg,success, p0, y_meas, tof):
    """
    Prints a human-readable report of a least-squares fit (Python 2 print
    syntax) and returns the reduced chi-squared.
    @param soln   : optimized parameter vector
    @param cov    : covariance matrix returned by the optimizer
    @param info   : optimizer info dict; info["fvec"] holds the residuals
    @param mesg   : optimizer message, reported when the fit failed
    @param success: 1 if the optimizer reports convergence
    @param p0     : initial parameter vector
    @param y_meas : measured data (only its length is used, for the dof)
    @param tof    : type-of-fit selector 0..6 (see the f_type mapping below);
                    controls which parameter indices are tensor components
                    (converted out of VVU) and which are angles (wrapped)
    """
    scal = 1.0
    if tof == 2 or tof == 3:
        #The effective strength of the X-tensor is 1/2ved in monomer fits
        scal = 2.0
    f_type = { \
    0:'Standard X-tensor optimization', \
    1:'Standard X-tensor optimization (fixed metal position)', \
    2:'X-tensor optimization to dimer', \
    3:'X-tensor optimization to dimer (fixed metal position)', \
    4:'2 X-tensors to a monomer', \
    5:'2 X-tensors (1 fixed metal site) to a monomer', \
    6:'2 X-tensors (2 fixed metal sites) to a monomer' }
    print 80*'-'
    print "Fitting Results: ", f_type[tof]
    print 80*'-'
    if success==1:
        print "We have converged to a minima"
    else:
        print "We have failed to converge"
        print "REASON:", mesg
    # calculate final chi square
    chisq=sum(info["fvec"]*info["fvec"])
    dof=len(y_meas)-len(p0)
    # chisq, sqrt(chisq/dof) agrees with gnuplot
    print "* Converged with chi squared: ",chisq
    print "* Degrees of freedom, dof: ", dof
    print "* RMS of residuals (i.e. sqrt(chisq/dof)): ", sqrt(chisq/dof)
    print "* Reduced chisq (i.e. variance of residuals): ", chisq/dof
    print
    # uncertainties are calculated as per gnuplot, "fixing" the result
    # for non unit values of the reduced chisq.
    # values at min match gnuplot
    print "Fitted parameters at minimum, with 68% C.I.:"
    print "%s%7s%11s%13s" % ("Param", "Init", "Final", "Error")
    #NOTE: The confidence intervals may not be correct due to conversion to VVU etc.
    # Per fit layout: tensor-component indices are converted out of VVU (and
    # scaled by scal for dimer fits), angle indices are wrapped into
    # [0, 360), and all other parameters are printed raw.
    if tof == 0 or tof == 2 or tof ==4:
        for i,pmin in enumerate(soln):
            if i == 3 or i == 4 or i == 11 or i == 12:
                #NOTE: The scal factor is dimer specific
                print "%3i %7s %13.4f +/- %8f"%(i+1,FromVVU(p0[i]),scal*(FromVVU(pmin)),scal*(FromVVU(sqrt(cov[i,i])*sqrt(chisq/dof))))
            elif i == 5 or i == 6 or i ==7 or i == 13 or i == 14 or i == 15:
                print "%3i %7s %13.4f +/- %8f"%(i+1,FixAngle(p0[i]),FixAngle(pmin),sqrt(cov[i,i])*sqrt(chisq/dof))
            else:
                print "%3i %7s %13.4f +/- %8f"%(i+1,p0[i],pmin,sqrt(cov[i,i])*sqrt(chisq/dof))
    if tof == 1 or tof == 3 or tof == 5:
        for i,pmin in enumerate(soln):
            if i == 0 or i == 1 or i == 8 or i == 9:
                #NOTE: The scal factor is dimer specific
                print "%3i %7s %13.4f +/- %8f"%(i+1,FromVVU(p0[i]),scal*(FromVVU(pmin)),scal*(FromVVU(sqrt(cov[i,i])*sqrt(chisq/dof))))
            elif i == 2 or i == 3 or i ==4 or i == 10 or i == 11 or i == 12:
                print "%3i %7s %13.4f +/- %8f"%(i+1,FixAngle(p0[i]),FixAngle(pmin),sqrt(cov[i,i])*sqrt(chisq/dof))
            else:
                print "%3i %7s %13.4f +/- %8f"%(i+1,p0[i],pmin,sqrt(cov[i,i])*sqrt(chisq/dof))
    if tof == 6:
        for i,pmin in enumerate(soln):
            if i == 0 or i == 1 or i == 5 or i == 6:
                #NOTE: The scal factor is dimer specific
                print "%3i %7s %13.4f +/- %8f"%(i+1,FromVVU(p0[i]),scal*(FromVVU(pmin)),scal*(FromVVU(sqrt(cov[i,i])*sqrt(chisq/dof))))
            elif i == 2 or i == 3 or i == 4 or i == 7 or i == 8 or i == 9:
                print "%3i %7s %13.4f +/- %8f"%(i+1,FixAngle(p0[i]),FixAngle(pmin),sqrt(cov[i,i])*sqrt(chisq/dof))
            else:
                print "%3i %7s %13.4f +/- %8f"%(i+1,p0[i],pmin,sqrt(cov[i,i])*sqrt(chisq/dof))
    print 80*'-'
    print
    return chisq/dof
| apache-2.0 |
grahamgilbert/macnamer | namer/migrations/0006_auto__add_field_computergroup_key__add_unique_network_network.py | 2588 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Forward schema migration: add the nullable unique CharField
        # ComputerGroup.key, then enforce uniqueness on Network.network.
        # Adding field 'ComputerGroup.key'
        db.add_column(u'namer_computergroup', 'key',
                      self.gf('django.db.models.fields.CharField')(max_length=255, unique=True, null=True, blank=True),
                      keep_default=False)
        # Adding unique constraint on 'Network', fields ['network']
        db.create_unique(u'namer_network', ['network'])
    def backwards(self, orm):
        # Reverse migration: undo the forward changes in the opposite order.
        # Removing unique constraint on 'Network', fields ['network']
        db.delete_unique(u'namer_network', ['network'])
        # Deleting field 'ComputerGroup.key'
        db.delete_column(u'namer_computergroup', 'key')
models = {
u'namer.computer': {
'Meta': {'ordering': "['name']", 'object_name': 'Computer'},
'computergroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['namer.ComputerGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_checkin': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'serial': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
u'namer.computergroup': {
'Meta': {'object_name': 'ComputerGroup'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'prefix': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'namer.network': {
'Meta': {'ordering': "['network']", 'object_name': 'Network'},
'computergroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['namer.ComputerGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'network': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
}
}
complete_apps = ['namer'] | apache-2.0 |
pravega/pravega | segmentstore/storage/src/main/java/io/pravega/segmentstore/storage/chunklayer/ChunkStorageException.java | 1565 | /**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.segmentstore.storage.chunklayer;
import lombok.Getter;
import java.io.IOException;
/**
 * Generic exception related to chunk storage operations.
 *
 * Carries the name of the chunk the failing operation was acting on,
 * exposed via {@link #getChunkName()}.
 */
public class ChunkStorageException extends IOException {
    private final String chunkName;

    /**
     * Creates a new instance of the exception.
     *
     * @param chunkName The name of the chunk.
     * @param message   The message for this exception.
     */
    public ChunkStorageException(String chunkName, String message) {
        super(message);
        this.chunkName = chunkName;
    }

    /**
     * Creates a new instance of the exception.
     *
     * @param chunkName The name of the chunk.
     * @param message   The message for this exception.
     * @param cause     The causing exception.
     */
    public ChunkStorageException(String chunkName, String message, Throwable cause) {
        super(message, cause);
        this.chunkName = chunkName;
    }

    /**
     * Gets the name of the chunk this exception refers to.
     *
     * @return the chunk name.
     */
    public String getChunkName() {
        return this.chunkName;
    }
}
| apache-2.0 |
adriespina/testcuv | CuvooApi/CuvooApi/Areas/HelpPage/HelpPageConfigurationExtensions.cs | 24070 | using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using CuvooApi.Areas.HelpPage.ModelDescriptions;
using CuvooApi.Areas.HelpPage.Models;
namespace CuvooApi.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
        // Key prefix under which generated HelpPageApiModel instances are
        // cached in HttpConfiguration.Properties (see GetHelpPageApiModel).
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";
        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }
        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }
        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" matches any parameter combination for this action.
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }
        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }
        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }
        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }
        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }
        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }
        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }
        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily creates and caches one sample generator per configuration.
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }
        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            // Replaces any previously cached generator for this configuration.
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }
        /// <summary>
        /// Gets the model description generator.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            // Lazily initialized (and pre-populated) once per configuration.
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config))
        }
        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                // Not cached yet: find the matching ApiDescription (friendly-ID
                // match, case-insensitive) and build + cache the model.
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }
            // Remains null when no matching ApiDescription was found.
            return (HelpPageApiModel)model;
        }
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            // Builds the complete help-page model for one API: URI parameters,
            // request/resource descriptions and sample request/response messages.
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };
            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);
            return apiModel;
        }
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            // Builds the model's list of URI-bound parameters. Complex types
            // without a string TypeConverter are flattened into their
            // properties; everything else is listed directly, annotated with
            // "Required" and default-value notes from the descriptor.
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }
                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null
                        && !IsBindableWithTypeConverter(parameterType))
                    {
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter =
                            AddParameterDescription(apiModel, apiParameter, typeDescription);
                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }
                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);
                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }
private static bool IsBindableWithTypeConverter(Type parameterType)
{
if (parameterType == null)
{
return false;
}
return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
            ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            // Creates a ParameterDescription for the given API parameter,
            // appends it to the model's URI parameters, and returns it so the
            // caller can attach annotations (e.g. "Required", default values).
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };
            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            // Describes the request body: either the FromBody parameter's type,
            // or -- for actions taking a raw HttpRequestMessage -- the actual
            // payload type registered via SetActualRequestType.
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    // Resolve the real content type for HttpRequestMessage parameters;
                    // null means no actual type was registered for this action.
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
Type responseType = response.ResponseType ?? response.DeclaredType;
if (responseType != null && responseType != typeof(void))
{
apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
}
}
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            // Collects sample requests/responses for the action. Any sample
            // that failed to generate is surfaced on the model as an error
            // message instead of failing the whole help page.
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
                    "An exception has occurred while generating the sample. Exception message: {0}",
                    HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            // Finds the parameter that represents the request resource: the
            // FromBody parameter, or an HttpRequestMessage parameter whose
            // actual payload type was registered via SetActualRequestType.
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));
            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }
            resourceType = parameterDescription.ParameterDescriptor.ParameterType;
            if (resourceType == typeof(HttpRequestMessage))
            {
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }
            if (resourceType == null)
            {
                // HttpRequestMessage parameter with no registered actual type.
                parameterDescription = null;
                return false;
            }
            return true;
        }
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            // Pre-populates the generator with a description for every resource
            // type reachable through the ApiExplorer, so the model index is
            // complete before the first help page renders.
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }
            return modelGenerator;
        }
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
InvalidSample invalidSample = sample as InvalidSample;
if (invalidSample != null)
{
apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
}
| apache-2.0 |
groupe-sii/ogham | ogham-core/src/test/java/mock/condition/annotation/SeveralRequiredClasses.java | 217 | package mock.condition.annotation;
import fr.sii.ogham.core.builder.condition.RequiredClasses;
// Test fixture: declares an activation condition requiring that all three
// listed class names be present on the classpath (exercises the handling
// of multiple values in @RequiredClasses).
@RequiredClasses({"class.required.1", "class.required.2", "class.required.3"})
public class SeveralRequiredClasses {
}
| apache-2.0 |
zhengxgs/elasticsearch-2.4.1 | core/src/test/java/org/elasticsearch/search/SearchServiceTests.java | 7747 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
import org.elasticsearch.test.ESSingleNodeTestCase;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.search.internal.ShardSearchLocalRequest;
import org.elasticsearch.search.query.QuerySearchResultProvider;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
public class SearchServiceTests extends ESSingleNodeTestCase {
    // Use a fresh node per test: some tests close/stop the SearchService,
    // which would leave a shared node unusable for subsequent tests.
    @Override
    protected boolean resetNodeAfterTest() {
        return true;
    }
    // Closing the SearchService must release all open (scroll) search contexts.
    public void testClearOnClose() throws ExecutionException, InterruptedException {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefresh(true).get();
        SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get();
        assertThat(searchResponse.getScrollId(), is(notNullValue()));
        SearchService service = getInstanceFromNode(SearchService.class);
        assertEquals(1, service.getActiveContexts());
        service.doClose(); // this kills the keep-alive reaper we have to reset the node after this test
        assertEquals(0, service.getActiveContexts());
    }
    // Same as testClearOnClose, but stopping (doStop) rather than closing.
    public void testClearOnStop() throws ExecutionException, InterruptedException {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefresh(true).get();
        SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get();
        assertThat(searchResponse.getScrollId(), is(notNullValue()));
        SearchService service = getInstanceFromNode(SearchService.class);
        assertEquals(1, service.getActiveContexts());
        service.doStop();
        assertEquals(0, service.getActiveContexts());
    }
    // Deleting an index must free any search contexts that reference it.
    public void testClearIndexDelete() throws ExecutionException, InterruptedException {
        createIndex("index");
        client().prepareIndex("index", "type", "1").setSource("field", "value").setRefresh(true).get();
        SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get();
        assertThat(searchResponse.getScrollId(), is(notNullValue()));
        SearchService service = getInstanceFromNode(SearchService.class);
        assertEquals(1, service.getActiveContexts());
        assertAcked(client().admin().indices().prepareDelete("index"));
        assertEquals(0, service.getActiveContexts());
    }
public void testSearchWhileContextIsFreed() throws IOException, InterruptedException {
createIndex("index");
client().prepareIndex("index", "type", "1").setSource("field", "value").setRefresh(true).get();
final SearchService service = getInstanceFromNode(SearchService.class);
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
final IndexService indexService = indicesService.indexServiceSafe("index");
final IndexShard indexShard = indexService.shard(0);
final AtomicBoolean running = new AtomicBoolean(true);
final CountDownLatch startGun = new CountDownLatch(1);
final Semaphore semaphore = new Semaphore(Integer.MAX_VALUE);
final AtomicLong contextId = new AtomicLong(0);
final Thread thread = new Thread() {
@Override
public void run() {
startGun.countDown();
while(running.get()) {
service.freeContext(contextId.get());
if (randomBoolean()) {
// here we trigger some refreshes to ensure the IR go out of scope such that we hit ACE if we access a search
// context in a non-sane way.
try {
semaphore.acquire();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
client().prepareIndex("index", "type").setSource("field", "value")
.setRefresh(randomBoolean()).execute(new ActionListener<IndexResponse>() {
@Override
public void onResponse(IndexResponse indexResponse) {
semaphore.release();
}
@Override
public void onFailure(Throwable e) {
semaphore.release();
}
});
}
}
}
};
thread.start();
startGun.await();
try {
final int rounds = scaledRandomIntBetween(100, 10000);
for (int i = 0; i < rounds; i++) {
try {
QuerySearchResultProvider querySearchResultProvider = service.executeQueryPhase(
new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT,
new BytesArray(""), new String[0], false));
contextId.set(querySearchResultProvider.id());
IntArrayList intCursors = new IntArrayList(1);
intCursors.add(0);
ShardFetchSearchRequest req = new ShardFetchSearchRequest(new SearchRequest()
,querySearchResultProvider.id(), intCursors, null /* not a scroll */);
service.executeFetchPhase(req);
} catch (AlreadyClosedException ex) {
throw ex;
} catch (IllegalStateException ex) {
assertEquals("search context is already closed can't increment refCount current count [0]", ex.getMessage());
} catch (SearchContextMissingException ex) {
// that's fine
}
}
} finally {
running.set(false);
thread.join();
semaphore.acquire(Integer.MAX_VALUE);
}
}
}
| apache-2.0 |
mylxsw/arsenals | Arsenals/Core/Benchmark.php | 1044 | <?php
namespace Arsenals\Core;
// Deny direct access when the framework bootstrap has not run.
if (!defined('APP_NAME')) {
    exit('Access Denied!');
}
/**
 * Benchmark timing class.
 *
 * Records named time markers and computes the elapsed time between two of
 * them. Modeled on the CodeIgniter framework's Benchmark class.
 *
 * @author 管宜尧<mylxsw@126.com>
 */
class Benchmark
{
    /** @var array Map of marker name => microtime() string ("msec sec"). */
    private $_marker = [];

    /**
     * Record a time marker under the given name.
     *
     * @param string $name marker name
     */
    public function mark($name)
    {
        $this->_marker[$name] = microtime();
    }

    /**
     * Compute the elapsed seconds between two markers.
     *
     * If $point1 was never marked, an empty string is returned. If $point2
     * was never marked, it is set to "now", so a single marker measures the
     * time since $point1.
     *
     * @param string $point1   start marker name
     * @param string $point2   end marker name (defaults to current time)
     * @param int    $decimals decimal places in the formatted result
     *
     * @return string elapsed seconds via number_format(), or '' when
     *                $point1 is unknown
     */
    public function elapsedTime($point1, $point2 = '', $decimals = 4)
    {
        if (!isset($this->_marker[$point1])) {
            return '';
        }
        if (!isset($this->_marker[$point2])) {
            $this->_marker[$point2] = microtime();
        }
        // microtime() returns "msec sec"; sum the halves before subtracting.
        list($sm, $ss) = explode(' ', $this->_marker[$point1]);
        list($em, $es) = explode(' ', $this->_marker[$point2]);
        return number_format(($em + $es) - ($sm + $ss), $decimals);
    }
}
| apache-2.0 |
uakfdotb/Billing | src/App/AdminBundle/Business/ServerGroup/CreateModel.php | 183 | <?php
namespace App\AdminBundle\Business\ServerGroup;
// Form-backing model for creating a server group; properties are populated
// by the framework's data binding, hence public visibility and no accessors.
class CreateModel
{
    public $name;        // group name
    public $description; // free-form description
    public $type;        // group type identifier
    public $choiceLogic; // server selection logic for the group
    public $primary;     // whether this is the primary group
}
johnnAlex/Evaluador_Sintactico | ProyectoSintactico/src/model/utilities/Useful.java | 6736 | package model.utilities;
import java.util.Comparator;
import model.entities.ListVariables;
import model.entities.ListsReserved;
import model.entities.ListTerms;
import model.logic.List;
import persistence.Texto;
/**
 * Helper for the syntactic evaluator: loads a source file into one string,
 * normalises spacing around grammar terms, tokenises the text into a word
 * list and validates that list against a set of numbered grammar rules.
 *
 * NOTE(review): scratch state (cad1, cad2, i, answer) lives in fields shared
 * by many tiny helper methods, so this class is not reentrant and its public
 * methods must be invoked in the order the parsing workflow expects.
 */
public class Useful {
    // Terms at index <= 10 in the term list are single characters; past this
    // index assignAux2() swaps in listTerms.getData() instead (see addGap()).
    private static final int TERMS_QUANTITY_SINGLE_CHAR = 10;
    private String text;                 // full text of the loaded file
    private ListTerms listTerms;         // grammar terms to pad with spaces
    private ListsReserved listsReserved; // reserved words, one list per rule
    private ListVariables listVariables; // variables seen so far
    private String cad1;                 // scratch string #1
    private String cad2;                 // scratch string #2
    private StringBuilder buildText;     // accumulator used when rebuilding text
    private List<String> listTextString; // tokenised words of the text
    private int i;                       // shared loop index (mutated by helpers)
    private boolean answer = true;       // shared rule-check result

    public Useful() {
        listTerms = new ListTerms();
        listTextString = new List<String>();
        listTerms.getList().setComparador(compareCaracter);
        listsReserved = new ListsReserved();
        listVariables = new ListVariables();
        listVariables.setComparator(compareCaracter);
    }

    // Reset the text accumulator.
    private void createBuildText() {
        buildText = new StringBuilder();
    }

    /** Read the whole file into {@code text} (lines joined with spaces). */
    public void readFile(String nameFile) {
        createFile(nameFile);
        openFile();
        readText();
        closeFile();
    }
    private void closeFile() {
        Texto.close();
    }
    private void openFile() {
        Texto.open();
    }
    private void createFile(String nameFile) {
        Texto.CrearArchivoTexto((nameFile));
    }
    private void readText() {
        createBuildText();
        readFileText();
        asignText();
    }
    private void asignText() {
        this.text = buildText.toString();
    }
    private void readFileText() {
        String line;
        // Each source line is appended followed by a space separator.
        while ((line = Texto.read()) != null)
            buildText.append(line + " ");
    }

    /** Surround every known grammar term in {@code text} with spaces. */
    public void addGap() {
        for (i = 0; i < listTerms.length(); i++) {
            assignStringsAux();
            assignAux2();
            replaceDataReserved();
        }
    }
    private void assignStringsAux() {
        cad1 = listTerms.get(i);
        cad2 = cad1;
    }
    private void assignAux2() {
        // NOTE(review): past the single-char terms, the replacement text is
        // taken from listTerms.getData() -- presumably an internal cursor
        // aligned with index i; confirm against ListTerms.
        if (i > TERMS_QUANTITY_SINGLE_CHAR) {
            cad2 = listTerms.getData();
        }
    }
    private void replaceDataReserved() {
        text = text.replace(cad1, (" " + cad2 + " "));
    }

    /** Collapse runs of spaces in {@code text} into single spaces. */
    public void removeGapUnnecessary() {
        createBuildText();
        removeTabs();
        removeGaps();
        asignText();
    }
    private void removeGaps() {
        for (i = 0; i < text.length(); i++) {
            assignStringAux();
            lookCharacterIsGap();
            concatenateNewText();
        }
    }
    private void assignStringAux() {
        cad1 = text.charAt(i) + "";
    }
    private void lookCharacterIsGap() {
        if (isGap()) {
            deleteGaps();
            uniteStringsAux();
        }
    }
    private void concatenateNewText() {
        buildText.append(cad1);
    }
    private void uniteStringsAux() {
        cad1 += cad2;
    }
    private boolean isGap() {
        // NOTE(review): advances the shared index i via (i += 1) as a side
        // effect -- the enclosing loop in removeGaps() relies on this.
        return cad1.equals(" ") && (i += 1) < text.length();
    }
    private void deleteGaps() {
        // Skip consecutive spaces; leaves cad2 holding the first non-space.
        while (i < text.length() && (cad2 = text.charAt(i) + "").equals(" "))
            i++;
    }

    /** Split {@code text} on spaces into {@code listTextString}. */
    public void splitTextInList() {
        resetStringAux();
        fillListString();
    }
    private void fillListString() {
        for (i = 0; i < text.length(); i++) {
            validateAndAssign();
        }
    }
    private void validateAndAssign() {
        if (isCharacterWhite()) {
            addWordToList();
        } else
            assignWord();
    }
    private void addWordToList() {
        addToListString();
        resetStringAux();
    }
    private void addToListString() {
        listTextString.add(cad1);
    }
    private boolean isCharacterWhite() {
        return text.charAt(i) == ' ';
    }
    private void assignWord() {
        cad1 += text.charAt(i);
    }
    private void resetStringAux() {
        cad1 = "";
    }

    /**
     * True when the first word in the list is a valid identifier: a letter
     * followed by letters or digits only.
     */
    public boolean isVariant() {
        boolean answer = false;
        String auxList = listTextString.getNodefirst().getInfo();
        if (Character.isLetter(auxList.charAt(0))) {
            answer = true;
            for (int i = 1; i < auxList.length(); i++) {
                if (Character.isLetterOrDigit(auxList.charAt(i))) {
                    answer = true;
                } else {
                    answer = false;
                    break;
                }
            }
        }
        return answer;
    }

    /**
     * True when the next word (consumed via listTextString.get()) is made of
     * digits only. Returns false for an empty word.
     */
    public boolean isInteger() {
        boolean answer = false;
        String auxList = listTextString.get();
        for (int i = 0; i < auxList.length(); i++) {
            if (Character.isDigit(auxList.charAt(i))) {
                answer = true;
            } else {
                answer = false;
                break;
            }
        }
        return answer;
    }

    // Lexicographic comparator shared by the term and variable lists.
    private Comparator<String> compareCaracter = new Comparator<String>() {
        @Override
        public int compare(String o1, String o2) {
            return o1.compareTo(o2);
        }
    };

    public void showLists() {
        listTextString.showInfo();
    }

    // NOTE(review): the satisfy*Rule methods below consume words through
    // listTextString.get() and reserved words through listsReserved's
    // internal cursors, so each call advances hidden state; the methods are
    // single-use per parse and order-dependent -- verify against callers.
    public boolean satisfyFirstRule() {
        for (int i = 0; i < listsReserved.lengthFirstList() && answer; i++) {
            if (listTextString.get().equals(listsReserved.get())) {
                answer = true;
            } else {
                answer = false;
            }
        }
        return answer;
    }

    /** True when the first word is an already-declared variable. */
    public boolean isVariable() {
        return listVariables
                .isVariable(listTextString.getNodefirst().getInfo());
    }

    public boolean satisfySecondRule() {
        // A declaration: a fresh identifier (not yet a variable) followed by
        // the rule-2 reserved tokens, with a variable-or-number in between.
        answer = isVariant() && !isVariable();
        if (answer) {
            listVariables.addTerm(listTextString.get());
            listsReserved.resetVarI();
            if (listTextString.get().equals(listsReserved.getSecondRule())) {
                validateVarOrNum();
                if (answer) {
                    if (!(listTextString.get().equals(listsReserved
                            .getSecondRule()))) {
                        answer = false;
                    }
                }
            } else {
                answer = false;
            }
        }
        return answer;
    }

    public boolean satisfyThirdRule() {
        // variable-or-number, conditional operator, variable-or-number,
        // terminated by ';'.
        validateVarOrNum();
        if (answer) {
            if (listsReserved.isConditional(listTextString.get())) {
                validateVarOrNum();
            } else {
                answer = false;
            }
        }
        answer = listTextString.get().equals(";");
        return answer;
    }

    // Sets the shared flag: next token must be a known variable or an
    // integer literal. A matched variable is removed from the word list.
    private void validateVarOrNum() {
        answer = isVariable();
        if (!answer) {
            answer = isInteger();
        } else {
            listTextString.removeIndex(0);
        }
    }

    public boolean satisfyFourthtRule() {
        answer = isVariable();
        if (answer) {
            listTextString.removeIndex(0);
            answer = listsReserved.isOperator(listTextString.get());
            if (answer) {
                for (i = 1; i < listsReserved.lengthFourthList(); i++) {
                    answer = listTextString.get().equals(
                            listsReserved.getFourthRule());
                }
                if (answer) {
                    answer = listTextString.getFinal().equals("}");
                    if (answer)
                        listTextString.removeFinal();
                }
            }
        }
        return answer;
    }

    public boolean satisfyFifthRule() {
        listsReserved.resetVarI();
        return listTextString.getNodefirst().getInfo().equals("for");
    }

    public boolean satisfySeventhRule() {
        return listTextString.get().equals("");
    }

    public boolean satisfySixthRule() {
        listsReserved.resetVarI();
        answer = listTextString.get().equals(listsReserved.getSixthRule());
        if (answer) {
            if (listTextString.get().equals(listsReserved.getSixthRule())) {
                answer = isVariable();
                if (answer) {
                    listTextString.removeIndex(0);
                    if (listTextString.get().equals(
                            listsReserved.getSixthRule())) {
                        answer = listTextString.get().equals(
                                listsReserved.getSixthRule());
                    }
                }
            }
        }
        return answer;
    }
}
| apache-2.0 |
wyyyy/library | src/com/library/bean/EntityZh/RootZhihu.java | 3607 | package com.library.bean.EntityZh;
import java.io.Serializable;
import java.util.List;
/**
 * Data-transfer bean for a Zhihu (zhihu.com) column article as parsed from
 * the API response: plain private fields with one getter/setter pair each.
 */
public class RootZhihu implements Serializable
{
    /** Serialization version for {@link Serializable} compatibility. */
    private static final long serialVersionUID = -5518677861267068851L;
    private String rating;
    private String sourceUrl;
    private String publishedTime;
    private Links links;
    private Author author;
    private Column column;
    // NOTE(review): field name carries a double 's'; kept as-is because
    // renaming a serialized field can break stream compatibility.
    private List<Topics> topicss;
    private String title;
    private String titleImage;
    private String summary;
    private String content;
    private String url;
    private String state;
    private String href;
    private Meta meta;
    private String commentPermission;
    private String snapshotUrl;
    private boolean canComment;
    private int slug;
    private int commentsCount;
    private int likesCount;

    // Plain bean accessors below: no validation, no defensive copies.
    public void setRating(String rating)
    {
        this.rating = rating;
    }
    public String getRating()
    {
        return this.rating;
    }
    public void setSourceUrl(String sourceUrl)
    {
        this.sourceUrl = sourceUrl;
    }
    public String getSourceUrl()
    {
        return this.sourceUrl;
    }
    public void setPublishedTime(String publishedTime)
    {
        this.publishedTime = publishedTime;
    }
    public String getPublishedTime()
    {
        return this.publishedTime;
    }
    public void setLinks(Links links)
    {
        this.links = links;
    }
    public Links getLinks()
    {
        return this.links;
    }
    public void setAuthor(Author author)
    {
        this.author = author;
    }
    public Author getAuthor()
    {
        return this.author;
    }
    public void setColumn(Column column)
    {
        this.column = column;
    }
    public Column getColumn()
    {
        return this.column;
    }
    public void setTopics(List<Topics> topics)
    {
        this.topicss = topics;
    }
    public List<Topics> getTopics()
    {
        return this.topicss;
    }
    public void setTitle(String title)
    {
        this.title = title;
    }
    public String getTitle()
    {
        return this.title;
    }
    public void setTitleImage(String titleImage)
    {
        this.titleImage = titleImage;
    }
    public String getTitleImage()
    {
        return this.titleImage;
    }
    public void setSummary(String summary)
    {
        this.summary = summary;
    }
    public String getSummary()
    {
        return this.summary;
    }
    public void setContent(String content)
    {
        this.content = content;
    }
    public String getContent()
    {
        return this.content;
    }
    public void setUrl(String url)
    {
        this.url = url;
    }
    public String getUrl()
    {
        return this.url;
    }
    public void setState(String state)
    {
        this.state = state;
    }
    public String getState()
    {
        return this.state;
    }
    public void setHref(String href)
    {
        this.href = href;
    }
    public String getHref()
    {
        return this.href;
    }
    public void setMeta(Meta meta)
    {
        this.meta = meta;
    }
    public Meta getMeta()
    {
        return this.meta;
    }
    public void setCommentPermission(String commentPermission)
    {
        this.commentPermission = commentPermission;
    }
    public String getCommentPermission()
    {
        return this.commentPermission;
    }
    public void setSnapshotUrl(String snapshotUrl)
    {
        this.snapshotUrl = snapshotUrl;
    }
    public String getSnapshotUrl()
    {
        return this.snapshotUrl;
    }
    public void setCanComment(boolean canComment)
    {
        this.canComment = canComment;
    }
    public boolean getCanComment()
    {
        return this.canComment;
    }
    public void setSlug(int slug)
    {
        this.slug = slug;
    }
    public int getSlug()
    {
        return this.slug;
    }
    public void setCommentsCount(int commentsCount)
    {
        this.commentsCount = commentsCount;
    }
    public int getCommentsCount()
    {
        return this.commentsCount;
    }
    public void setLikesCount(int likesCount)
    {
        this.likesCount = likesCount;
    }
    public int getLikesCount()
    {
        return this.likesCount;
    }
}
phenoxim/nova | nova/utils.py | 45083 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import copy
import datetime
import functools
import hashlib
import inspect
import os
import random
import re
import shutil
import tempfile
import time
import eventlet
from keystoneauth1 import exceptions as ks_exc
from keystoneauth1 import loading as ks_loading
import netaddr
from os_service_types import service_types
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_context import context as common_context
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
from oslo_utils import units
import six
from six.moves import range
import nova.conf
from nova import exception
from nova.i18n import _, _LE, _LI, _LW
import nova.network
from nova import safe_utils
# osprofiler is an optional dependency; this is None when it is absent.
profiler = importutils.try_import('osprofiler.profiler')

CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)

_IS_NEUTRON = None

# Lock decorator factory; all locks created through it share the 'nova-'
# name prefix.
synchronized = lockutils.synchronized_with_prefix('nova-')

# Prefix applied to image properties when stored in system_metadata.
SM_IMAGE_PROP_PREFIX = "image_"
# Image properties that snapshots inherit from the base image.
SM_INHERITABLE_KEYS = (
    'min_ram', 'min_disk', 'disk_format', 'container_format',
)
# Keys which hold large structured data that won't fit in the
# size constraints of the system_metadata table, so we avoid
# storing and/or loading them.
SM_SKIP_KEYS = (
    # Legacy names
    'mappings', 'block_device_mapping',
    # Modern names
    'img_mappings', 'img_block_device_mapping',
)
# Image attributes which Cinder stores in volume image metadata
# as regular properties
VIM_IMAGE_ATTRIBUTES = (
    'image_id', 'image_name', 'size', 'checksum',
    'container_format', 'disk_format', 'min_ram', 'min_disk',
)

# Cache of file contents, keyed by filename.
_FILE_CACHE = {}

_SERVICE_TYPES = service_types.ServiceTypes()

# inspect.getargspec was deprecated (and later removed) in Python 3;
# prefer getfullargspec when it exists.
if hasattr(inspect, 'getfullargspec'):
    getargspec = inspect.getfullargspec
else:
    getargspec = inspect.getargspec
def get_root_helper():
    """Return the command prefix used to escalate privileges.

    Plain ``sudo`` when the rootwrap workaround is enabled, otherwise
    ``sudo nova-rootwrap <rootwrap_config>``.
    """
    if CONF.workarounds.disable_rootwrap:
        return 'sudo'
    return 'sudo nova-rootwrap %s' % CONF.rootwrap_config
class RootwrapProcessHelper(object):
    """Runs privileged commands by spawning a rootwrap process per call."""

    def trycmd(self, *cmd, **kwargs):
        # Delegate to oslo.concurrency, injecting the configured root helper.
        kwargs['root_helper'] = get_root_helper()
        return processutils.trycmd(*cmd, **kwargs)

    def execute(self, *cmd, **kwargs):
        # Same as trycmd() but raises ProcessExecutionError on failure.
        kwargs['root_helper'] = get_root_helper()
        return processutils.execute(*cmd, **kwargs)
class RootwrapDaemonHelper(RootwrapProcessHelper):
    """Runs privileged commands through a long-lived rootwrap daemon.

    Unlike RootwrapProcessHelper this avoids forking a new rootwrap process
    per call; one daemon client is shared per rootwrap config file.
    """
    # Map of rootwrap config path -> daemon client, shared class-wide.
    _clients = {}

    @synchronized('daemon-client-lock')
    def _get_client(cls, rootwrap_config):
        # NOTE(review): declared with a ``cls`` first parameter but not
        # marked @classmethod; it is only called through an instance below,
        # so ``cls`` is actually the instance here. The class-level
        # ``_clients`` dict is still shared correctly -- confirm whether
        # @classmethod was intended.
        try:
            return cls._clients[rootwrap_config]
        except KeyError:
            # Imported lazily so oslo.rootwrap's client is only required
            # when the daemon mode is actually used.
            from oslo_rootwrap import client
            new_client = client.Client([
                "sudo", "nova-rootwrap-daemon", rootwrap_config])
            cls._clients[rootwrap_config] = new_client
            return new_client

    def __init__(self, rootwrap_config):
        self.client = self._get_client(rootwrap_config)

    def trycmd(self, *args, **kwargs):
        # Like execute(), but never raises: failures are reported through
        # the returned (out, err) tuple instead.
        discard_warnings = kwargs.pop('discard_warnings', False)
        try:
            out, err = self.execute(*args, **kwargs)
            failed = False
        except processutils.ProcessExecutionError as exn:
            out, err = '', six.text_type(exn)
            failed = True
        if not failed and discard_warnings and err:
            # Handle commands that output to stderr but otherwise succeed
            err = ''
        return out, err

    def execute(self, *cmd, **kwargs):
        # NOTE(dims): This method is to provide compatibility with the
        # processutils.execute interface. So that calling daemon or direct
        # rootwrap to honor the same set of flags in kwargs and to ensure
        # that we don't regress any current behavior.
        cmd = [str(c) for c in cmd]
        loglevel = kwargs.pop('loglevel', logging.DEBUG)
        log_errors = kwargs.pop('log_errors', None)
        process_input = kwargs.pop('process_input', None)
        delay_on_retry = kwargs.pop('delay_on_retry', True)
        attempts = kwargs.pop('attempts', 1)
        check_exit_code = kwargs.pop('check_exit_code', [0])
        ignore_exit_code = False
        # check_exit_code may be a bool (check / don't check), a single int
        # or a list of acceptable exit codes; normalize to a list.
        if isinstance(check_exit_code, bool):
            ignore_exit_code = not check_exit_code
            check_exit_code = [0]
        elif isinstance(check_exit_code, int):
            check_exit_code = [check_exit_code]
        # Mask any passwords before the command line is ever logged.
        sanitized_cmd = strutils.mask_password(' '.join(cmd))
        LOG.info(_LI('Executing RootwrapDaemonHelper.execute '
                     'cmd=[%(cmd)r] kwargs=[%(kwargs)r]'),
                 {'cmd': sanitized_cmd, 'kwargs': kwargs})
        while attempts > 0:
            attempts -= 1
            try:
                start_time = time.time()
                LOG.log(loglevel, _('Running cmd (subprocess): %s'),
                        sanitized_cmd)
                (returncode, out, err) = self.client.execute(
                    cmd, process_input)
                end_time = time.time() - start_time
                LOG.log(loglevel,
                        'CMD "%(sanitized_cmd)s" returned: %(return_code)s '
                        'in %(end_time)0.3fs',
                        {'sanitized_cmd': sanitized_cmd,
                         'return_code': returncode,
                         'end_time': end_time})
                if not ignore_exit_code and returncode not in check_exit_code:
                    out = strutils.mask_password(out)
                    err = strutils.mask_password(err)
                    raise processutils.ProcessExecutionError(
                        exit_code=returncode,
                        stdout=out,
                        stderr=err,
                        cmd=sanitized_cmd)
                return (out, err)
            except processutils.ProcessExecutionError as err:
                # if we want to always log the errors or if this is
                # the final attempt that failed and we want to log that.
                if log_errors == processutils.LOG_ALL_ERRORS or (
                        log_errors == processutils.LOG_FINAL_ERROR and
                        not attempts):
                    format = _('%(desc)r\ncommand: %(cmd)r\n'
                               'exit code: %(code)r\nstdout: %(stdout)r\n'
                               'stderr: %(stderr)r')
                    LOG.log(loglevel, format, {"desc": err.description,
                                               "cmd": err.cmd,
                                               "code": err.exit_code,
                                               "stdout": err.stdout,
                                               "stderr": err.stderr})
                if not attempts:
                    LOG.log(loglevel, _('%r failed. Not Retrying.'),
                            sanitized_cmd)
                    raise
                else:
                    LOG.log(loglevel, _('%r failed. Retrying.'),
                            sanitized_cmd)
                    if delay_on_retry:
                        # Randomized back-off between 0.2s and 2s.
                        time.sleep(random.randint(20, 200) / 100.0)
def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method.

    When ``run_as_root`` is truthy the command is routed through either the
    rootwrap daemon (``CONF.use_rootwrap_daemon``) or a per-call rootwrap
    process; otherwise it goes straight to ``processutils.execute()``.
    """
    if 'run_as_root' in kwargs and kwargs.get('run_as_root'):
        if CONF.use_rootwrap_daemon:
            return RootwrapDaemonHelper(CONF.rootwrap_config).execute(
                *cmd, **kwargs)
        else:
            return RootwrapProcessHelper().execute(*cmd, **kwargs)
    return processutils.execute(*cmd, **kwargs)
def ssh_execute(dest, *cmd, **kwargs):
    """Run a command on a remote host over non-interactive ssh.

    ``BatchMode=yes`` makes ssh fail instead of prompting for credentials.
    """
    full_cmd = ['ssh', '-o', 'BatchMode=yes', dest] + list(cmd)
    return execute(*full_cmd, **kwargs)
def generate_uid(topic, size=8):
    """Return a pseudo-random id of the form ``<topic>-<suffix>``.

    :param topic: prefix for the id
    :param size: number of random suffix characters
    :returns: '<topic>-' followed by ``size`` characters drawn from the
        lowercase alphanumerics.

    The symbol string previously contained '0' twice
    ('01234567890abc...'), which skewed the distribution toward '0';
    each character now appears exactly once.
    """
    characters = '0123456789abcdefghijklmnopqrstuvwxyz'
    choices = [random.choice(characters) for _x in range(size)]
    return '%s-%s' % (topic, ''.join(choices))
# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
# Each tuple element is a symbol *group*; generate_password() guarantees at
# least one character from every group (when the length allows it).
DEFAULT_PASSWORD_SYMBOLS = ('23456789',  # Removed: 0,1
                            'ABCDEFGHJKLMNPQRSTUVWXYZ',  # Removed: I, O
                            'abcdefghijkmnopqrstuvwxyz')  # Removed: l
def last_completed_audit_period(unit=None, before=None):
    """This method gives you the most recently *completed* audit period.

    arguments:
            units: string, one of 'hour', 'day', 'month', 'year'
                    Periods normally begin at the beginning (UTC) of the
                    period unit (So a 'day' period begins at midnight UTC,
                    a 'month' unit on the 1st, a 'year' on Jan, 1)
                    unit string may be appended with an optional offset
                    like so:  'day@18'  This will begin the period at 18:00
                    UTC.  'month@15' starts a monthly period on the 15th,
                    and year@3 begins a yearly one on March 1st.
            before: Give the audit period most recently completed before
                    <timestamp>. Defaults to now.

    returns:  2 tuple of datetimes (begin, end)
              The begin timestamp of this audit period is the same as the
              end of the previous.

    :raises ValueError: if the unit is not hour/day/month/year
    """
    if not unit:
        unit = CONF.instance_usage_audit_period

    offset = 0
    # Split an optional '@<offset>' suffix, e.g. 'day@18'.
    if '@' in unit:
        unit, offset = unit.split("@", 1)
        offset = int(offset)

    if before is not None:
        rightnow = before
    else:
        rightnow = timeutils.utcnow()
    if unit not in ('month', 'day', 'year', 'hour'):
        raise ValueError(_('Time period must be hour, day, month or year'))
    if unit == 'month':
        # offset is the day-of-month the period starts on (default 1st).
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=offset,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            # The current month's period has not completed yet; step back
            # one month (wrapping the year when necessary).
            year = rightnow.year
            if 1 >= rightnow.month:
                year -= 1
                month = 12 + (rightnow.month - 1)
            else:
                month = rightnow.month - 1
            end = datetime.datetime(day=offset,
                                    month=month,
                                    year=year)
        year = end.year
        if 1 >= end.month:
            year -= 1
            month = 12 + (end.month - 1)
        else:
            month = end.month - 1
        begin = datetime.datetime(day=offset, month=month, year=year)
    elif unit == 'year':
        # offset is the month the yearly period starts in (default January).
        if offset == 0:
            offset = 1
        end = datetime.datetime(day=1, month=offset, year=rightnow.year)
        if end >= rightnow:
            end = datetime.datetime(day=1,
                                    month=offset,
                                    year=rightnow.year - 1)
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 2)
        else:
            begin = datetime.datetime(day=1,
                                      month=offset,
                                      year=rightnow.year - 1)
    elif unit == 'day':
        # offset is the hour of day the daily period starts at.
        end = datetime.datetime(hour=offset,
                                day=rightnow.day,
                                month=rightnow.month,
                                year=rightnow.year)
        if end >= rightnow:
            end = end - datetime.timedelta(days=1)
        begin = end - datetime.timedelta(days=1)
    elif unit == 'hour':
        # offset is the minute past the hour the hourly period starts at.
        end = rightnow.replace(minute=offset, second=0, microsecond=0)
        if end >= rightnow:
            end = end - datetime.timedelta(hours=1)
        begin = end - datetime.timedelta(hours=1)

    return (begin, end)
def generate_password(length=None, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
    """Generate a random password from the supplied symbol groups.

    At least one symbol from each group will be included. Unpredictable
    results if length is less than the number of symbol groups.

    Believed to be reasonably secure (with a reasonable password length!)
    """
    if length is None:
        length = CONF.password_length

    rng = random.SystemRandom()

    # NOTE(jerdfelt): Some password policies require at least one character
    # from each group of symbols, so seed the password with one random
    # character per group, then shuffle and truncate so that short
    # passwords are not predictably ordered by group.
    password = [rng.choice(group) for group in symbolgroups]
    rng.shuffle(password)
    password = password[:length]

    # Fill the remainder from the union of all symbol groups.
    remaining = length - len(password)
    all_symbols = ''.join(symbolgroups)
    password.extend(rng.choice(all_symbols) for _ in range(remaining))

    # Final shuffle so the leading characters aren't group-predictable.
    rng.shuffle(password)
    return ''.join(password)
# TODO(sfinucan): Replace this with the equivalent from oslo.utils
def utf8(value):
    """Try to turn a string into utf-8 if possible.

    None and byte strings are returned unchanged; anything else is coerced
    to text and then encoded as UTF-8.

    The original code was copied from the utf8 function in
    http://github.com/facebook/tornado/blob/master/tornado/escape.py
    """
    if value is None:
        return value
    if isinstance(value, six.binary_type):
        return value
    if not isinstance(value, six.text_type):
        value = six.text_type(value)
    return value.encode('utf-8')
def parse_server_string(server_str):
    """Parses the given server_string and returns a tuple of host and port.

    If it's not a combination of host part and port, the port element
    is an empty string. If the input is invalid expression, return a tuple of
    two empty strings.

    :param server_str: e.g. 'host', 'host:port', '::1' or '[::1]:port'
    :returns: (host, port) tuple of strings; ('', '') on invalid input
    """
    try:
        # First of all, exclude pure IPv6 address (w/o port).
        if netaddr.valid_ipv6(server_str):
            return (server_str, '')

        # Next, check if this is IPv6 address with a port number combination.
        if server_str.find("]:") != -1:
            # e.g. '[::1]:8080' -> ('::1', '8080')
            (address, port) = server_str.replace('[', '', 1).split(']:')
            return (address, port)

        # Third, check if this is a combination of an address and a port
        if server_str.find(':') == -1:
            return (server_str, '')

        # This must be a combination of an address and a port
        (address, port) = server_str.split(':')
        return (address, port)

    except (ValueError, netaddr.AddrFormatError):
        # e.g. too many ':' separators or a malformed IPv6 literal.
        LOG.error(_LE('Invalid server_string: %s'), server_str)
        return ('', '')
def get_shortened_ipv6(address):
    """Return the canonical shortened form of an IPv6 address string."""
    return str(netaddr.IPAddress(address, version=6).ipv6())
def get_shortened_ipv6_cidr(address):
    """Return the canonical CIDR form of an IPv6 network string."""
    return str(netaddr.IPNetwork(address, version=6).cidr)
def safe_ip_format(ip):
    """Transform ip string to "safe" format.

    IPv4 addresses and hostnames are returned unchanged; IPv6 address
    literals are nested inside square brackets.
    """
    try:
        version = netaddr.IPAddress(ip).version
    except (TypeError, netaddr.AddrFormatError):
        # Not an IP literal at all -- treat it as a hostname.
        return ip
    return '[%s]' % ip if version == 6 else ip
def format_remote_path(host, path):
    """Return a remote path in a format acceptable for scp/rsync.

    '[host]:path' for IPv6 address literals, 'host:path' otherwise; when
    host is None, only the path is returned.
    """
    if host is None:
        return path
    return '{0}:{1}'.format(safe_ip_format(host), path)
def make_dev_path(dev, partition=None, base='/dev'):
    """Return a path to a particular device.

    >>> make_dev_path('xvdc')
    /dev/xvdc

    >>> make_dev_path('xvdc', 1)
    /dev/xvdc1
    """
    # A falsy partition (None or 0) adds no suffix, matching the original
    # truthiness check.
    suffix = str(partition) if partition else ''
    return os.path.join(base, dev) + suffix
def sanitize_hostname(hostname, default_name=None):
    """Return a hostname which conforms to RFC-952 and RFC-1123 specs except
    the length of hostname.

    Window, Linux, and Dnsmasq has different limitation:

    Windows: 255 (net_bios limits to 15, but window will truncate it)
    Linux: 64
    Dnsmasq: 63

    Due to nova-network will leverage dnsmasq to set hostname, so we chose
    63.

    :param hostname: candidate hostname (text or bytes)
    :param default_name: fallback used when sanitizing strips everything
    :returns: sanitized hostname, truncated to 63 characters
    """
    def truncate_hostname(name):
        if len(name) > 63:
            LOG.warning(_LW("Hostname %(hostname)s is longer than 63, "
                            "truncate it to %(truncated_name)s"),
                        {'hostname': name, 'truncated_name': name[:63]})
        return name[:63]

    if isinstance(hostname, six.text_type):
        # Remove characters outside the Unicode range U+0000-U+00FF
        hostname = hostname.encode('latin-1', 'ignore')
        if six.PY3:
            hostname = hostname.decode('latin-1')

    hostname = truncate_hostname(hostname)
    # Spaces/underscores become hyphens; any remaining character that is
    # not alphanumeric, dot or hyphen is dropped entirely.
    hostname = re.sub('[ _]', '-', hostname)
    hostname = re.sub('[^\w.-]+', '', hostname)
    hostname = hostname.lower()
    # Leading/trailing dots and hyphens are not valid per RFC-952.
    hostname = hostname.strip('.-')
    # NOTE(eliqiao): set hostname to default_display_name to avoid
    # empty hostname
    if hostname == "" and default_name is not None:
        return truncate_hostname(default_name)
    return hostname
@contextlib.contextmanager
def temporary_mutation(obj, **kwargs):
    """Temporarily set the attr on a particular object to a given value then
    revert when finished.

    One use of this is to temporarily set the read_deleted flag on a context
    object:

        with temporary_mutation(context, read_deleted="yes"):
            do_something_that_needed_deleted_objects()
    """
    _MISSING = object()

    def _dict_like(target):
        # Treat dicts (and py2 dict-alikes exposing has_key) as mappings.
        return hasattr(target, 'has_key') or isinstance(target, dict)

    def _get(target, key):
        if _dict_like(target):
            return target.get(key, _MISSING)
        return getattr(target, key, _MISSING)

    def _put(target, key, value):
        if _dict_like(target):
            target[key] = value
        else:
            setattr(target, key, value)

    def _drop(target, key):
        if _dict_like(target):
            del target[key]
        else:
            delattr(target, key)

    # Snapshot the current values (or a missing-marker) before mutating.
    saved = {key: _get(obj, key) for key in kwargs}
    for key, value in kwargs.items():
        _put(obj, key, value)

    try:
        yield
    finally:
        # Restore every key; remove those that did not exist before.
        for key, previous in saved.items():
            if previous is _MISSING:
                _drop(obj, key)
            else:
                _put(obj, key, previous)
def generate_mac_address():
    """Generate an Ethernet MAC address.

    NOTE(vish): We would prefer to use 0xfe here to ensure that linux
    bridge mac addresses don't change, but it appears to
    conflict with libvirt, so we use the next highest octet
    that has the unicast and locally administered bits set
    properly: 0xfa.
    Discussion: https://bugs.launchpad.net/nova/+bug/921838
    """
    octets = [0xfa, 0x16, 0x3e] + [random.randint(0x00, 0xff)
                                   for _ in range(3)]
    return ':'.join('%02x' % octet for octet in octets)
# NOTE(mikal): I really wanted this code to go away, but I can't find a way
# to implement what the callers of this method want with privsep. Basically,
# if we could hand off either a file descriptor or a file like object then
# we could make this go away.
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
    """Temporarily chown a path.

    :param path: filesystem path whose ownership is changed
    :param owner_uid: UID of temporary owner (defaults to current user)
    """
    if owner_uid is None:
        owner_uid = os.getuid()

    orig_uid = os.stat(path).st_uid

    # NOTE(review): nova.privsep.path does not appear among this module's
    # visible imports -- confirm it is imported elsewhere, otherwise this
    # attribute access fails at runtime.
    if orig_uid != owner_uid:
        nova.privsep.path.chown(path, uid=owner_uid)
    try:
        yield
    finally:
        # Always restore the original owner, even if the caller raised.
        if orig_uid != owner_uid:
            nova.privsep.path.chown(path, uid=orig_uid)
@contextlib.contextmanager
def tempdir(**kwargs):
    """Yield a freshly created temporary directory, removing it on exit.

    Keyword arguments are passed through to tempfile.mkdtemp(); when no
    explicit ``dir`` is given, CONF.tempdir is used as the parent.
    Cleanup is best effort: a failed rmtree is logged, not raised.
    """
    mkdtemp_args = dict(kwargs)
    if 'dir' not in mkdtemp_args:
        mkdtemp_args['dir'] = CONF.tempdir
    created = tempfile.mkdtemp(**mkdtemp_args)
    try:
        yield created
    finally:
        try:
            shutil.rmtree(created)
        except OSError as exc:
            LOG.error(_LE('Could not remove tmpdir: %s'), exc)
class UndoManager(object):
    """Provides a mechanism to facilitate rolling back a series of actions
    when an exception is raised.
    """
    def __init__(self):
        # Undo callables, appended in the order their actions were performed.
        self.undo_stack = []

    def undo_with(self, undo_func):
        # Register a zero-argument callable reversing the most recent action.
        self.undo_stack.append(undo_func)

    def _rollback(self):
        # Undo in reverse (LIFO) order so later actions are reverted first.
        for undo_func in reversed(self.undo_stack):
            undo_func()

    def rollback_and_reraise(self, msg=None, **kwargs):
        """Rollback a series of actions then re-raise the exception.

        .. note:: (sirp) This should only be called within an
                  exception handler.
        """
        with excutils.save_and_reraise_exception():
            if msg:
                LOG.exception(msg, **kwargs)

            self._rollback()
def metadata_to_dict(metadata, include_deleted=False):
    """Flatten a list of ``{'key': k, 'value': v}`` rows into a plain dict.

    Rows whose 'deleted' entry is truthy are skipped unless
    ``include_deleted`` is True. Later duplicate keys overwrite earlier
    ones.
    """
    return {
        item['key']: item['value']
        for item in metadata
        if include_deleted or not item.get('deleted')
    }
def dict_to_metadata(metadata):
result = []
for key, value in metadata.items():
result.append(dict(key=key, value=value))
return result
def instance_meta(instance):
if isinstance(instance['metadata'], dict):
return instance['metadata']
else:
return metadata_to_dict(instance['metadata'])
def instance_sys_meta(instance):
    """Return instance system metadata as a dict.

    System metadata may be stored either as a dict or as a list of
    key/value items; soft-deleted list entries are kept.
    """
    sys_meta = instance.get('system_metadata')
    if not sys_meta:
        return {}
    if isinstance(sys_meta, dict):
        return sys_meta
    return metadata_to_dict(sys_meta, include_deleted=True)
def expects_func_args(*args):
    """Decorator factory: assert at decoration time that the decorated
    function accepts all of the named arguments.

    Raises TypeError when the target (after unwrapping) neither declares
    every name in ``args`` nor takes ``*args``/``**kwargs``.
    """
    def _decorator_checker(dec):
        @functools.wraps(dec)
        def _decorator(f):
            # Inspect the innermost function, not intermediate wrappers.
            base_f = safe_utils.get_wrapped_function(f)
            argspec = getargspec(base_f)
            # argspec[1]/argspec[2] are *args/**kwargs names (or None);
            # argspec[0] is the list of declared positional arguments.
            if argspec[1] or argspec[2] or set(args) <= set(argspec[0]):
                # NOTE (ndipanov): We can't really tell if correct stuff will
                # be passed if it's a function with *args or **kwargs so
                # we still carry on and hope for the best
                return dec(f)
            else:
                raise TypeError("Decorated function %(f_name)s does not "
                                "have the arguments expected by the "
                                "decorator %(d_name)s" %
                                {'f_name': base_f.__name__,
                                 'd_name': dec.__name__})
        return _decorator
    return _decorator_checker
class ExceptionHelper(object):
    """Proxy that wraps another object and translates the
    messaging.ExpectedException instances raised by its method calls back
    into the original exceptions they carry.
    """
    def __init__(self, target):
        self._target = target
    def __getattr__(self, name):
        # Look up the attribute on the wrapped object; assumed callable
        # since the returned wrapper invokes it.
        func = getattr(self._target, name)
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except messaging.ExpectedException as e:
                # Re-raise the exception originally captured by the RPC
                # layer, preserving its traceback.
                six.reraise(*e.exc_info)
        return wrapper
def check_string_length(value, name=None, min_length=0, max_length=None):
    """Validate that ``value`` is a string whose length lies within range.

    :param value: the value of the string
    :param name: the name of the string (used in error messages; defaults
        to the value itself)
    :param min_length: the min_length of the string
    :param max_length: the max_length of the string (0/None disables check)
    :raises exception.InvalidInput: when the type or length is invalid
    """
    if not isinstance(value, six.string_types):
        msg = (_("The input is not a string or unicode") if name is None
               else _("%s is not a string or unicode") % name)
        raise exception.InvalidInput(message=msg)
    label = value if name is None else name
    if len(value) < min_length:
        raise exception.InvalidInput(message=_(
            "%(name)s has a minimum character requirement of "
            "%(min_length)s.") % {'name': label, 'min_length': min_length})
    if max_length and len(value) > max_length:
        raise exception.InvalidInput(message=_(
            "%(name)s has more than %(max_length)s "
            "characters.") % {'name': label, 'max_length': max_length})
def validate_integer(value, name, min_value=None, max_value=None):
    """Make sure that value is a valid integer, potentially within range.

    Thin wrapper around oslo strutils that converts the ValueError into
    the API-facing InvalidInput exception.

    :param value: value of the integer
    :param name: name of the integer (used in the error message)
    :param min_value: min_value of the integer
    :param max_value: max_value of the integer
    :returns: integer
    :raise: InvalidInput If value is not a valid integer
    """
    try:
        return strutils.validate_integer(value, name, min_value, max_value)
    except ValueError as e:
        raise exception.InvalidInput(reason=six.text_type(e))
def _serialize_profile_info():
    """Capture the current osprofiler trace context as a plain dict.

    Returns None when osprofiler is unavailable or no trace is active,
    so the result can be handed to a new thread and re-initialized there.
    """
    if not profiler:
        return None
    prof = profiler.get()
    if not prof:
        return None
    # FIXME(DinaBelova): we'll add profiler.get_info() method
    # to extract this info -> we'll need to update these lines
    return {
        "hmac_key": prof.hmac_key,
        "base_id": prof.get_base_id(),
        "parent_id": prof.get_id()
    }
def spawn(func, *args, **kwargs):
    """Passthrough method for eventlet.spawn.

    This utility exists so that it can be stubbed for testing without
    interfering with the service spawns.

    It will also grab the context from the threadlocal store and add it to
    the store on the new thread.  This allows for continuity in logging the
    context when using this method to spawn a new thread.

    :returns: the eventlet GreenThread object
    """
    # Capture request context and profiler state *now*, on the calling
    # thread; they are replayed inside the green thread below.
    _context = common_context.get_current()
    profiler_info = _serialize_profile_info()
    @functools.wraps(func)
    def context_wrapper(*args, **kwargs):
        # NOTE: If update_store is not called after spawn it won't be
        # available for the logger to pull from threadlocal storage.
        if _context is not None:
            _context.update_store()
        if profiler_info and profiler:
            profiler.init(**profiler_info)
        return func(*args, **kwargs)
    return eventlet.spawn(context_wrapper, *args, **kwargs)
def spawn_n(func, *args, **kwargs):
    """Passthrough method for eventlet.spawn_n.

    This utility exists so that it can be stubbed for testing without
    interfering with the service spawns.

    It will also grab the context from the threadlocal store and add it to
    the store on the new thread.  This allows for continuity in logging the
    context when using this method to spawn a new thread.

    Unlike :func:`spawn`, no GreenThread handle is returned and the
    function's return value is discarded.
    """
    # Capture request context and profiler state on the calling thread.
    _context = common_context.get_current()
    profiler_info = _serialize_profile_info()
    @functools.wraps(func)
    def context_wrapper(*args, **kwargs):
        # NOTE: If update_store is not called after spawn_n it won't be
        # available for the logger to pull from threadlocal storage.
        if _context is not None:
            _context.update_store()
        if profiler_info and profiler:
            profiler.init(**profiler_info)
        func(*args, **kwargs)
    eventlet.spawn_n(context_wrapper, *args, **kwargs)
def is_none_string(val):
    """True iff ``val`` is a string spelling of None (case-insensitive)."""
    return isinstance(val, six.string_types) and val.lower() == 'none'
def is_neutron():
    """Whether the deployment uses Neutron networking.

    The answer is computed once via nova.network.is_neutron() and cached
    in the module-level _IS_NEUTRON global for subsequent calls.
    """
    global _IS_NEUTRON
    if _IS_NEUTRON is not None:
        return _IS_NEUTRON
    _IS_NEUTRON = nova.network.is_neutron()
    return _IS_NEUTRON
def is_auto_disk_config_disabled(auto_disk_config_raw):
    """Whether an image's raw auto_disk_config property explicitly says
    "disabled" (case-insensitive, surrounding whitespace ignored).

    :param auto_disk_config_raw: property string, or None when unset
    """
    if auto_disk_config_raw is None:
        return False
    return auto_disk_config_raw.strip().lower() == "disabled"
def get_auto_disk_config_from_instance(instance=None, sys_meta=None):
    """Fetch the image_auto_disk_config value from instance system metadata.

    :param instance: used to derive system metadata when sys_meta is None
    :param sys_meta: pre-fetched system metadata dict (optional)
    """
    meta = sys_meta if sys_meta is not None else instance_sys_meta(instance)
    return meta.get("image_auto_disk_config")


def get_auto_disk_config_from_image_props(image_properties):
    """Fetch the auto_disk_config value from an image property dict."""
    return image_properties.get("auto_disk_config")
def get_system_metadata_from_image(image_meta, flavor=None):
    """Flatten image metadata into instance system-metadata entries.

    Image properties are stored under the SM_IMAGE_PROP_PREFIX prefix
    (keys in SM_SKIP_KEYS are dropped, values truncated to 255 chars);
    keys listed in SM_INHERITABLE_KEYS are copied through as well.

    :param image_meta: image metadata dict with a 'properties' sub-dict
    :param flavor: when given, used to adjust 'min_disk' (see below)
    :returns: dict of system metadata entries
    """
    system_meta = {}
    prefix_format = SM_IMAGE_PROP_PREFIX + '%s'
    for key, value in image_meta.get('properties', {}).items():
        if key in SM_SKIP_KEYS:
            continue
        # System-metadata values are limited to 255 characters.
        new_value = safe_truncate(six.text_type(value), 255)
        system_meta[prefix_format % key] = new_value
    for key in SM_INHERITABLE_KEYS:
        value = image_meta.get(key)
        if key == 'min_disk' and flavor:
            # For vhd images the flavor's root disk wins outright;
            # otherwise min_disk can only grow to the flavor's root_gb.
            if image_meta.get('disk_format') == 'vhd':
                value = flavor['root_gb']
            else:
                value = max(value or 0, flavor['root_gb'])
        if value is None:
            continue
        system_meta[prefix_format % key] = value
    return system_meta
def get_image_from_system_metadata(system_meta):
    """Reconstruct an image-metadata dict from instance system metadata.

    Inverse of get_system_metadata_from_image: prefixed keys become image
    'properties' (or top-level entries for SM_INHERITABLE_KEYS), and keys
    in SM_SKIP_KEYS are dropped.

    :param system_meta: dict, or list of key/value items (deleted included)
    :returns: image metadata dict with a 'properties' sub-dict
    """
    image_meta = {}
    properties = {}
    if not isinstance(system_meta, dict):
        system_meta = metadata_to_dict(system_meta, include_deleted=True)
    for key, value in system_meta.items():
        if value is None:
            continue
        # NOTE(xqueralt): Not sure this has to inherit all the properties or
        # just the ones we need. Leaving it for now to keep the old behaviour.
        if key.startswith(SM_IMAGE_PROP_PREFIX):
            # Strip the prefix to recover the original image key.
            key = key[len(SM_IMAGE_PROP_PREFIX):]
        if key in SM_SKIP_KEYS:
            continue
        if key in SM_INHERITABLE_KEYS:
            image_meta[key] = value
        else:
            properties[key] = value
    image_meta['properties'] = properties
    return image_meta
def get_image_metadata_from_volume(volume):
    """Build glance-style image metadata from a Cinder volume dict.

    :param volume: volume dict, optionally carrying
        'volume_image_metadata' and 'size' (in GiB)
    :returns: image metadata dict ('size' in bytes, 'status' forced active)
    """
    properties = copy.copy(volume.get('volume_image_metadata', {}))
    image_meta = {'properties': properties}
    # Volume size is no longer related to the original image size,
    # so we take it from the volume directly. Cinder creates
    # volumes in Gb increments, and stores size in Gb, whereas
    # glance reports size in bytes. As we're returning glance
    # metadata here, we need to convert it.
    image_meta['size'] = volume.get('size', 0) * units.Gi
    # NOTE(yjiang5): restore the basic attributes
    # NOTE(mdbooth): These values come from volume_glance_metadata
    # in cinder. This is a simple key/value table, and all values
    # are strings. We need to convert them to ints to avoid
    # unexpected type errors.
    for attr in VIM_IMAGE_ATTRIBUTES:
        # Moved out of 'properties' to the top level; non-numeric
        # attributes are popped but only min_ram/min_disk are kept.
        val = properties.pop(attr, None)
        if attr in ('min_ram', 'min_disk'):
            image_meta[attr] = int(val or 0)
    # NOTE(yjiang5): Always set the image status as 'active'
    # and depends on followed volume_api.check_attach() to
    # verify it. This hack should be harmless with that check.
    image_meta['status'] = 'active'
    return image_meta
def get_hash_str(base_str):
    """Hex MD5 digest of base_str (text is UTF-8 encoded first).

    :param base_str: bytes or unicode string to hash
    """
    data = (base_str.encode('utf-8')
            if isinstance(base_str, six.text_type) else base_str)
    return hashlib.md5(data).hexdigest()


def get_sha256_str(base_str):
    """Hex SHA-256 digest of base_str (text is UTF-8 encoded first).

    sha1 and md5 are known to be breakable, so sha256 is a better option
    when the hash is being used for security purposes. If hashing passwords
    or anything else that needs to be retained for a long period a salted
    hash is better.
    """
    data = (base_str.encode('utf-8')
            if isinstance(base_str, six.text_type) else base_str)
    return hashlib.sha256(data).hexdigest()
def get_obj_repr_unicode(obj):
    """repr() of obj as a unicode string.

    On Python 3 repr() is already text; on Python 2 the byte string is
    decoded as UTF-8.
    """
    text = repr(obj)
    return text if six.PY3 else six.text_type(text, 'utf-8')
def filter_and_format_resource_metadata(resource_type, resource_list,
        search_filts, metadata_type=None):
    """Get all metadata for a list of resources after filtering.

    Search_filts is a list of dictionaries, where the values in the dictionary
    can be string or regex string, or a list of strings/regex strings.

    Let's call a dict a 'filter block' and an item in the dict
    a 'filter'. A tag is returned if it matches ALL the filters in
    a filter block. If more than one values are specified for a
    filter, a tag is returned if it matches ATLEAST ONE value of the filter. If
    more than one filter blocks are specified, the tag should match ALL the
    filter blocks.

    For example:

        search_filts = [{'key': ['key1', 'key2'], 'value': 'val1'},
                        {'value': 'val2'}]

    The filter translates to 'match any tag for which':
        ((key=key1 AND value=val1) OR (key=key2 AND value=val1)) AND
            (value=val2)

    This example filter will never match a tag.

    :param resource_type: The resource type as a string, e.g. 'instance'
    :param resource_list: List of resource objects
    :param search_filts: Filters to filter metadata to be returned. Can be
        dict (e.g. {'key': 'env', 'value': 'prod'}, or a list of dicts
        (e.g. [{'key': 'env'}, {'value': 'beta'}]. Note that the values
        of the dict can be regular expressions.
    :param metadata_type: Provided to search for a specific metadata type
        (e.g. 'system_metadata')

    :returns: List of dicts where each dict is of the form {'key':
        'somekey', 'value': 'somevalue', 'instance_id':
        'some-instance-uuid-aaa'} if resource_type is 'instance'.
    """
    # NOTE(review): only 'instance' is actually supported below; for any
    # other resource_type the `metadata` local is never bound and the
    # loop over search_filts would raise NameError -- TODO confirm callers
    # only pass 'instance'.
    if isinstance(search_filts, dict):
        search_filts = [search_filts]
    def _get_id(resource):
        # Returns None for resource types other than 'instance'.
        if resource_type == 'instance':
            return resource.get('uuid')
    def _match_any(pattern_list, string):
        # re.match anchors at the start of the string (not a full match).
        if isinstance(pattern_list, str):
            pattern_list = [pattern_list]
        return any([re.match(pattern, string)
                    for pattern in pattern_list])
    def _filter_metadata(resource, search_filt, input_metadata):
        # Apply one filter block; returns the surviving key/value pairs.
        ids = search_filt.get('resource_id', [])
        keys_filter = search_filt.get('key', [])
        values_filter = search_filt.get('value', [])
        output_metadata = {}
        if ids and _get_id(resource) not in ids:
            return {}
        for k, v in input_metadata.items():
            # Both keys and value defined -- AND
            # NOTE(review): this first branch is subsumed by the elif below
            # (which already skips when either side fails to match), so it
            # is effectively redundant -- kept for historical behaviour.
            if (keys_filter and values_filter and
                    not _match_any(keys_filter, k) and
                    not _match_any(values_filter, v)):
                continue
            # Only keys or value is defined
            elif ((keys_filter and not _match_any(keys_filter, k)) or
                  (values_filter and not _match_any(values_filter, v))):
                continue
            output_metadata[k] = v
        return output_metadata
    formatted_metadata_list = []
    for res in resource_list:
        if resource_type == 'instance':
            # NOTE(rushiagr): metadata_type should be 'metadata' or
            # 'system_metadata' if resource_type is instance. Defaulting to
            # 'metadata' if not specified.
            if metadata_type is None:
                metadata_type = 'metadata'
            metadata = res.get(metadata_type, {})
        for filt in search_filts:
            # By chaining the input to the output, the filters are
            # ANDed together
            metadata = _filter_metadata(res, filt, metadata)
        for (k, v) in metadata.items():
            formatted_metadata_list.append({'key': k, 'value': v,
                            '%s_id' % resource_type: _get_id(res)})
    return formatted_metadata_list
def safe_truncate(value, length):
    """Safely truncates unicode strings such that their encoded length is
    no greater than the length provided.

    :param value: unicode string to truncate
    :param length: maximum length in UTF-8 encoded bytes
    :returns: a unicode string that encodes to at most ``length`` bytes
    """
    b_value = encodeutils.safe_encode(value)[:length]

    # NOTE(chaochin) UTF-8 character byte size varies from 1 to 6. If
    # truncating a long byte string to 255, the last character may be
    # cut in the middle, so that UnicodeDecodeError will occur when
    # converting it back to unicode.
    decode_ok = False
    while not decode_ok:
        try:
            u_value = encodeutils.safe_decode(b_value)
            decode_ok = True
        except UnicodeDecodeError:
            # Drop trailing bytes one at a time until the remainder is
            # valid UTF-8 again.
            b_value = b_value[:-1]
    return u_value
def read_cached_file(filename, force_reload=False):
    """Read from a file if it has been modified.

    The file's contents are cached in the module-level _FILE_CACHE keyed
    by filename, and re-read only when the on-disk mtime is newer than the
    cached one (or when a reload is forced).

    :param filename: path of the file to read
    :param force_reload: Whether to reload the file.
    :returns: A tuple of (reloaded, data): a boolean specifying if the
        data is fresh or not, and the file's contents as a string.
    """
    global _FILE_CACHE

    if force_reload:
        delete_cached_file(filename)

    reloaded = False
    mtime = os.path.getmtime(filename)
    cache_info = _FILE_CACHE.setdefault(filename, {})

    if not cache_info or mtime > cache_info.get('mtime', 0):
        LOG.debug("Reloading cached file %s", filename)
        with open(filename) as fap:
            cache_info['data'] = fap.read()
        cache_info['mtime'] = mtime
        reloaded = True
    return (reloaded, cache_info['data'])
def delete_cached_file(filename):
    """Delete cached file if present.

    :param filename: filename to delete
    """
    global _FILE_CACHE
    # No-op when the filename was never cached.
    _FILE_CACHE.pop(filename, None)
def isotime(at=None):
    """Current time as ISO string, as timeutils.isotime() is deprecated.

    :param at: datetime to format; defaults to now. Naive datetimes and
        UTC-aware datetimes get a 'Z' suffix, other zones their tz name.
    :returns: Current time in ISO format
    """
    at = at or timeutils.utcnow()
    stamp = at.strftime("%Y-%m-%dT%H:%M:%S")
    tz_name = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    stamp += 'Z' if tz_name in ('UTC', 'UTC+00:00') else tz_name
    return stamp


def strtime(at):
    """Format a datetime with microsecond precision and no zone suffix."""
    return at.strftime("%Y-%m-%dT%H:%M:%S.%f")
def get_ksa_adapter(service_type, ksa_auth=None, ksa_session=None,
                    min_version=None, max_version=None):
    """Construct a keystoneauth1 Adapter for a given service type.

    We expect to find a conf group whose name corresponds to the service_type's
    project according to the service-types-authority. That conf group must
    provide at least ksa adapter options. Depending how the result is to be
    used, ksa auth and/or session options may also be required, or the relevant
    parameter supplied.

    :param service_type: String name of the service type for which the Adapter
                         is to be constructed.
    :param ksa_auth: A keystoneauth1 auth plugin. If not specified, we attempt
                     to find one in ksa_session. Failing that, we attempt to
                     load one from the conf.
    :param ksa_session: A keystoneauth1 Session. If not specified, we attempt
                        to load one from the conf.
    :param min_version: The minimum major version of the adapter's endpoint,
                        intended to be used as the lower bound of a range with
                        max_version.
                        If min_version is given with no max_version it is as
                        if max version is 'latest'.
    :param max_version: The maximum major version of the adapter's endpoint,
                        intended to be used as the upper bound of a range with
                        min_version.
    :return: A keystoneauth1 Adapter object for the specified service_type.
    :raise: ConfGroupForServiceTypeNotFound If no conf group name could be
            found for the specified service_type.
    """
    # Get the conf group corresponding to the service type.
    confgrp = _SERVICE_TYPES.get_project_name(service_type)
    if not confgrp or not hasattr(CONF, confgrp):
        # Try the service type as the conf group. This is necessary for e.g.
        # placement, while it's still part of the nova project.
        # Note that this might become the first thing we try if/as we move to
        # using service types for conf group names in general.
        confgrp = service_type
        if not confgrp or not hasattr(CONF, confgrp):
            raise exception.ConfGroupForServiceTypeNotFound(stype=service_type)

    # Ensure we have an auth.
    # NOTE(efried): This could be None, and that could be okay - e.g. if the
    # result is being used for get_endpoint() and the conf only contains
    # endpoint_override.
    if not ksa_auth:
        if ksa_session and ksa_session.auth:
            ksa_auth = ksa_session.auth
        else:
            ksa_auth = ks_loading.load_auth_from_conf_options(CONF, confgrp)

    # A session is always loaded (from conf) when not supplied, reusing
    # whichever auth was resolved above.
    if not ksa_session:
        ksa_session = ks_loading.load_session_from_conf_options(
            CONF, confgrp, auth=ksa_auth)

    return ks_loading.load_adapter_from_conf_options(
        CONF, confgrp, session=ksa_session, auth=ksa_auth,
        min_version=min_version, max_version=max_version)
def get_endpoint(ksa_adapter):
    """Get the endpoint URL represented by a keystoneauth1 Adapter.

    This method is equivalent to what

        ksa_adapter.get_endpoint()

    should do, if it weren't for a panoply of bugs.

    :param ksa_adapter: keystoneauth1.adapter.Adapter, appropriately set up
                        with an endpoint_override; or service_type, interface
                        (list) and auth/service_catalog.
    :return: String endpoint URL.
    :raise EndpointNotFound: If endpoint discovery fails.
    """
    # TODO(efried): This will be unnecessary once bug #1707993 is fixed.
    # (At least for the non-image case, until 1707995 is fixed.)
    if ksa_adapter.endpoint_override:
        return ksa_adapter.endpoint_override
    # TODO(efried): Remove this once bug #1707995 is fixed.
    if ksa_adapter.service_type == 'image':
        try:
            return ksa_adapter.get_endpoint_data().catalog_url
        except AttributeError:
            # ksa_adapter.auth is a _ContextAuthPlugin, which doesn't have
            # get_endpoint_data. Fall through to using get_endpoint().
            pass
    # TODO(efried): The remainder of this method reduces to
    # TODO(efried):     return ksa_adapter.get_endpoint()
    # TODO(efried): once bug #1709118 is fixed.
    # NOTE(efried): Id9bd19cca68206fc64d23b0eaa95aa3e5b01b676 may also do the
    #               trick, once it's in a ksa release.
    # The EndpointNotFound exception happens when _ContextAuthPlugin is in play
    # because its get_endpoint() method isn't yet set up to handle interface as
    # a list.  (It could also happen with a real auth if the endpoint isn't
    # there; but that's covered below.)
    try:
        return ksa_adapter.get_endpoint()
    except ks_exc.EndpointNotFound:
        pass
    # Retry with each interface individually; note this mutates the
    # adapter's `interface` attribute in place, leaving it set to the last
    # interface tried.
    interfaces = list(ksa_adapter.interface)
    for interface in interfaces:
        ksa_adapter.interface = interface
        try:
            return ksa_adapter.get_endpoint()
        except ks_exc.EndpointNotFound:
            pass
    raise ks_exc.EndpointNotFound(
        "Could not find requested endpoint for any of the following "
        "interfaces: %s" % interfaces)
def generate_hostid(host, project_id):
    """Generate an obfuscated host id representing the host.

    This is a hashed value so will not actually look like a hostname, and is
    hashed with data from the project_id.

    :param host: The name of the compute host.
    :param project_id: The UUID of the project.
    :return: An obfuscated hashed host id string, return "" if host is empty
    """
    if not host:
        return ""
    return hashlib.sha224((project_id + host).encode('utf-8')).hexdigest()
| apache-2.0 |
lehmann/BrainSimulator | app/src/main/java/com/github/neuralnetworks/training/backpropagation/BackpropagationMaxout.java | 5496 | package com.github.neuralnetworks.training.backpropagation;
import com.github.neuralnetworks.architecture.Connections;
import com.github.neuralnetworks.architecture.Layer;
import com.github.neuralnetworks.calculation.memory.ValuesProvider;
import com.github.neuralnetworks.calculation.neuronfunctions.AparapiFullyConnected;
import com.github.neuralnetworks.calculation.neuronfunctions.MaxoutWinners;
import com.github.neuralnetworks.tensor.Matrix;
import com.github.neuralnetworks.tensor.Tensor;
import com.github.neuralnetworks.tensor.TensorFactory;
import com.github.neuralnetworks.util.Properties;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
 * Backpropagation connection calculator for maxout layers.
 *
 * <p>Creates one {@link AparapiBackpropMaxout} per input connection; that
 * kernel propagates the error only through each unit's winning input (as
 * recorded by {@link MaxoutWinners} during the forward pass) and applies the
 * corresponding weight updates.
 */
public class BackpropagationMaxout extends BackPropagationConnectionCalculatorImpl {

    private static final long serialVersionUID = 1L;

    public BackpropagationMaxout(Properties properties) {
        super(properties);
    }

    /**
     * Registers one backprop kernel per input connection, wiring in this
     * calculator's stored weight-update tensor and hyperparameters.
     */
    @Override
    protected void addBackpropFunction(List<Connections> inputConnections, Map<Connections, BackPropagationConnectionCalculator> connectionCalculators, ValuesProvider valuesProvider, ValuesProvider activations, Layer targetLayer) {
        for (Connections c : inputConnections) {
            connectionCalculators.put(c, new AparapiBackpropMaxout(c, valuesProvider, activations, Arrays.asList(getWeightUpdates().get(c)), getLearningRate(), getMomentum(), getL1weightDecay(), getL2weightDecay()));
        }
    }

    /**
     * All connections must share one output layer; that layer is used as the
     * target regardless of the {@code targetLayer} argument.
     *
     * @throws IllegalArgumentException if the connections disagree on the
     *         output layer
     */
    @Override
    public void calculate(List<Connections> connections, ValuesProvider valuesProvider, Layer targetLayer) {
        targetLayer = connections.get(0).getOutputLayer();
        for (Connections c : connections) {
            if (targetLayer != c.getOutputLayer()) {
                throw new IllegalArgumentException("No common target layer");
            }
        }
        super.calculate(connections, valuesProvider, targetLayer);
    }

    /**
     * Aparapi kernel performing the maxout backprop for a single (combined)
     * fully-connected connection.
     */
    public static class AparapiBackpropMaxout extends AparapiFullyConnected implements BackPropagationConnectionCalculator {

        private static final long serialVersionUID = 1L;

        /**
         * Activation of the output layer from the feedforward phase
         */
        @Constant
        protected float[] ffActivation;
        protected final int activationStartPosition;
        protected final int activationRowStep;
        protected final int activationColumnStep;

        /**
         * Weight updates array
         */
        protected final float[] weightUpdates;

        protected float learningRate;
        protected final float momentum;
        protected final float l1weightDecay;
        protected final float l2weightDecay;

        private final int[] winnersStartPositions;
        private final int[] maxoutWinners;

        public AparapiBackpropMaxout(Connections inputConnection, ValuesProvider valuesProvider, ValuesProvider activations, List<Tensor> weightUpdates, float learningRate, float momentum, float l1weightDecay, float l2weightDecay) {
            super(Arrays.asList(new Connections[] {inputConnection}), valuesProvider, inputConnection.getOutputLayer());

            Matrix m = TensorFactory.tensor(inputConnection.getInputLayer(), inputConnection, activations);
            this.ffActivation = m.getElements();
            this.activationStartPosition = m.getStartIndex();
            this.activationRowStep = m.getRowElementsDistance();
            this.activationColumnStep = m.getColumnElementsDistance();

            // BUG FIX: this previously read "this.learningRate = momentum;",
            // silently discarding the learningRate argument and training with
            // the momentum coefficient as the step size.
            this.learningRate = learningRate;
            this.momentum = momentum;
            this.l1weightDecay = l1weightDecay;
            this.l2weightDecay = l2weightDecay;
            this.weightUpdates = weightUpdates.get(0).getElements();

            this.winnersStartPositions = MaxoutWinners.getInstance().getStartPositions(Arrays.asList(new Connections[] {inputConnection}));
            this.maxoutWinners = MaxoutWinners.getInstance().getWinners();
        }

        @Override
        public void run() {
            int id = getGlobalId();

            int maxoutId = 0, weightId = 0;
            float weight = 0, weightUpdate = 0;

            // each input example
            for (int i = 0; i < miniBatchSize; i++) {
                // each connection (of the combined connections)
                for (int k = 0; k < series; k++) {
                    // winning input unit for this output/example pair,
                    // recorded during the forward pass
                    maxoutId = maxoutWinners[winnersStartPositions[k] + id * miniBatchSize + i];
                    weightId = weightStartPositions[k] + weightsInitialStep[k] * id + maxoutId * weightsStep[k];
                    weight = weights[weightId];
                    // NOTE(review): weightUpdate accumulates across k before
                    // being rescaled below; preserved as-is from the original
                    // implementation -- confirm against reference maxout
                    // backprop before changing.
                    weightUpdate += output[outputStartPosition + id * outputRowStep + i * outputColumnStep] * ffActivation[activationStartPosition + maxoutId * activationRowStep + i * activationColumnStep];
                    weightUpdate = learningRate * weightUpdate + momentum * weightUpdates[weightId] - l1weightDecay * abs(weight) - l2weightDecay * weight * weight / 2;
                    weights[weightId] += weightUpdate;
                    weightUpdates[weightId] = weightUpdate;

                    // propagate the error back through the winning input only
                    input[activationStartPosition + maxoutId * activationRowStep + i * activationColumnStep] += output[outputStartPosition + id * outputRowStep + i * outputColumnStep];
                }
            }
        }

        @Override
        public float getLearningRate() {
            return learningRate;
        }

        @Override
        public void setLearningRate(float learningRate) {
            this.learningRate = learningRate;
        }

        @Override
        public float getMomentum() {
            return momentum;
        }

        // The remaining setters are no-ops: the backing fields are final and
        // fixed at construction time; the methods exist only to satisfy the
        // BackPropagationConnectionCalculator interface.
        @Override
        public void setMomentum(float momentum) {
        }

        @Override
        public float getL1weightDecay() {
            return l1weightDecay;
        }

        @Override
        public void setL1weightDecay(float weightDecay) {
        }

        @Override
        public float getL2weightDecay() {
            return l2weightDecay;
        }

        @Override
        public void setL2weightDecay(float l2weightDecay) {
        }

        /** Not tracked by this kernel; always returns {@code null}. */
        @Override
        public ValuesProvider getActivations() {
            return null;
        }

        @Override
        public void setActivations(ValuesProvider activations) {
        }
    }
}
| apache-2.0 |
raulavila/tempus-fugit | src/main/java/com/google/code/tempusfugit/condition/ThreadAliveCondition.java | 1004 | /*
* Copyright (c) 2009-2015, toby weston & tempus-fugit committers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.code.tempusfugit.condition;
import com.google.code.tempusfugit.temporal.Condition;
/**
 * A {@link Condition} that holds for as long as the wrapped thread has not
 * terminated.
 */
public class ThreadAliveCondition implements Condition {

    private final Thread observed;

    public ThreadAliveCondition(Thread thread) {
        this.observed = thread;
    }

    /** @return {@code true} while the wrapped thread is still alive */
    @Override
    public boolean isSatisfied() {
        return observed.isAlive();
    }
}
| apache-2.0 |
DIPlib/diplib | src/morphology/basic.cpp | 64554 | /*
* (c)2017, Cris Luengo.
* Based on original DIPlib code: (c)1995-2014, Delft University of Technology.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <utility>
#include "diplib.h"
#include "diplib/morphology.h"
#include "diplib/kernel.h"
#include "diplib/geometry.h"
#include "diplib/framework.h"
#include "diplib/pixel_table.h"
#include "diplib/overload.h"
#include "diplib/library/copy_buffer.h"
#include "one_dimensional.h"
namespace dip {
// This function defined here, not in the header, to avoid pulling in kernel.h and its dependencies there.
// Converts this structuring element to an equivalent dip::Kernel.
// Shape parameters (sizes / line direction, or the custom image) carry over
// unchanged; only shapes with a direct Kernel counterpart are supported.
dip::Kernel StructuringElement::Kernel() const {
   dip::Kernel out;
   switch( shape_ ) {
      case ShapeCode::RECTANGULAR:
         out = { Kernel::ShapeCode::RECTANGULAR, params_ };
         break;
      case ShapeCode::ELLIPTIC:
         out = { Kernel::ShapeCode::ELLIPTIC, params_ };
         break;
      case ShapeCode::DIAMOND:
         out = { Kernel::ShapeCode::DIAMOND, params_ };
         break;
      case ShapeCode::DISCRETE_LINE:
         out = { Kernel::ShapeCode::LINE, params_ };
         break;
      case ShapeCode::CUSTOM:
         out = { image_ };
         break;
      // TODO: ShapeCode::OCTAGONAL and ShapeCode::PERIODIC_LINE could be converted to ShapeCode::CUSTOM, but only if the image dimensionality is known.
      default:
         DIP_THROW( "Cannot create kernel for this structuring element shape" );
   }
   // Preserve the SE's mirroring flag on the resulting kernel.
   if( mirror_ ) {
      out.Mirror();
   }
   return out;
}
namespace detail {
namespace {
// Extend the image by `2*boundary`, setting a view around the input + 1*boundary. This allows a first operation
// to read past the image boundary, and still save results outside the original image boundary. These results
// can then be used by a second operation for correct results.
// Extend the image by `2*boundary`, setting a view around the input + 1*boundary. This allows a first operation
// to read past the image boundary, and still save results outside the original image boundary. These results
// can then be used by a second operation for correct results.
void ExtendImageDoubleBoundary(
      Image const& in,
      Image& out,
      UnsignedArray const& boundary,
      BoundaryConditionArray const&bc
) {
   // Expand by 2*boundary using `bc`.
   UnsignedArray doubleBoundary = boundary;
   for( auto& b : doubleBoundary ) {
      b *= 2;
   }
   ExtendImage( in, out, doubleBoundary, bc );
   // Crop the image by 1*boundary, leaving it larger than `in` by 1*boundary.
   // This is done by shrinking the size fields and shifting the origin
   // pointer directly, so `out` keeps access to the outer border data.
   UnsignedArray outSizes = out.Sizes();
   dip::sint offset = 0;
   for( dip::uint ii = 0; ii < out.Dimensionality(); ++ii ) {
      outSizes[ ii ] -= doubleBoundary[ ii ];
      offset += static_cast< dip::sint >( boundary[ ii ] ) * out.Stride( ii );
   }
   out.SetSizesUnsafe( std::move( outSizes ));
   out.SetOriginUnsafe( out.Pointer( offset ));
   // Later after the first processing step, crop the image to the original size.
}
// --- Pixel table morphology ---
// Line filter computing a flat (binary) SE dilation or erosion using the
// framework's pixel-table machinery. Two strategies are used: a brute-force
// scan over all SE offsets (best for very short runs), and a run-based
// sliding-extremum update that re-examines only the trailing pixel of each
// run while the current extremum remains inside the window.
template< typename TPI >
class FlatSEMorphologyLineFilter : public Framework::FullLineFilter {
   public:
      FlatSEMorphologyLineFilter( Polarity polarity ) : dilation_( polarity == Polarity::DILATION ) {}
      virtual dip::uint GetNumberOfOperations( dip::uint lineLength, dip::uint, dip::uint nKernelPixels, dip::uint nRuns ) override {
         // Number of operations depends on data, so we cannot guess as to how many we'll do. On average:
         dip::uint averageRunLength = div_ceil( nKernelPixels, nRuns );
         dip::uint timesNoMaxInFilter = lineLength / averageRunLength;
         dip::uint timesMaxInFilter = lineLength - timesNoMaxInFilter;
         return timesMaxInFilter * (
                     nRuns * 4 // number of multiply-adds and comparisons
                     + nRuns ) // iterating over pixel table runs
                + timesNoMaxInFilter * (
                     nKernelPixels * 2 // number of comparisons
                     + 2 * nKernelPixels + nRuns ); // iterating over pixel table
      }
      virtual void SetNumberOfThreads( dip::uint, PixelTableOffsets const& pixelTable ) override {
         // Let's determine how to process the neighborhood
         dip::uint averageRunLength = div_ceil( pixelTable.NumberOfPixels(), pixelTable.Runs().size() );
         bruteForce_ = averageRunLength < 4; // Experimentally determined
         //std::cout << ( bruteForce_ ? "   Using brute force method\n" : "   Using run length method\n" );
         if( bruteForce_ ) {
            // Pre-compute the flat offset list once; reused for every line.
            offsets_ = pixelTable.Offsets();
         }
      }
      virtual void Filter( Framework::FullLineFilterParameters const& params ) override {
         TPI* in = static_cast< TPI* >( params.inBuffer.buffer );
         dip::sint inStride = params.inBuffer.stride;
         TPI* out = static_cast< TPI* >( params.outBuffer.buffer );
         dip::sint outStride = params.outBuffer.stride;
         dip::uint length = params.bufferLength;
         if( bruteForce_ ) {
            // Straightforward: for each output pixel, scan every SE offset.
            if( dilation_ ) {
               for( dip::uint ii = 0; ii < length; ++ii ) {
                  auto it = offsets_.begin();
                  TPI max = in[ *it ];
                  ++it;
                  while( it != offsets_.end() ) {
                     max = std::max( max, in[ *it ] );
                     ++it;
                  }
                  *out = max;
                  out += outStride;
                  in += inStride;
               }
            } else {
               for( dip::uint ii = 0; ii < length; ++ii ) {
                  auto it = offsets_.begin();
                  TPI min = in[ *it ];
                  ++it;
                  while( it != offsets_.end() ) {
                     min = std::min( min, in[ *it ] );
                     ++it;
                  }
                  *out = min;
                  out += outStride;
                  in += inStride;
               }
            }
         } else {
            // Run-based sliding window: `index` tracks how many more shifts
            // the current extremum remains inside the SE (it is decremented
            // per output pixel; a negative value forces a full recompute).
            PixelTableOffsets const& pixelTable = params.pixelTable;
            if( dilation_ ) {
               TPI max = 0; // The maximum value within the filter
               dip::sint index = -1; // Location of the maximum value w.r.t. the left edge
               for( dip::uint ii = 0; ii < length; ++ii ) {
                  // Check whether maximum is in filter
                  if( index >= 0 ) {
                     // Maximum is in filter. Check to see if a larger value came in to the filter.
                     // Only the last (newly entered) pixel of each run needs testing.
                     for( auto const& run : pixelTable.Runs() ) {
                        dip::sint len = static_cast< dip::sint >( run.length - 1 );
                        dip::sint position = run.offset + len * inStride;
                        TPI val = in[ position ];
                        if( max == val ) {
                           // Equal value further right: extends the extremum's lifetime.
                           index = std::max( index, static_cast< dip::sint >( len ));
                        } else if( val > max ) {
                           max = val;
                           index = len;
                        }
                     }
                  } else {
                     // Maximum is no longer in the filter. Find maximum by looping over all pixels in the table.
                     index = 0;
                     max = std::numeric_limits< TPI >::lowest();
                     //for( auto it = pixelTable.begin(); !it.IsAtEnd(); ++it ) {
                     for( auto const& run : pixelTable.Runs() ) {
                        dip::sint offset = run.offset;
                        for( dip::uint jj = 0; jj < run.length; ++jj ) {
                           TPI val = in[ offset ];
                           if( max == val ) {
                              index = std::max( index, static_cast< dip::sint >( jj ));
                           } else if( val > max ) {
                              max = val;
                              index = static_cast< dip::sint >( jj );
                           }
                           offset += pixelTable.Stride();
                        }
                     }
                  }
                  *out = max;
                  out += outStride;
                  in += inStride;
                  index--;
               }
            } else {
               // Mirror image of the dilation branch, tracking the minimum.
               TPI min = 0; // The minimum value within the filter
               dip::sint index = -1; // Location of the minimum value w.r.t. the left edge
               for( dip::uint ii = 0; ii < length; ++ii ) {
                  // Check whether minimum is in filter
                  if( index >= 0 ) {
                     // Minimum is in filter. Check to see if a smaller value came in to the filter.
                     for( auto const& run : pixelTable.Runs() ) {
                        dip::sint len = static_cast< dip::sint >( run.length - 1 );
                        dip::sint position = run.offset + len * inStride;
                        TPI val = in[ position ];
                        if( min == val ) {
                           index = std::max( index, static_cast< dip::sint >( len ));
                        } else if( val < min ) {
                           min = val;
                           index = len;
                        }
                     }
                  } else {
                     // Minimum is no longer in the filter. Find minimum by looping over all pixels in the table.
                     index = 0;
                     min = std::numeric_limits< TPI >::max();
                     //for( auto it = pixelTable.begin(); !it.IsAtEnd(); ++it ) {
                     for( auto const& run : pixelTable.Runs() ) {
                        dip::sint offset = run.offset;
                        for( dip::uint jj = 0; jj < run.length; ++jj ) {
                           TPI val = in[ offset ];
                           if( min == val ) {
                              index = std::max( index, static_cast< dip::sint >( jj ));
                           } else if( val < min ) {
                              min = val;
                              index = static_cast< dip::sint >( jj );
                           }
                           offset += pixelTable.Stride();
                        }
                     }
                  }
                  *out = min;
                  out += outStride;
                  in += inStride;
                  index--;
               }
            }
         }
      }
   private:
      bool dilation_;           // true: dilation (max); false: erosion (min)
      bool bruteForce_ = false; // chosen per pixel table in SetNumberOfThreads
      std::vector< dip::sint > offsets_; // used when bruteForce_
};
// Non-flat ("grey-value") structuring element: each SE pixel carries a weight that is
// added to the input sample for dilation, or subtracted from it for erosion, before
// taking the extremum over the neighborhood. Computation happens in dfloat and is
// clamped back to TPI.
template< typename TPI >
class GreyValueSEMorphologyLineFilter : public Framework::FullLineFilter {
   public:
      GreyValueSEMorphologyLineFilter( Polarity polarity ) : dilation_( polarity == Polarity::DILATION ) {}
      virtual dip::uint GetNumberOfOperations( dip::uint lineLength, dip::uint, dip::uint nKernelPixels, dip::uint ) override {
         return lineLength * nKernelPixels * 3;
      }
      virtual void SetNumberOfThreads( dip::uint, PixelTableOffsets const& pixelTable ) override {
         // Cache the SE's pixel offsets once; they are identical for all threads.
         offsets_ = pixelTable.Offsets();
      }
      virtual void Filter( Framework::FullLineFilterParameters const& params ) override {
         TPI* in = static_cast< TPI* >( params.inBuffer.buffer );
         dip::sint inStride = params.inBuffer.stride;
         TPI* out = static_cast< TPI* >( params.outBuffer.buffer );
         dip::sint outStride = params.outBuffer.stride;
         dip::uint length = params.bufferLength;
         std::vector< dfloat > const& weights = params.pixelTable.Weights();
         dip::uint nPixels = offsets_.size(); // weights and offsets are parallel arrays
         if( dilation_ ) {
            for( dip::uint ii = 0; ii < length; ++ii, in += inStride, out += outStride ) {
               TPI best = std::numeric_limits< TPI >::lowest();
               for( dip::uint kk = 0; kk < nPixels; ++kk ) {
                  best = std::max( best, clamp_cast< TPI >( static_cast< dfloat >( in[ offsets_[ kk ]] ) + weights[ kk ] ));
               }
               *out = best;
            }
         } else {
            for( dip::uint ii = 0; ii < length; ++ii, in += inStride, out += outStride ) {
               TPI best = std::numeric_limits< TPI >::max();
               for( dip::uint kk = 0; kk < nPixels; ++kk ) {
                  best = std::min( best, clamp_cast< TPI >( static_cast< dfloat >( in[ offsets_[ kk ]] ) - weights[ kk ] ));
               }
               *out = best;
            }
         }
      }
   private:
      bool dilation_;                     // true for dilation, false for erosion
      std::vector< dip::sint > offsets_;  // SE pixel offsets, cached in SetNumberOfThreads
};
// Applies a basic morphological operation (dilation, erosion, closing, opening) with an
// arbitrary structuring element given as a `Kernel`, using the Full framework.
// Flat SEs are handled by FlatSEMorphologyLineFilter; SEs with weights by
// GreyValueSEMorphologyLineFilter. `kernel` is taken by non-const reference because
// closings/openings mirror it in place between the two passes.
void GeneralSEMorphology(
      Image const& in,
      Image& out,
      Kernel& kernel,
      BoundaryConditionArray const& bc,
      BasicMorphologyOperation operation
) {
   bool hasWeights = kernel.HasWeights();
   UnsignedArray originalImageSize = in.Sizes();
   Framework::FullOptions opts = {};
   DataType dtype = in.DataType();
   DataType ovltype = dtype;
   if( ovltype.IsBinary() ) {
      ovltype = DT_UINT8; // Dirty trick: process a binary image with the same filter as a UINT8 image, but don't convert the type -- for some reason this is faster!
      DIP_THROW_IF( hasWeights, E::DATA_TYPE_NOT_SUPPORTED );
   }
   std::unique_ptr< Framework::FullLineFilter > lineFilter;
   DIP_START_STACK_TRACE
   switch( operation ) {
      case BasicMorphologyOperation::DILATION:
         if( hasWeights ) {
            DIP_OVL_NEW_REAL( lineFilter, GreyValueSEMorphologyLineFilter, ( Polarity::DILATION ), ovltype );
         } else {
            DIP_OVL_NEW_REAL( lineFilter, FlatSEMorphologyLineFilter, ( Polarity::DILATION ), ovltype );
         }
         Framework::Full( in, out, dtype, dtype, dtype, 1, BoundaryConditionForDilation( bc ), kernel, *lineFilter );
         break;
      case BasicMorphologyOperation::EROSION:
         if( hasWeights ) {
            DIP_OVL_NEW_REAL( lineFilter, GreyValueSEMorphologyLineFilter, ( Polarity::EROSION ), ovltype );
         } else {
            DIP_OVL_NEW_REAL( lineFilter, FlatSEMorphologyLineFilter, ( Polarity::EROSION ), ovltype );
         }
         Framework::Full( in, out, dtype, dtype, dtype, 1, BoundaryConditionForErosion( bc ), kernel, *lineFilter );
         break;
      case BasicMorphologyOperation::CLOSING:
         // Closing = dilation followed by erosion with the mirrored SE. Extend the image
         // by twice the kernel boundary up front, so the intermediate result remains valid
         // where the second pass needs it, then tell Full() not to expand again.
         ExtendImageDoubleBoundary( in, out, kernel.Boundary( in.Dimensionality() ), BoundaryConditionForDilation( bc ));
         opts += Framework::FullOption::BorderAlreadyExpanded;
         if( hasWeights ) {
            DIP_OVL_NEW_REAL( lineFilter, GreyValueSEMorphologyLineFilter, ( Polarity::DILATION ), ovltype );
         } else {
            DIP_OVL_NEW_REAL( lineFilter, FlatSEMorphologyLineFilter, ( Polarity::DILATION ), ovltype );
         }
         Framework::Full( out, out, dtype, dtype, dtype, 1, {}, kernel, *lineFilter, opts );
         // Note that the output image has a newly-allocated data segment, we've lost the boundary extension we had.
         // But we still have an `out` that is larger than `in` by one boundary extension.
         out.Crop( originalImageSize );
         kernel.Mirror(); // second pass uses the mirrored SE
         if( hasWeights ) {
            DIP_OVL_NEW_REAL( lineFilter, GreyValueSEMorphologyLineFilter, ( Polarity::EROSION ), ovltype );
         } else {
            DIP_OVL_NEW_REAL( lineFilter, FlatSEMorphologyLineFilter, ( Polarity::EROSION ), ovltype );
         }
         Framework::Full( out, out, dtype, dtype, dtype, 1, {}, kernel, *lineFilter, opts );
         break;
      case BasicMorphologyOperation::OPENING:
         // Opening = erosion followed by dilation with the mirrored SE; mirror image of
         // the CLOSING case above.
         ExtendImageDoubleBoundary( in, out, kernel.Boundary( in.Dimensionality() ), BoundaryConditionForErosion( bc ));
         opts += Framework::FullOption::BorderAlreadyExpanded;
         if( hasWeights ) {
            DIP_OVL_NEW_REAL( lineFilter, GreyValueSEMorphologyLineFilter, ( Polarity::EROSION ), ovltype );
         } else {
            DIP_OVL_NEW_REAL( lineFilter, FlatSEMorphologyLineFilter, ( Polarity::EROSION ), ovltype );
         }
         Framework::Full( out, out, dtype, dtype, dtype, 1, {}, kernel, *lineFilter, opts );
         // Note that the output image has a newly-allocated data segment, we've lost the boundary extension we had.
         // But we still have an `out` that is larger than `in` by one boundary extension.
         out.Crop( originalImageSize );
         kernel.Mirror();
         if( hasWeights ) {
            DIP_OVL_NEW_REAL( lineFilter, GreyValueSEMorphologyLineFilter, ( Polarity::DILATION ), ovltype );
         } else {
            DIP_OVL_NEW_REAL( lineFilter, FlatSEMorphologyLineFilter, ( Polarity::DILATION ), ovltype );
         }
         Framework::Full( out, out, dtype, dtype, dtype, 1, {}, kernel, *lineFilter, opts );
         break;
   }
   DIP_END_STACK_TRACE
}
// --- Parabolic morphology ---
// 1D parabolic (quadratic) structuring function morphology, applied per line by the
// Separable framework. For dilation it computes, per output pixel x:
//    max_j( in[x+j] - lambda * j^2 ), with lambda = 1 / param^2,
// and the min/+lambda equivalent for erosion. The implementation makes two passes:
// left-to-right into a per-thread scratch buffer (causal half of the parabola), then
// right-to-left from the buffer into the output (anti-causal half). `index` tracks how
// far back the current extremum lies, so the inner search loop only re-scans the span
// that can still influence the result; cost is therefore data-dependent.
template< typename TPI >
class ParabolicMorphologyLineFilter : public Framework::SeparableLineFilter {
   public:
      // `params` holds one parabola width per image dimension; kept by reference, so the
      // caller's array must outlive this filter.
      ParabolicMorphologyLineFilter( FloatArray const& params, Polarity polarity ) :
            params_( params ), dilation_( polarity == Polarity::DILATION ) {}
      virtual void SetNumberOfThreads( dip::uint threads ) override {
         buffers_.resize( threads ); // one scratch line per thread
      }
      virtual dip::uint GetNumberOfOperations( dip::uint lineLength, dip::uint, dip::uint, dip::uint ) override {
         // Actual cost depends on data!
         return lineLength * 12;
      }
      virtual void Filter( Framework::SeparableLineFilterParameters const& params ) override {
         TPI* in = static_cast< TPI* >( params.inBuffer.buffer );
         dip::uint length = params.inBuffer.length;
         dip::sint inStride = params.inBuffer.stride;
         TPI* out = static_cast< TPI* >( params.outBuffer.buffer );
         dip::sint outStride = params.outBuffer.stride;
         // lambda is the parabola's curvature for the dimension being processed.
         TPI lambda = static_cast< TPI >( 1.0 / ( params_[ params.dimension ] * params_[ params.dimension ] ));
         // Allocate buffer if it's not yet there.
         if( buffers_[ params.thread ].size() != length ) {
            buffers_[ params.thread ].resize( length );
         }
         TPI* buf = buffers_[ params.thread ].data();
         *buf = *in; // first pixel: nothing to its left, copy as-is
         in += inStride;
         ++buf;
         dip::sint index = 0; // offset (<= 0) of the current extremum relative to the current pixel
         if( dilation_ ) {
            // Start with processing the line from left to right
            for( dip::uint ii = 1; ii < length; ++ii ) {
               --index;
               if( *in >= *( buf - 1 )) {
                  // New sample dominates everything before it: it becomes the extremum.
                  *buf = *in;
                  index = 0;
               } else {
                  // Re-scan from the previous extremum position up to the current pixel.
                  TPI max = std::numeric_limits< TPI >::lowest();
                  for( dip::sint jj = index; jj <= 0; ++jj ) {
                     TPI val = in[ jj * inStride ] - lambda * static_cast< TPI >( jj * jj );
                     if( val >= max ) {
                        max = val;
                        index = jj;
                     }
                  }
                  *buf = max;
               }
               in += inStride;
               ++buf;
            }
            // Now process the line from right to left
            out += static_cast< dip::sint >( length - 1 ) * outStride;
            --buf;
            *out = *buf;
            out -= outStride;
            --buf;
            index = 0;
            for( dip::uint ii = 1; ii < length; ++ii ) {
               ++index;
               if( *buf >= *( out + outStride )) {
                  *out = *buf;
                  index = 0;
               } else {
                  TPI max = std::numeric_limits< TPI >::lowest();
                  for(dip::sint jj = index; jj >= 0; --jj ) {
                     TPI val = buf[ jj ] - lambda * static_cast< TPI >( jj * jj );
                     if( val >= max ) {
                        max = val;
                        index = jj;
                     }
                  }
                  *out = max;
               }
               out -= outStride;
               --buf;
            }
         } else {
            // Erosion: identical structure with min, and the parabola added instead of subtracted.
            // Start with processing the line from left to right
            for( dip::uint ii = 1; ii < length; ++ii ) {
               --index;
               if( *in <= *( buf - 1 )) {
                  *buf = *in;
                  index = 0;
               } else {
                  TPI min = std::numeric_limits< TPI >::max();
                  for( dip::sint jj = index; jj <= 0; ++jj ) {
                     TPI val = in[ jj * inStride ] + lambda * static_cast< TPI >( jj * jj );
                     if( val <= min ) {
                        min = val;
                        index = jj;
                     }
                  }
                  *buf = min;
               }
               in += inStride;
               ++buf;
            }
            // Now process the line from right to left
            out += static_cast< dip::sint >( length - 1 ) * outStride;
            --buf;
            *out = *buf;
            out -= outStride;
            --buf;
            index = 0;
            for( dip::uint ii = 1; ii < length; ++ii ) {
               ++index;
               if( *buf <= *( out + outStride )) {
                  *out = *buf;
                  index = 0;
               } else {
                  TPI min = std::numeric_limits< TPI >::max();
                  for(dip::sint jj = index; jj >= 0; --jj ) {
                     TPI val = buf[ jj ] + lambda * static_cast< TPI >( jj * jj );
                     if( val <= min ) {
                        min = val;
                        index = jj;
                     }
                  }
                  *out = min;
               }
               out -= outStride;
               --buf;
            }
         }
      }
   private:
      FloatArray const& params_;             // per-dimension parabola widths (not owned)
      std::vector< std::vector< TPI >> buffers_; // one for each thread
      bool dilation_;                        // true for dilation, false for erosion
};
// Dispatches a parabolic morphological operation. Dimensions with a non-positive
// parameter are skipped. Closings/openings are composed as two separable passes of
// opposite polarity; the parabolic SE is symmetric, so no mirroring is needed.
void ParabolicMorphology(
      Image const& in,
      Image& out,
      FloatArray const& filterParam,
      BoundaryConditionArray const& bc, // will not be used, as border==0.
      BasicMorphologyOperation operation
) {
   dip::uint nDims = in.Dimensionality();
   BooleanArray process( nDims, false );
   for( dip::uint ii = 0; ii < nDims; ++ii ) {
      process[ ii ] = filterParam[ ii ] > 0.0;
   }
   DataType dtype = DataType::SuggestFlex( in.DataType() ); // Returns either float or complex. If complex, DIP_OVL_NEW_FLOAT will throw.
   std::unique_ptr< Framework::SeparableLineFilter > lineFilter;
   DIP_START_STACK_TRACE
   // First pass: dilation for DILATION and CLOSING, erosion for EROSION and OPENING.
   Polarity first = (( operation == BasicMorphologyOperation::DILATION ) ||
                     ( operation == BasicMorphologyOperation::CLOSING ))
                    ? Polarity::DILATION : Polarity::EROSION;
   DIP_OVL_NEW_FLOAT( lineFilter, ParabolicMorphologyLineFilter, ( filterParam, first ), dtype );
   Framework::Separable( in, out, dtype, dtype, process, { 0 }, bc, *lineFilter );
   // Second pass (closings and openings only), with the opposite polarity, in place.
   if(( operation == BasicMorphologyOperation::CLOSING ) ||
      ( operation == BasicMorphologyOperation::OPENING )) {
      Polarity second = ( first == Polarity::DILATION ) ? Polarity::EROSION : Polarity::DILATION;
      DIP_OVL_NEW_FLOAT( lineFilter, ParabolicMorphologyLineFilter, ( filterParam, second ), dtype );
      Framework::Separable( out, out, dtype, dtype, process, { 0 }, bc, *lineFilter );
   }
   DIP_END_STACK_TRACE
}
// --- Basic 3x3 diamond-shaped SE ---
// Scan-framework line filter implementing one pass of the elemental 3x3 diamond SE
// (the 4-connected neighborhood: center plus one neighbor along dim1_ and dim2_ in each
// direction) in the 2D plane spanned by dim1_ and dim2_. It reads neighbors directly via
// the image strides, so the Scan framework must be configured to pass unbuffered pointers
// into the image (see Elemental2DDiamondMorphology below).
template< typename TPI >
class Elemental2DDiamondMorphologyLineFilter : public Framework::ScanLineFilter {
   public:
      virtual dip::uint GetNumberOfOperations( dip::uint, dip::uint, dip::uint ) override {
         return 5; // number of pixels in SE.
      }
      virtual void Filter( Framework::ScanLineFilterParameters const& params ) override {
         auto bufferLength = params.bufferLength;
         TPI const* in = static_cast< TPI const* >( params.inBuffer[ 0 ].buffer );
         auto inStride = params.inBuffer[ 0 ].stride;
         TPI* out = static_cast< TPI* >( params.outBuffer[ 0 ].buffer );
         auto outStride = params.outBuffer[ 0 ].stride;
         // Are we processing along a dimension we're also filtering in?
         // procDim == 1 or 2 means the scan line runs along dim1_ or dim2_ respectively;
         // 0 means the line runs along some other dimension.
         dip::uint procDim = 0;
         if( dim1_ == params.dimension ) {
            procDim = 1;
         } else if( dim2_ == params.dimension ) {
            procDim = 2;
         }
         // Determine if the processing line is on an edge of the image or not
         int edge1 = 0; // -1 = top; 1 = bottom
         int edge2 = 0; // -1 = top; 1 = bottom
         if( procDim != 1 ) {
            if( params.position[ dim1_ ] == 0 ) {
               edge1 = -1;
            } else if ( params.position[ dim1_ ] == size1_ - 1 ) {
               edge1 = 1;
            }
         }
         if( procDim != 2 ) {
            if( params.position[ dim2_ ] == 0 ) {
               edge2 = -1;
            } else if ( params.position[ dim2_ ] == size2_ - 1 ) {
               edge2 = 1;
            }
         }
         // NOTE(review): the first/middle/last-pixel structure below assumes
         // bufferLength >= 2 when procDim != 0 -- a 1-pixel line would run both the
         // first-pixel and last-pixel code. TODO: confirm callers never hit that case.
         if(( edge1 != 0 ) || ( edge2 != 0 )) {
            // Tread carefully!
            // The line sits on an image border in dim1_ and/or dim2_: each neighbor
            // access must be guarded so we never read outside the image. This
            // implements a "skip missing neighbors" border treatment.
            // First pixel
            dip::uint ii = 0;
            TPI val = in[ 0 ];
            if(( procDim != 1 ) && ( edge1 != -1 )) {
               val = dilation_ ? std::max( val, in[ -stride1_ ] ) : std::min( val, in[ -stride1_ ] );
            }
            if( edge1 != 1 ) {
               val = dilation_ ? std::max( val, in[ stride1_ ] ) : std::min( val, in[ stride1_ ] );
            }
            if(( procDim != 2 ) && ( edge2 != -1 )) {
               val = dilation_ ? std::max( val, in[ -stride2_ ] ) : std::min( val, in[ -stride2_ ] );
            }
            if( edge2 != 1 ) {
               val = dilation_ ? std::max( val, in[ stride2_ ] ) : std::min( val, in[ stride2_ ] );
            }
            *out = val;
            in += inStride;
            out += outStride;
            // Most pixels
            for( ii = 1; ii < bufferLength - 1; ++ii ) {
               val = in[ 0 ];
               if( edge1 != -1 ) {
                  val = dilation_ ? std::max( val, in[ -stride1_ ] ) : std::min( val, in[ -stride1_ ] );
               }
               if( edge1 != 1 ) {
                  val = dilation_ ? std::max( val, in[ stride1_ ] ) : std::min( val, in[ stride1_ ] );
               }
               if( edge2 != -1 ) {
                  val = dilation_ ? std::max( val, in[ -stride2_ ] ) : std::min( val, in[ -stride2_ ] );
               }
               if( edge2 != 1 ) {
                  val = dilation_ ? std::max( val, in[ stride2_ ] ) : std::min( val, in[ stride2_ ] );
               }
               *out = val;
               in += inStride;
               out += outStride;
            }
            // Last pixel
            val = in[ 0 ];
            if( edge1 != -1 ) {
               val = dilation_ ? std::max( val, in[ -stride1_ ] ) : std::min( val, in[ -stride1_ ] );
            }
            if(( procDim != 1 ) && ( edge1 != 1 )) {
               val = dilation_ ? std::max( val, in[ stride1_ ] ) : std::min( val, in[ stride1_ ] );
            }
            if( edge2 != -1 ) {
               val = dilation_ ? std::max( val, in[ -stride2_ ] ) : std::min( val, in[ -stride2_ ] );
            }
            if(( procDim != 2 ) && ( edge2 != 1 )) {
               val = dilation_ ? std::max( val, in[ stride2_ ] ) : std::min( val, in[ stride2_ ] );
            }
            *out = val;
         } else {
            // Otherwise, just plow ahead. Only the first and last pixel can access outside of image domain
            // (and only when the scan line runs along a filtered dimension, procDim != 0).
            if( dilation_ ) {
               // First pixel
               dip::uint ii = 0;
               TPI val = in[ 0 ];
               if( procDim != 1 ) {
                  val = std::max( val, in[ -stride1_ ] );
               }
               val = std::max( val, in[ stride1_ ] );
               if( procDim != 2 ) {
                  val = std::max( val, in[ -stride2_ ] );
               }
               val = std::max( val, in[ stride2_ ] );
               *out = val;
               in += inStride;
               out += outStride;
               // Most pixels
               for( ii = 1; ii < bufferLength - 1; ++ii ) {
                  val = in[ 0 ];
                  val = std::max( val, in[ -stride1_ ] );
                  val = std::max( val, in[ stride1_ ] );
                  val = std::max( val, in[ -stride2_ ] );
                  val = std::max( val, in[ stride2_ ] );
                  *out = val;
                  in += inStride;
                  out += outStride;
               }
               // Last pixel
               val = in[ 0 ];
               val = std::max( val, in[ -stride1_ ] );
               if( procDim != 1 ) {
                  val = std::max( val, in[ stride1_ ] );
               }
               val = std::max( val, in[ -stride2_ ] );
               if( procDim != 2 ) {
                  val = std::max( val, in[ stride2_ ] );
               }
               *out = val;
            } else { // erosion
               // First pixel
               dip::uint ii = 0;
               TPI val = in[ 0 ];
               if( procDim != 1 ) {
                  val = std::min( val, in[ -stride1_ ] );
               }
               val = std::min( val, in[ stride1_ ] );
               if( procDim != 2 ) {
                  val = std::min( val, in[ -stride2_ ] );
               }
               val = std::min( val, in[ stride2_ ] );
               *out = val;
               in += inStride;
               out += outStride;
               // Most pixels
               for( ii = 1; ii < bufferLength - 1; ++ii ) {
                  val = in[ 0 ];
                  val = std::min( val, in[ -stride1_ ] );
                  val = std::min( val, in[ stride1_ ] );
                  val = std::min( val, in[ -stride2_ ] );
                  val = std::min( val, in[ stride2_ ] );
                  *out = val;
                  in += inStride;
                  out += outStride;
               }
               // Last pixel
               val = in[ 0 ];
               val = std::min( val, in[ -stride1_ ] );
               if( procDim != 1 ) {
                  val = std::min( val, in[ stride1_ ] );
               }
               val = std::min( val, in[ -stride2_ ] );
               if( procDim != 2 ) {
                  val = std::min( val, in[ stride2_ ] );
               }
               *out = val;
            }
         }
      }
      Elemental2DDiamondMorphologyLineFilter(
            dip::uint dim1, dip::uint dim2, dip::uint size1, dip::uint size2,
            dip::sint stride1, dip::sint stride2, Polarity polarity
      ) : dim1_( dim1 ), dim2_( dim2 ), size1_( size1 ), size2_( size2 ),
          stride1_( stride1 ), stride2_( stride2 ), dilation_( polarity == Polarity::DILATION ) {}
   private:
      dip::uint dim1_;
      dip::uint dim2_;
      dip::uint size1_; // size of dim1
      dip::uint size2_; // size of dim2
      dip::sint stride1_; // stride of dim1
      dip::sint stride2_; // stride of dim2
      bool dilation_; // true for dilation, false for erosion
};
// Applies a single pass of the elemental 3x3 diamond SE (4-connected neighborhood)
// dilation or erosion in the plane spanned by dim1 and dim2.
void Elemental2DDiamondMorphology(
      Image const& c_in,
      Image& out,
      dip::uint dim1, // dimension index to work in
      dip::uint dim2, // other dimension index to work in -- this is a 2D diamond operation
      Polarity polarity
) {
   Image input = c_in.QuickCopy();
   if( out.Aliases( input )) {
      // The line filter reads neighboring lines, so we cannot work in place: force the
      // output to get a freshly allocated data segment.
      DIP_STACK_TRACE_THIS( out.Strip() );
   }
   DataType dtype = input.DataType();
   DIP_START_STACK_TRACE
   std::unique_ptr< Framework::ScanLineFilter > filter;
   DIP_OVL_NEW_NONCOMPLEX( filter, Elemental2DDiamondMorphologyLineFilter,
                           ( dim1, dim2, input.Size( dim1 ), input.Size( dim2 ), input.Stride( dim1 ), input.Stride( dim2 ), polarity ),
                           dtype );
   // We're using the Scan framework, but we're being careful to ensure that no buffers are used:
   // the line filter is guaranteed to receive pointers into the input and output images.
   Framework::ScanMonadic( input, out, dtype, dtype, 1, *filter, { Framework::ScanOption::NeedCoordinates } );
   DIP_END_STACK_TRACE
}
// Applies `repetitions` passes of the elemental 3x3 diamond SE. Repeated application
// grows the effective diamond: `repetitions` passes produce a diamond of size
// 2*repetitions + 1. Closings/openings chain the two polarities.
void Elemental2DDiamondMorphology(
      Image const& in,
      Image& out,
      dip::uint dim1, // dimension index to work in
      dip::uint dim2, // other dimension index to work in -- this is a 2D diamond operation
      BasicMorphologyOperation operation,
      dip::uint repetitions // keep this small!
) {
   if( operation == BasicMorphologyOperation::CLOSING ) {
      Elemental2DDiamondMorphology( in, out, dim1, dim2, BasicMorphologyOperation::DILATION, repetitions );
      Elemental2DDiamondMorphology( out, out, dim1, dim2, BasicMorphologyOperation::EROSION, repetitions );
   } else if( operation == BasicMorphologyOperation::OPENING ) {
      Elemental2DDiamondMorphology( in, out, dim1, dim2, BasicMorphologyOperation::EROSION, repetitions );
      Elemental2DDiamondMorphology( out, out, dim1, dim2, BasicMorphologyOperation::DILATION, repetitions );
   } else {
      // DILATION or EROSION: first pass reads `in`, remaining passes work in place on `out`.
      Polarity polarity = ( operation == BasicMorphologyOperation::DILATION ) ? Polarity::DILATION : Polarity::EROSION;
      Elemental2DDiamondMorphology( in, out, dim1, dim2, polarity );
      for( dip::uint ii = 1; ii < repetitions; ++ii ) {
         Elemental2DDiamondMorphology( out, out, dim1, dim2, polarity );
      }
   }
}
// --- Composed SEs ---
// Morphology with a discrete line SE, decomposed (when worthwhile) into a short discrete
// line followed by a periodic line, which together cover the same set of pixels much
// more cheaply than the full line. Falls back to a single FastLineMorphology call when
// the line is exactly representable, or to GeneralSEMorphology for short lines.
void LineMorphology(
      Image const& in,
      Image& out,
      FloatArray filterParam, // by copy
      Mirror mirror,
      BoundaryConditionArray const& bc,
      BasicMorphologyOperation operation
) {
   // Normalize direction so that, for even-sized lines, the origin is in a consistent place.
   if( filterParam[ 0 ] < 0 ) {
      for( auto& l: filterParam ) {
         l = -l;
      }
   }
   dip::uint maxSize; // size of the line along its longest axis, in pixels
   dip::uint steps;   // number of points of the periodic-line component
   std::tie( maxSize, steps ) = PeriodicLineParameters( filterParam );
   if( steps == maxSize ) {
      // This means that all filterParam are the same (or 1)
      FastLineMorphology( in, out, filterParam, StructuringElement::ShapeCode::FAST_LINE, mirror, bc, operation );
   } else {
      if(( steps > 1 ) && ( maxSize > 5 )) { // TODO: an optimal threshold here is impossible to determine. It depends on the processing dimension and the angle of the line.
         // Decompose: discrete line of length maxSize/steps, then a periodic line with
         // `steps` points.
         dip::uint nDims = in.Dimensionality();
         FloatArray discreteLineParam( nDims, 0.0 );
         for( dip::uint ii = 0; ii < nDims; ++ii ) {
            discreteLineParam[ ii ] = std::copysign( std::round( std::abs( filterParam[ ii ] )), filterParam[ ii ] ) / static_cast< dfloat >( steps );
         }
         // If the periodic line with even number of points, then the discrete line has origin at left side, to
         // correct for origin displacement of periodic line
         Kernel discreteLineKernel(( steps & 1 ) ? Kernel::ShapeCode::LINE : Kernel::ShapeCode::LEFT_LINE, discreteLineParam );
         if( mirror == Mirror::YES ) {
            discreteLineKernel.Mirror();
         }
         switch( operation ) {
            default:
            //case BasicMorphologyOperation::DILATION:
            //case BasicMorphologyOperation::EROSION:
               GeneralSEMorphology( in, out, discreteLineKernel, bc, operation );
               FastLineMorphology( out, out, filterParam, StructuringElement::ShapeCode::PERIODIC_LINE, mirror, bc, operation );
               break;
            case BasicMorphologyOperation::CLOSING:
               // dilation with both components, then erosion with both (mirrored) components.
               GeneralSEMorphology( in, out, discreteLineKernel, bc, BasicMorphologyOperation::DILATION );
               FastLineMorphology( out, out, filterParam, StructuringElement::ShapeCode::PERIODIC_LINE, mirror, bc, BasicMorphologyOperation::CLOSING );
               discreteLineKernel.Mirror();
               GeneralSEMorphology( out, out, discreteLineKernel, bc, BasicMorphologyOperation::EROSION );
               break;
            case BasicMorphologyOperation::OPENING:
               // erosion with both components, then dilation with both (mirrored) components.
               GeneralSEMorphology( in, out, discreteLineKernel, bc, BasicMorphologyOperation::EROSION );
               FastLineMorphology( out, out, filterParam, StructuringElement::ShapeCode::PERIODIC_LINE, mirror, bc, BasicMorphologyOperation::OPENING );
               discreteLineKernel.Mirror();
               GeneralSEMorphology( out, out, discreteLineKernel, bc, BasicMorphologyOperation::DILATION );
               break;
         }
      } else {
         // One step, no need to do a periodic line with a single point
         Kernel kernel( Kernel::ShapeCode::LINE, filterParam );
         if( mirror == Mirror::YES ) {
            kernel.Mirror();
         }
         GeneralSEMorphology( in, out, kernel, bc, operation );
      }
   }
}
// Applies the two diagonal-line components of the 2D diamond decomposition: one line at
// +45 degrees in the (procDim, dim2) plane, then one at -45 degrees, each of length
// `lineLength`.
void TwoStep2DDiamondMorphology(
      Image const& in,
      Image& out,
      dfloat lineLength,
      dip::uint procDim,
      dip::uint dim2,
      Mirror mirror,
      BoundaryConditionArray const& bc,
      BasicMorphologyOperation operation // should be either DILATION or EROSION.
) {
   dip::uint nDims = in.Dimensionality();
   FloatArray sizes( nDims, 1.0 );
   sizes[ procDim ] = lineLength;
   sizes[ dim2 ] = lineLength;       // +45 degree diagonal
   FastLineMorphology( in, out, sizes, StructuringElement::ShapeCode::FAST_LINE, mirror, bc, operation );
   sizes[ dim2 ] = -lineLength;      // -45 degree diagonal
   FastLineMorphology( out, out, sizes, StructuringElement::ShapeCode::FAST_LINE, mirror, bc, operation );
}
// Morphology with a diamond SE. For an isotropic 2D diamond it uses fast decompositions:
// small diamonds are built by repeating the elemental 3x3 diamond; larger ones decompose
// into a unit diamond plus two 45-degree lines. Anisotropic or >2D diamonds fall back to
// the general SE path; a diamond spanning a single dimension is just a line (rectangle).
void DiamondMorphology(
      Image const& in,
      Image& out,
      FloatArray size, // by copy, we'll modify it
      BoundaryConditionArray const& bc,
      BasicMorphologyOperation operation
) {
   dip::uint nDims = in.Dimensionality();
   dfloat param = 0; // will always be an odd integer
   bool isotropic = true;
   dip::uint nProcDims = 0; // number of dimensions with size > 1
   dip::uint procDim = 0; // first dimension with size > 1
   dip::uint dim2 = 0; // last dimension with size > 1
   for( dip::uint ii = 0; ii < nDims; ++ii ) {
      size[ ii ] = std::floor( size[ ii ] / 2 ) * 2 + 1; // an odd size, same as in `dip::PixelTable::PixelTable(S::DIAMOND)`
      if( size[ ii ] > 1 ) {
         ++nProcDims;
         if( param == 0 ) {
            param = size[ ii ];
            procDim = ii;
         } else if( size[ ii ] != param ) {
            isotropic = false;
            break;
         }
         dim2 = ii;
      }
   }
   if( nProcDims <= 1 ) {
      // Diamond extends along at most one dimension: it degenerates into a line segment.
      DIP_STACK_TRACE_THIS( RectangularMorphology( in, out, size, Mirror::NO, bc, operation ));
      return;
   }
   if( !isotropic || ( nProcDims > 2 )) {
      // We cannot do decomposition if not isotropic, or if too small, or if more than 2D
      DIP_START_STACK_TRACE
      Kernel kernel{ Kernel::ShapeCode::DIAMOND, size };
      GeneralSEMorphology( in, out, kernel, bc, operation );
      DIP_END_STACK_TRACE
      return;
   }
   if( param <= 9 ) { // Optimal threshold here depends on image size, machine architecture, etc.
      // We can do this with a few iterations of the elemental 2D diamond, which is faster than the other decomposition.
      dip::uint reps = dip::uint( param ) / 2; // param is always an odd integer
      Elemental2DDiamondMorphology( in, out, procDim, dim2, operation, reps );
      return;
   }
   // Separate 2D diamond SE: unit diamond + two lines at 45 degrees.
   dfloat lineLength = std::round(( param - 3.0 ) / 2.0 + 1.0 ); // rounding just in case there's a rounding error, but in principle this always gives a round number.
   DIP_START_STACK_TRACE
   switch( operation ) {
      //case BasicMorphologyOperation::DILATION:
      //case BasicMorphologyOperation::EROSION:
      default:
         // TODO: For fully correct operation, we should do boundary expansion first, then these two operations, then crop.
         Elemental2DDiamondMorphology( in, out, procDim, dim2, operation, 1 );
         if( !( static_cast< dip::sint >( lineLength ) & 1u )) {
            // For even-sized lines, we need an additional one-pixel shift
            FloatArray shift( in.Dimensionality(), 0 );
            shift[ procDim ] = -1;
            // Pad with the operation's neutral value so the shift doesn't introduce
            // spurious extrema at the image edge.
            BoundaryCondition default_bc = ( operation == BasicMorphologyOperation::DILATION )
                                           ? BoundaryCondition::ADD_MIN_VALUE : BoundaryCondition::ADD_MAX_VALUE;
            Resampling( out, out, { 1.0 }, shift, S::NEAREST, bc.empty() ? BoundaryConditionArray{ default_bc } : bc );
         }
         TwoStep2DDiamondMorphology( out, out, lineLength, procDim, dim2, Mirror::NO, bc, operation );
         break;
      case BasicMorphologyOperation::CLOSING:
         // For closings and openings we can ignore the shift, we just need to mirror the lines in the 2nd application.
         TwoStep2DDiamondMorphology( in, out, lineLength, procDim, dim2, Mirror::NO, bc, BasicMorphologyOperation::DILATION );
         Elemental2DDiamondMorphology( out, out, procDim, dim2, operation, 1 );
         TwoStep2DDiamondMorphology( out, out, lineLength, procDim, dim2, Mirror::YES, bc, BasicMorphologyOperation::EROSION );
         break;
      case BasicMorphologyOperation::OPENING:
         TwoStep2DDiamondMorphology( in, out, lineLength, procDim, dim2, Mirror::NO, bc, BasicMorphologyOperation::EROSION );
         Elemental2DDiamondMorphology( out, out, procDim, dim2, operation, 1 );
         TwoStep2DDiamondMorphology( out, out, lineLength, procDim, dim2, Mirror::YES, bc, BasicMorphologyOperation::DILATION );
         break;
   }
   DIP_END_STACK_TRACE
}
// Morphology with an octagonal SE, decomposed into a diamond of size n and a rectangle
// of size m (n + m - 1 == requested size). Closings/openings sandwich the diamond
// closing/opening between the rectangle's two half-operations (mirrored on the way back).
void OctagonalMorphology(
      Image const& in,
      Image& out,
      FloatArray size, // by copy
      BoundaryConditionArray const& bc,
      BasicMorphologyOperation operation
) {
   // An octagon is formed by a diamond of size n, and a rectangle of size m = n - 2 or m = n.
   // Both n and m are odd integers. The octagon then has a size of n + m - 1.
   // We allow anisotropic octagons by increasing some dimensions of the rectangle (but not decreasing).
   // That is, the diamond will be isotropic, and the rectangle will have at least one side of size m,
   // other dimensions of the rectangle can be larger.
   // Any dimension with an extension of 1 is not included in these calculations.
   DIP_START_STACK_TRACE
   // Determine the smallest dimension (excluding dimensions of size 1)
   dfloat smallestSize = 0.0;
   for( dfloat& sz : size ) {
      sz = std::floor(( sz - 1 ) / 2 ) * 2 + 1; // an odd integer smaller or equal to sz.
      if( sz >= 3.0 ) {
         if( smallestSize == 0.0 ) {
            smallestSize = sz;
         } else {
            smallestSize = std::min( smallestSize, sz );
         }
      } else {
         sz = 1.0; // too small to filter along this dimension
      }
   }
   if( smallestSize == 0.0 ) {
      // No dimension >= 3
      out.Copy( in ); // the SE is a single pixel: the operation is the identity
      return;
   }
   // Given size = n + m + 1, determine n, the size of the diamond
   dfloat n = 2.0 * floor(( smallestSize + 1.0 ) / 4.0 ) + 1.0;
   bool skipRect = true;
   FloatArray rectSize( size.size(), 1.0 );
   for( dip::uint ii = 0; ii < size.size(); ++ii ) {
      if( size[ ii ] >= 3.0 ) {
         // at least 3 pixels in this dimension
         rectSize[ ii ] = size[ ii ] - n + 1.0;
         if( rectSize[ ii ] > 1 ) {
            skipRect = false; // the rectangle component is non-trivial, apply it
         }
         size[ ii ] = n; // `size` now holds the (isotropic) diamond size
      }
   }
   switch( operation ) {
      default:
      //case BasicMorphologyOperation::DILATION:
      //case BasicMorphologyOperation::EROSION:
         // Step 1: apply operation with a diamond
         // TODO: This can be simpler, we only need the line SEs in DiamondMorphology, not the unit diamond.
         DiamondMorphology( in, out, size, bc, operation );
         if( !skipRect ) {
            // Step 2: apply operation with a rectangle
            RectangularMorphology( out, out, rectSize, Mirror::NO, bc, operation );
         }
         break;
      case BasicMorphologyOperation::CLOSING:
         if( skipRect ) {
            DiamondMorphology( in, out, size, bc, BasicMorphologyOperation::CLOSING );
         } else {
            // rectangle dilation, diamond closing, mirrored rectangle erosion
            RectangularMorphology( in, out, rectSize, Mirror::NO, bc, BasicMorphologyOperation::DILATION );
            DiamondMorphology( out, out, size, bc, BasicMorphologyOperation::CLOSING );
            RectangularMorphology( out, out, rectSize, Mirror::YES, bc, BasicMorphologyOperation::EROSION );
         }
         break;
      case BasicMorphologyOperation::OPENING:
         if( skipRect ) {
            DiamondMorphology( in, out, size, bc, BasicMorphologyOperation::OPENING );
         } else {
            // rectangle erosion, diamond opening, mirrored rectangle dilation
            RectangularMorphology( in, out, rectSize, Mirror::NO, bc, BasicMorphologyOperation::EROSION );
            DiamondMorphology( out, out, size, bc, BasicMorphologyOperation::OPENING );
            RectangularMorphology( out, out, rectSize, Mirror::YES, bc, BasicMorphologyOperation::DILATION );
         }
         break;
   }
   DIP_END_STACK_TRACE
}
// Morphology with an elliptic (disk) SE. Small isotropic 2D disks are approximated by
// cheaper shapes (3x3/5x5 diamond, 3x3 square) per the threshold table below; everything
// else (anisotropic, >2D, or large disks) goes through the general SE path.
void EllipticMorphology(
      Image const& in,
      Image& out,
      FloatArray const& ellipseSizes,
      BoundaryConditionArray const& bc,
      BasicMorphologyOperation operation
) {
   // Small disks look like diamonds or rectangles
   // In 2D:
   //    sizes > sqrt(20) = 4.4721 : elliptic
   //    sizes > 4                 : diamond 5x5
   //    sizes > sqrt(8) = 2.8284  : square 3x3
   //    sizes > 2                 : diamond 3x3
   //    otherwise                 : null-op
   // TODO: In 3D?
   dfloat diameter = 0;   // diameter of the first dimension with extent > 2
   dfloat param = 0;      // approximation size (3 or 5) substituted for that diameter
   bool isotropic = true;
   FloatArray sizes = ellipseSizes;
   dip::uint dim1 = 0;    // first dimension with extent > 2
   dip::uint dim2 = 0;    // last dimension with extent > 2
   dip::uint nDims = 0;   // number of dimensions with extent > 2
   for( dip::uint ii = 0; ii < sizes.size(); ++ii ) {
      if( sizes[ ii ] > 2 ) {
         if( diameter == 0 ) {
            diameter = sizes[ ii ];
            sizes[ ii ] = param = diameter <= 4 ? 3.0 : 5.0; // Sets right size for small diamond or square approximation
            dim1 = ii;
         } else if( sizes[ ii ] == diameter ) {
            sizes[ ii ] = param;
         } else {
            isotropic = false;
         }
         dim2 = ii;
         ++nDims;
      } else {
         sizes[ ii ] = 1; // too small to filter along this dimension
      }
   }
   if( diameter == 0 ) { // happens if diameter <= 2
      // Null op
      out.Copy( in );
      return;
   }
   if( nDims == 1 ) {
      // In 1D everything is a rectangle
      sizes[ dim1 ] = std::floor(( ellipseSizes[ dim1 ] - 1e-6 ) / 2 ) * 2 + 1; // largest odd integer strictly smaller than the diameter
      RectangularMorphology( in, out, sizes, Mirror::NO, bc, operation );
      return;
   }
   if( isotropic && ( nDims == 2 )) {
      if( diameter <= std::sqrt( 8 )) {
         // diamond size 3
         Elemental2DDiamondMorphology( in, out, dim1, dim2, operation, 1 );
         return;
      }
      if( diameter <= 4 ) {
         // square size 3
         RectangularMorphology( in, out, sizes, Mirror::NO, bc, operation );
         return;
      }
      if ( diameter <= std::sqrt( 20 )) {
         // diamond size 5
         Elemental2DDiamondMorphology( in, out, dim1, dim2, operation, 2 );
         return;
      }
   }
   // SEs with more than 2 dimensions handled as general SEs
   // Larger disk SEs handled as general SEs
   // Non-isotropic elliptic SEs handled as general SEs
   Kernel kernel{ Kernel::ShapeCode::ELLIPTIC, ellipseSizes };
   GeneralSEMorphology( in, out, kernel, bc, operation );
}
} // namespace
// --- Dispatch ---
// Entry point for the basic morphological operations: validates the input, then
// dispatches on the structuring element's shape to the specialized implementation.
void BasicMorphology(
      Image const& in,
      Image& out,
      StructuringElement const& se,
      StringArray const& boundaryCondition,
      BasicMorphologyOperation operation
) {
   DIP_THROW_IF( !in.IsForged(), E::IMAGE_NOT_FORGED );
   DIP_THROW_IF( !in.IsScalar(), E::IMAGE_NOT_SCALAR );
   DIP_THROW_IF( in.DataType().IsComplex(), E::DATA_TYPE_NOT_SUPPORTED );
   DIP_THROW_IF( in.Dimensionality() < 1, E::DIMENSIONALITY_NOT_SUPPORTED );
   DIP_START_STACK_TRACE
   BoundaryConditionArray bc = StringArrayToBoundaryConditionArray( boundaryCondition );
   Mirror mirror = GetMirrorParam( se.IsMirrored() );
   StructuringElement::ShapeCode shape = se.Shape();
   if( shape == StructuringElement::ShapeCode::RECTANGULAR ) {
      RectangularMorphology( in, out, se.Params( in.Sizes() ), mirror, bc, operation );
   } else if( shape == StructuringElement::ShapeCode::ELLIPTIC ) {
      EllipticMorphology( in, out, se.Params( in.Sizes() ), bc, operation );
   } else if( shape == StructuringElement::ShapeCode::DIAMOND ) {
      DiamondMorphology( in, out, se.Params( in.Sizes() ), bc, operation );
   } else if( shape == StructuringElement::ShapeCode::OCTAGONAL ) {
      OctagonalMorphology( in, out, se.Params( in.Sizes() ), bc, operation );
   } else if( shape == StructuringElement::ShapeCode::LINE ) {
      LineMorphology( in, out, se.Params( in.Sizes() ), mirror, bc, operation );
   } else if(( shape == StructuringElement::ShapeCode::FAST_LINE ) ||
             ( shape == StructuringElement::ShapeCode::PERIODIC_LINE )) {
      FastLineMorphology( in, out, se.Params( in.Sizes() ), shape, mirror, bc, operation );
   } else if( shape == StructuringElement::ShapeCode::INTERPOLATED_LINE ) {
      SkewLineMorphology( in, out, se.Params( in.Sizes() ), mirror, bc, operation );
   } else if( shape == StructuringElement::ShapeCode::PARABOLIC ) {
      ParabolicMorphology( in, out, se.Params( in.Sizes() ), bc, operation );
   } else {
      // DISCRETE_LINE, CUSTOM, and any other shape: use the SE's kernel directly.
      Kernel kernel = se.Kernel();
      GeneralSEMorphology( in, out, kernel, bc, operation );
   }
   DIP_END_STACK_TRACE
}
} // namespace detail
} // namespace dip
#ifdef DIP_CONFIG_ENABLE_DOCTEST
#include "doctest.h"
#include "diplib/statistics.h"
#include "diplib/iterators.h"
// Exercises each SE shape with a single-pixel input: a dilation must spread
// the pixel over the SE's support (checked via dip::Count), and the mirrored
// erosion must collapse it back to the original single pixel at (32,20).
DOCTEST_TEST_CASE("[DIPlib] testing the basic morphological filters") {
   dip::Image in( { 64, 41 }, 1, dip::DT_UINT8 );
   in = 0;
   dip::uint pval = 3 * 3; // the one non-zero pixel value
   in.At( 32, 20 ) = pval;
   dip::Image out;

   // Rectangular morphology
   dip::StructuringElement se = {{ 2, 1 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 2 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   se = {{ 3, 1 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 3 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   se = {{ 10, 1 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 10 );
   se = {{ 11, 1 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 11 );
   se = {{ 10, 11 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 10*11 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   // A closing of an isolated pixel must be the identity.
   se = {{ 2, 1 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 1, 3 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 10, 1 }, "rectangular" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );

   // PixelTable morphology
   se = {{ 1, 10 }, "elliptic" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 9 ); // rounded!
   se = {{ 1, 11 }, "elliptic" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 11 );
   se = {{ 3, 3 }, "elliptic" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 9 );
   se = {{ 10, 11 }, "elliptic" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 87 ); // area of the discretized 10x11 ellipse
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );

   // PixelTable morphology -- mirroring
   // A binary image used as SE: all 10x10 pixels are in the support.
   dip::Image seImg( { 10, 10 }, 1, dip::DT_BIN );
   seImg.Fill( 1 );
   se = seImg;
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 100 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?

   // Parabolic morphology
   se = {{ 10.0, 0.0 }, "parabolic" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   // Expected sum: the parabola pval - (d/10)^2, clipped at zero, summed over
   // both sides of the peak along x.
   dip::dfloat result = 0.0;
   for( dip::uint ii = 1; ii < 30; ++ii ) { // 30 = 10.0 * sqrt( pval )
      result += dip::dfloat( pval ) - dip::dfloat( ii * ii ) / 100.0; // 100.0 = 10.0 * 10.0
   }
   result = dip::dfloat( pval ) + result * 2.0;
   DOCTEST_CHECK( dip::Sum( out ).As< dip::dfloat >() == doctest::Approx( result ));
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is the origin in the right place?
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   result = 0.0;
   for( dip::uint ii = 1; ii < 30; ++ii ) { // 30 = 10.0 * sqrt( pval )
      result += dip::dfloat( ii * ii ) / 100.0; // 100.0 = 10.0 * 10.0
   }
   result = dip::dfloat( pval ) + result * 2.0;
   DOCTEST_CHECK( dip::Sum( out ).As< dip::dfloat >() == doctest::Approx( result ));
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is the origin in the right place?

   // Grey-value SE morphology
   // Only five finite samples in the SE contribute; the rest are -infinity.
   seImg = dip::Image( { 5, 6 }, 1, dip::DT_SFLOAT );
   seImg = -dip::infinity;
   seImg.At( 0, 0 ) = 0;
   seImg.At( 4, 5 ) = -5;
   seImg.At( 0, 5 ) = -5;
   seImg.At( 4, 0 ) = -8;
   seImg.At( 2, 3 ) = 0;
   se = seImg;
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Sum( out ).As< dip::uint >() == 5 * pval - 5 - 5 - 8 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is the main pixel in the right place and with the right value?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );

   // Line morphology
   // Each {length, angle-param} line must light up exactly `length` pixels
   // (except where the discretization differs, see the last case).
   se = {{ 10, 4 }, "discrete line" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 10 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 10, 4 }, "fast line" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 10 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 8, 4 }, "fast line" };
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 8 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 10, 4 }, "line" }; // periodic component n=2, discrete line {5,2}
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 10 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 8, 4 }, "line" }; // periodic component n=4, discrete line {2,1}
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 8 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 9, 6 }, "line" }; // periodic component n=3, discrete line {3,2}
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 9 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 12, 9 }, "line" }; // periodic component n=3, discrete line {4,3}
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 12 );
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
   se = {{ 8, 9 }, "line" }; // periodic component n=1, discrete line {8,9}
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::DILATION );
   DOCTEST_CHECK( dip::Count( out ) == 9 ); // discretization yields 9 pixels here, not 8
   se.Mirror();
   dip::detail::BasicMorphology( out, out, se, {}, dip::detail::BasicMorphologyOperation::EROSION );
   DOCTEST_CHECK( dip::Count( out ) == 1 ); // Did the erosion return the image to a single pixel?
   DOCTEST_CHECK( out.At( 32, 20 ) == pval ); // Is that pixel in the right place?
   dip::detail::BasicMorphology( in, out, se, {}, dip::detail::BasicMorphologyOperation::CLOSING );
   DOCTEST_CHECK( dip::Count( out ) == 1 );
   DOCTEST_CHECK( out.At( 32, 20 ) == pval );
}
#ifdef _OPENMP
#include "diplib/multithreading.h"
#include "diplib/generation.h"
#include "diplib/testing.h"
// Verifies that a separable dilation produces bit-identical results whether
// computed with one thread or with all available threads.
DOCTEST_TEST_CASE("[DIPlib] testing the full framework under multithreading") {
   // Compute using one thread
   dip::SetNumberOfThreads( 1 );
   // Generate test image (fixed seed => deterministic noise)
   dip::Image img{ dip::UnsignedArray{ 256, 192, 59 }, 1, dip::DT_DFLOAT };
   img.Fill( 0 );
   dip::Random random( 0 );
   dip::GaussianNoise( img, img, random );
   // Apply separable filter using one thread
   dip::Image out1 = dip::Dilation( img, { 5, "elliptic" } );
   // Reset number of threads (0 = use the default, all threads)
   dip::SetNumberOfThreads( 0 );
   // Apply separable filter using all threads
   dip::Image out2 = dip::Dilation( img, { 5, "elliptic" } );
   // Compare; EXACT requires bitwise equality, not an epsilon.
   DOCTEST_CHECK( dip::testing::CompareImages( out1, out2, dip::Option::CompareImagesMode::EXACT ));
}
#endif // _OPENMP
#endif // DIP_CONFIG_ENABLE_DOCTEST
| apache-2.0 |
webadvancedservicescom/magento | app/code/Magento/Ui/Component/Filter/Type/Select.php | 585 | <?php
/**
* @copyright Copyright (c) 2014 X.commerce, Inc. (http://www.magentocommerce.com)
*/
namespace Magento\Ui\Component\Filter\Type;
use Magento\Ui\Component\Filter\FilterAbstract;
/**
 * Select filter type.
 */
class Select extends FilterAbstract
{
    /**
     * Build an "equals" condition for the given filter value.
     *
     * Returns null when the value is empty and not numeric, so that an
     * empty selection produces no condition. Numeric zero ("0", 0) is a
     * valid selection and still yields a condition.
     *
     * @param string|array $value
     * @return array|null
     */
    public function getCondition($value)
    {
        if (empty($value) && !is_numeric($value)) {
            return null;
        }
        return ['eq' => $value];
    }
}
| apache-2.0 |
Thingee/cinder | cinder/api/contrib/volume_actions.py | 15333 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from oslo import messaging
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api import xmlutil
from cinder import exception
from cinder.openstack.common import log as logging
from cinder.openstack.common import strutils
from cinder import utils
from cinder import volume
LOG = logging.getLogger(__name__)
def authorize(context, action_name):
    """Enforce the ``volume_actions:<action_name>`` policy for this context."""
    checker = extensions.extension_authorizer(
        'volume', 'volume_actions:%s' % action_name)
    checker(context)
class VolumeToImageSerializer(xmlutil.TemplateBuilder):
    """Build the XML template for os-volume_upload_image responses."""

    # Attributes exposed on the response element, in serialization order.
    _ATTRIBUTES = ('id', 'updated_at', 'status', 'display_description',
                   'size', 'volume_type', 'image_id', 'container_format',
                   'disk_format', 'image_name')

    def construct(self):
        """Return the master template rooted at os-volume_upload_image."""
        root = xmlutil.TemplateElement('os-volume_upload_image',
                                       selector='os-volume_upload_image')
        for attribute in self._ATTRIBUTES:
            root.set(attribute)
        return xmlutil.MasterTemplate(root, 1)
class VolumeToImageDeserializer(wsgi.XMLDeserializer):
    """Deserializer to handle xml-formatted requests."""

    def default(self, string):
        """Parse an XML action document into the standard body dict."""
        dom = utils.safe_minidom_parse_string(string)
        action_node = dom.childNodes[0]
        # Collect only the attributes that are actually present.
        action_data = dict(
            (attr, action_node.getAttribute(attr))
            for attr in ("force", "image_name",
                         "container_format", "disk_format")
            if action_node.hasAttribute(attr))
        # Only the exact string 'True' is converted to a boolean here;
        # other spellings are passed through for downstream validation.
        if action_data.get('force') == 'True':
            action_data['force'] = True
        return {'body': {action_node.tagName: action_data}}
class VolumeActionsController(wsgi.Controller):
    """WSGI controller exposing volume actions.

    Each handler below is registered via ``@wsgi.action('<key>')`` and is
    invoked for a ``POST /volumes/{id}/action`` request whose body contains
    the matching key (e.g. ``os-attach``).
    """

    def __init__(self, *args, **kwargs):
        super(VolumeActionsController, self).__init__(*args, **kwargs)
        # Shared volume API facade used by every action handler.
        self.volume_api = volume.API()

    @wsgi.action('os-attach')
    def _attach(self, req, id, body):
        """Add attachment metadata."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)

        # instance uuid is an option now
        instance_uuid = None
        if 'instance_uuid' in body['os-attach']:
            instance_uuid = body['os-attach']['instance_uuid']
        host_name = None
        # Keep API backward compatibility
        if 'host_name' in body['os-attach']:
            host_name = body['os-attach']['host_name']
        # 'mountpoint' is mandatory; a missing key raises KeyError here.
        mountpoint = body['os-attach']['mountpoint']
        if 'mode' in body['os-attach']:
            mode = body['os-attach']['mode']
        else:
            mode = 'rw'

        # Exactly one of instance_uuid / host_name must be supplied.
        if instance_uuid and host_name:
            msg = _("Invalid request to attach volume to an "
                    "instance %(instance_uuid)s and a "
                    "host %(host_name)s simultaneously") % {
                'instance_uuid': instance_uuid,
                'host_name': host_name,
            }
            raise webob.exc.HTTPBadRequest(explanation=msg)
        elif instance_uuid is None and host_name is None:
            msg = _("Invalid request to attach volume to an invalid target")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        if mode not in ('rw', 'ro'):
            msg = _("Invalid request to attach volume with an invalid mode. "
                    "Attaching mode should be 'rw' or 'ro'")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        self.volume_api.attach(context, volume,
                               instance_uuid, host_name, mountpoint, mode)
        return webob.Response(status_int=202)

    @wsgi.action('os-detach')
    def _detach(self, req, id, body):
        """Clear attachment metadata."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        self.volume_api.detach(context, volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-reserve')
    def _reserve(self, req, id, body):
        """Mark volume as reserved."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        self.volume_api.reserve_volume(context, volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-unreserve')
    def _unreserve(self, req, id, body):
        """Unmark volume as reserved."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        self.volume_api.unreserve_volume(context, volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-begin_detaching')
    def _begin_detaching(self, req, id, body):
        """Update volume status to 'detaching'."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        self.volume_api.begin_detaching(context, volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-roll_detaching')
    def _roll_detaching(self, req, id, body):
        """Roll back volume status to 'in-use'."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        self.volume_api.roll_detaching(context, volume)
        return webob.Response(status_int=202)

    @wsgi.action('os-initialize_connection')
    def _initialize_connection(self, req, id, body):
        """Initialize volume attachment."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        try:
            connector = body['os-initialize_connection']['connector']
        except KeyError:
            # NOTE(review): the positional argument is HTTPBadRequest's
            # 'detail'; other handlers pass explanation= -- confirm intended.
            raise webob.exc.HTTPBadRequest("Must specify 'connector'")
        try:
            info = self.volume_api.initialize_connection(context,
                                                         volume,
                                                         connector)
        except exception.VolumeBackendAPIException as error:
            # NOTE(review): 'error' is caught but neither logged nor chained,
            # and 'msg' is passed positionally (as 'detail'), unlike the
            # explanation= style used elsewhere in this class.
            msg = _("Unable to fetch connection information from backend.")
            raise webob.exc.HTTPInternalServerError(msg)

        return {'connection_info': info}

    @wsgi.action('os-terminate_connection')
    def _terminate_connection(self, req, id, body):
        """Terminate volume attachment."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        try:
            connector = body['os-terminate_connection']['connector']
        except KeyError:
            # NOTE(review): positional 'detail' argument; see
            # _initialize_connection.
            raise webob.exc.HTTPBadRequest("Must specify 'connector'")
        try:
            self.volume_api.terminate_connection(context, volume, connector)
        except exception.VolumeBackendAPIException as error:
            # NOTE(review): 'error' is caught but not used or logged.
            msg = _("Unable to terminate volume connection from backend.")
            raise webob.exc.HTTPInternalServerError(explanation=msg)
        return webob.Response(status_int=202)

    @wsgi.response(202)
    @wsgi.action('os-volume_upload_image')
    @wsgi.serializers(xml=VolumeToImageSerializer)
    @wsgi.deserializers(xml=VolumeToImageDeserializer)
    def _volume_upload_image(self, req, id, body):
        """Uploads the specified volume to image service."""
        context = req.environ['cinder.context']
        params = body['os-volume_upload_image']
        if not params.get("image_name"):
            msg = _("No image_name was specified in request.")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        # 'force' may arrive as a bool (JSON) or a string (XML deserializer
        # passes anything other than the literal 'True' through unchanged).
        force = params.get('force', False)
        if isinstance(force, basestring):  # basestring/unicode: Python 2 only
            try:
                force = strutils.bool_from_string(force, strict=False)
            except ValueError:
                msg = _("Bad value for 'force' parameter.")
                raise webob.exc.HTTPBadRequest(explanation=msg)
        elif not isinstance(force, bool):
            msg = _("'force' is not string or bool.")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)

        authorize(context, "upload_image")
        # Defaults mirror Glance's: bare container, raw disk format.
        image_metadata = {"container_format": params.get("container_format",
                                                         "bare"),
                          "disk_format": params.get("disk_format", "raw"),
                          "name": params["image_name"]}

        try:
            response = self.volume_api.copy_volume_to_image(context,
                                                            volume,
                                                            image_metadata,
                                                            force)
        except exception.InvalidVolume as error:
            raise webob.exc.HTTPBadRequest(explanation=error.msg)
        except ValueError as error:
            raise webob.exc.HTTPBadRequest(explanation=unicode(error))
        except messaging.RemoteError as error:
            msg = "%(err_type)s: %(err_msg)s" % {'err_type': error.exc_type,
                                                 'err_msg': error.value}
            raise webob.exc.HTTPBadRequest(explanation=msg)
        except Exception as error:
            # Broad catch: any backend failure becomes a 400 with the raw
            # exception text as explanation.
            raise webob.exc.HTTPBadRequest(explanation=unicode(error))
        return {'os-volume_upload_image': response}

    @wsgi.action('os-extend')
    def _extend(self, req, id, body):
        """Extend size of volume."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        try:
            # Validation only; the converted value is recomputed below.
            int(body['os-extend']['new_size'])
        except (KeyError, ValueError, TypeError):
            msg = _("New volume size must be specified as an integer.")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        size = int(body['os-extend']['new_size'])
        self.volume_api.extend(context, volume, size)
        return webob.Response(status_int=202)

    @wsgi.action('os-update_readonly_flag')
    def _volume_readonly_update(self, req, id, body):
        """Update volume readonly flag."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)

        try:
            readonly_flag = body['os-update_readonly_flag']['readonly']
        except KeyError:
            msg = _("Must specify readonly in request.")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        # Unlike 'force' above, strings are converted strictly here.
        if isinstance(readonly_flag, basestring):
            try:
                readonly_flag = strutils.bool_from_string(readonly_flag,
                                                          strict=True)
            except ValueError:
                msg = _("Bad value for 'readonly'")
                raise webob.exc.HTTPBadRequest(explanation=msg)

        elif not isinstance(readonly_flag, bool):
            msg = _("'readonly' not string or bool")
            raise webob.exc.HTTPBadRequest(explanation=msg)

        self.volume_api.update_readonly_flag(context, volume, readonly_flag)
        return webob.Response(status_int=202)

    @wsgi.action('os-retype')
    def _retype(self, req, id, body):
        """Change type of existing volume."""
        context = req.environ['cinder.context']
        # NOTE(review): unlike the other handlers, VolumeNotFound is not
        # translated to a 404 here -- confirm whether that is intentional.
        volume = self.volume_api.get(context, id)
        try:
            new_type = body['os-retype']['new_type']
        except KeyError:
            msg = _("New volume type must be specified.")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        policy = body['os-retype'].get('migration_policy')
        self.volume_api.retype(context, volume, new_type, policy)
        return webob.Response(status_int=202)

    @wsgi.action('os-set_bootable')
    def _set_bootable(self, req, id, body):
        """Update bootable status of a volume."""
        context = req.environ['cinder.context']
        try:
            volume = self.volume_api.get(context, id)
        except exception.VolumeNotFound as error:
            raise webob.exc.HTTPNotFound(explanation=error.msg)
        try:
            bootable = body['os-set_bootable']['bootable']
        except KeyError:
            msg = _("Must specify bootable in request.")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        if isinstance(bootable, basestring):
            try:
                bootable = strutils.bool_from_string(bootable,
                                                     strict=True)
            except ValueError:
                msg = _("Bad value for 'bootable'")
                raise webob.exc.HTTPBadRequest(explanation=msg)

        elif not isinstance(bootable, bool):
            msg = _("'bootable' not string or bool")
            raise webob.exc.HTTPBadRequest(explanation=msg)
        update_dict = {'bootable': bootable}

        self.volume_api.update(context, volume, update_dict)
        # Note: this action returns 200, not 202 like the others.
        return webob.Response(status_int=200)
class Volume_actions(extensions.ExtensionDescriptor):
    """Enable volume actions
    """

    name = "VolumeActions"
    alias = "os-volume-actions"
    namespace = "http://docs.openstack.org/volume/ext/volume-actions/api/v1.1"
    updated = "2012-05-31T00:00:00+00:00"

    def get_controller_extensions(self):
        # Attach the actions controller to the existing /volumes resource.
        return [extensions.ControllerExtension(
            self, 'volumes', VolumeActionsController())]
| apache-2.0 |
null7/haiegoo | haiegoo/src/com/haiegoo/web/Shopcar.java | 544 | package com.haiegoo.web;
import com.haiegoo.framework.web.HttpServletExtendRequest;
import com.haiegoo.framework.web.HttpServletExtendResponse;
import com.haiegoo.ucenter.utils.controller.PageController;
import org.springframework.web.servlet.ModelAndView;
/**
 * Shopping-cart page controller. (Original comment: "购物车" = shopping cart.)
 *
 * @author Linpan
 */
public class Shopcar extends PageController {

	/**
	 * Renders the shopping-cart page.
	 *
	 * <p>Currently an empty auto-generated stub: no model data is added and
	 * no view is selected here.
	 *
	 * @param request  extended servlet request
	 * @param response extended servlet response
	 * @param modeview model-and-view to populate for rendering
	 */
	@Override
	public void execute(HttpServletExtendRequest request,
			HttpServletExtendResponse response, ModelAndView modeview) {
		// TODO Auto-generated method stub
	}
}
| apache-2.0 |
CXuesong/MwParserFromScratch | UnitTestProject1/Utility.cs | 6326 | using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using MwParserFromScratch.Nodes;
namespace UnitTestProject1
{
    /// <summary>
    /// Test helper that renders a wikitext AST node into a compact,
    /// unambiguous debug string (e.g. links as [[target|text]], templates as
    /// {{name|arg}}, bold/italic switches as [B]/[I]/[BI]).
    /// </summary>
    internal static class Utility
    {
        // Maps a concrete Node type to the function that renders it.
        private static readonly Dictionary<Type, Func<Node, string>> dumpHandlers = new Dictionary<Type, Func<Node, string>>();

        // Registers a typed handler, wrapped so the registry can store it
        // uniformly as Func<Node, string> (the cast is safe because lookup
        // in Dump is by the node's exact runtime type).
        private static void RegisterDumpHandler<T>(Func<T, string> handler) where T : Node
        {
            dumpHandlers.Add(typeof(T), n => handler((T) n));
        }

        // Static constructor: register one handler per node type.
        static Utility()
        {
            // Add a $ mark before brackets to escape them
            RegisterDumpHandler<PlainText>(n => Regex.Replace(n.Content, @"(?=[\[\]\{\}<>])", "$"));
            RegisterDumpHandler<FormatSwitch>(fs =>
            {
                if (fs.SwitchBold && fs.SwitchItalics)
                    return "[BI]";
                if (fs.SwitchBold)
                    return "[B]";
                if (fs.SwitchItalics)
                    return "[I]";
                return "[]";
            });
            RegisterDumpHandler<WikiLink>(n => n.Text == null
                ? $"[[{Dump(n.Target)}]]"
                : $"[[{Dump(n.Target)}|{Dump(n.Text)}]]");
            // Image links are prefixed with '!' to distinguish them from
            // plain wiki links.
            RegisterDumpHandler<WikiImageLink>(n =>
            {
                if (n.Arguments.Count == 0) return "![[" + Dump(n.Target) + "]]";
                var sb = new StringBuilder("![[");
                sb.Append(n.Target);
                foreach (var arg in n.Arguments)
                {
                    sb.Append('|');
                    sb.Append(Dump(arg));
                }
                sb.Append("]]");
                return sb.ToString();
            });
            RegisterDumpHandler<WikiImageLinkArgument>(n =>
            {
                if (n.Name == null) return Dump(n.Value);
                return Dump(n.Name) + "=" + Dump(n.Value);
            });
            RegisterDumpHandler<ExternalLink>(el =>
            {
                var s = el.ToString();
                // Add brackets to distinguish links form normal text.
                if (!el.Brackets) return "-[" + s + "]-";
                return s;
            });
            RegisterDumpHandler<Run>(w => string.Join(null, w.Inlines.Select(Dump)));
            RegisterDumpHandler<ListItem>(li => li.Prefix + "[" + string.Join(null, li.Inlines.Select(Dump)) + "]");
            RegisterDumpHandler<Heading>(h =>
            {
                var expr = $"H{h.Level}[{string.Join(null, h.Inlines.Select(Dump))}]";
                if (h.Suffix != null) expr += "[" + h.Suffix + "]";
                return expr;
            });
            RegisterDumpHandler<Paragraph>(p => $"P[{string.Join(null, p.Inlines.Select(Dump))}]");
            RegisterDumpHandler<Wikitext>(w => string.Join(null, w.Lines.Select(Dump)));
            RegisterDumpHandler<ArgumentReference>(n =>
            {
                var s = "{{{" + Dump(n.Name);
                if (n.DefaultValue != null) s += "|" + Dump(n.DefaultValue);
                return s + "}}}";
            });
            RegisterDumpHandler<Template>(n =>
            {
                if (n.Arguments.Count == 0) return "{{" + Dump(n.Name) + "}}";
                var sb = new StringBuilder("{{");
                sb.Append(n.Name);
                foreach (var arg in n.Arguments)
                {
                    sb.Append('|');
                    sb.Append(Dump(arg));
                }
                sb.Append("}}");
                return sb.ToString();
            });
            RegisterDumpHandler<TemplateArgument>(n =>
            {
                if (n.Name == null) return Dump(n.Value);
                return Dump(n.Name) + "=" + Dump(n.Value);
            });
            RegisterDumpHandler<Comment>(n => n.ToString());
            // ParserTag and HtmlTag share one rendering routine; only the
            // content accessors differ.
            Func<TagNode, string> tagNodeHandler = n =>
            {
                var sb = new StringBuilder("<");
                sb.Append(n.Name);
                sb.Append(string.Join(null, n.Attributes.Select(Dump)));
                sb.Append(n.Attributes.TrailingWhitespace);
                switch (n.TagStyle)
                {
                    case TagStyle.Normal:
                    case TagStyle.NotClosed:
                        sb.Append('>');
                        var pt = n as ParserTag;
                        if (pt != null) sb.Append(pt.Content);
                        var ht = n as HtmlTag;
                        if (ht != null) sb.Append(Dump(ht.Content));
                        break;
                    case TagStyle.SelfClosing:
                        sb.Append("/>");
                        return sb.ToString();
                    case TagStyle.CompactSelfClosing:
                        sb.Append(">");
                        return sb.ToString();
                    default:
                        Debug.Assert(false);
                        break;
                }
                sb.Append("</");
                sb.Append(n.ClosingTagName ?? n.Name);
                sb.Append(n.ClosingTagTrailingWhitespace);
                sb.Append('>');
                return sb.ToString();
            };
            RegisterDumpHandler<ParserTag>(tagNodeHandler);
            RegisterDumpHandler<HtmlTag>(tagNodeHandler);
            RegisterDumpHandler<TagAttribute>(n =>
            {
                string quote;
                switch (n.Quote)
                {
                    case ValueQuoteType.None:
                        quote = null;
                        break;
                    case ValueQuoteType.SingleQuotes:
                        quote = "'";
                        break;
                    case ValueQuoteType.DoubleQuotes:
                        quote = "\"";
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }
                return n.LeadingWhitespace + n.Name + n.WhitespaceBeforeEqualSign + "="
                       + n.WhitespaceAfterEqualSign + quote + n.Value + quote;
            });
        }

        /// <summary>
        /// Renders <paramref name="node"/> to its debug string, or null for a
        /// null node. Throws KeyNotFoundException for an unregistered type.
        /// </summary>
        public static string Dump(Node node)
        {
            if (node == null) return null;
            return dumpHandlers[node.GetType()](node);
        }
    }
}
| apache-2.0 |
lutzfischer/XiSearch | src/main/java/rappsilber/ms/score/AutoValidation.java | 127549 | /*
* Copyright 2016 Lutz Fischer <l.fischer@ed.ac.uk>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rappsilber.ms.score;
import java.util.ArrayList;
import rappsilber.ms.spectra.match.MatchedXlinkedPeptide;
import rappsilber.utils.Util;
/**
* Basically I run weka on some training data and created randomtree and reptree
* classifies and if for both 7 out of ten agree that it is not a false positive
* a match gets flagged as auto-validated.
* @author Lutz Fischer <l.fischer@ed.ac.uk>
*/
public class AutoValidation extends AbstractScoreSpectraMatch {
abstract class tree {
public abstract int isFalsePositive(MatchedXlinkedPeptide match);
}
public final static String scorename = "Autovalidation";
private ArrayList<tree> randomTrees = new ArrayList<tree>(10);
private ArrayList<tree> repTrees = new ArrayList<tree>(10);
public AutoValidation() {
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("fragment unique matched conservative") < 13.5) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.17) {
if (match.getSpectrum().getPrecurserMass() < 2751.37) {
return 1;
} else {
if (match.getScore("fragment non lossy matched") < 15.5) {
return 1;
} else {
if (match.getScore("mgcAlpha") < 85.17) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 750.16) {
return 0;
} else {
return 1;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("fragment matched conservative") < 10.5) {
if (match.getScore("spectra top100 matched%") < 0.25) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("mgcDelta") < 24.07) {
if (match.getScore("spectrum intensity coverage") < 0.32) {
return 1;
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.33) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("AverageRelativeMS2Error") < 0.22) {
if (match.getScore("SpectraCoverageConservative") < 0.22) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getScore("peptide2 unique matched non lossy") < 3.5) {
if (match.getSpectrum().getPrecurserMass() < 4846.54) {
if (match.getScore("peptide2 unique matched") < 5.5) {
return 1;
} else {
if (match.getScore("spectra matched single%") < 0.12) {
return 0;
} else {
return 1;
}
}
} else {
return 1;
}
} else {
if (match.getScore("peptide2 conservative coverage") < 0.27) {
if (match.getScore("spectra intensity nonlossy coverage") < 0.51) {
return 1;
} else {
if (match.getScore("1-ErrorRelative") < 0.81) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("spectrum intensity coverage") < 0.38) {
return 1;
} else {
if (match.getScore("1-ErrorRelative") < 0.81) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 matched conservative") < 6.5) {
if (match.getScore("spectrum quality score") < 0.46) {
if (match.getScore("1-ErrorRelative") < 0.85) {
return 1;
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.31) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
} else {
if (match.getScore("spectra intensity nonlossy coverage") < 0.27) {
if (match.getScore("Pep2Score") < 0.35) {
if (match.getScore("mgcShiftedDelta") < 116.51) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("fragment non lossy matched") < 13.5) {
if (match.getScore("fragment unique matched non lossy coverage") < 0.45) {
if (match.getScore("spectra top40 matched%") < 0.34) {
if (match.getScore("spectra top100 matched%") < 0.25) {
if (match.getScore("mgcBeta") < 30.63) {
return 1;
} else {
if (match.getScore("MeanSquareError") < 10.46) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("peptide2 conservative coverage") < 0.31) {
return 1;
} else {
if (match.getScore("1-ErrorRelative") < 0.91) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.28) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 901.07) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 2979.12) {
return 0;
} else {
return 1;
}
}
} else {
return 0;
}
}
} else {
if (match.getCalcMass() < 2373.72) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.23) {
if (match.getScore("FragmentLibraryScoreExponential") < 1) {
return 1;
} else {
if (match.getScore("mgxScore") < 192.46) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("FragmentLibraryScoreLog") < 18.45) {
if (match.getScore("spectrum quality score") < 0.38) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getCalcMass() < 2603.28) {
if (match.getSpectrum().getPrecurserMass() < 2586.79) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.34) {
if (match.getScore("peptide2 non lossy matched") < 5.5) {
if (match.getScore("FragmentLibraryScoreLog") < 56.32) {
return 1;
} else {
if (match.getScore("peptide2 unique matched conservative coverage") < 0.17) {
if (match.getScore("spectrum peaks coverage") < 0.57) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("peptide2 matched conservative") < 3.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("spectrum intensity coverage") < 0.42) {
if (match.getScore("1-ErrorRelative") < 0.86) {
return 1;
} else {
if (match.getScore("mgcDelta") < 32.36) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getScore("peptide2 conservative coverage") < 0.39) {
if (match.getSpectrum().getPrecurserMass() < 4126.15) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("mgxDelta") < 0.68) {
if (match.getScore("spectrum quality score") < 0.47) {
if (match.getScore("MeanSquareError") < 33.82) {
return 0;
} else {
return 1;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("total fragment matches") < 15.5) {
if (match.getScore("fragment matched conservative") < 10.5) {
if (match.getSpectrum().getPrecurserMass() < 2458.24) {
if (match.getScore("fragment multimatched%") < 0.07) {
if (match.getSpectrum().getPrecurserMass() < 1974.07) {
return 1;
} else {
if (match.getCalcMass() < 1974.55) {
return 0;
} else {
return 1;
}
}
} else {
return 1;
}
} else {
if (match.getCalcMass() < 2793.46) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 493.18) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 478.89) {
return 1;
} else {
return 0;
}
} else {
if (match.getCalcMass() < 2458.43) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.99) {
return 0;
} else {
return 1;
}
}
}
} else {
if (match.getScore("mgxDelta") < 6.26) {
if (match.getScore("peptide1 unique matched conservative coverage") < 0.56) {
if (match.getScore("fragment multimatched%") < 0.11) {
return 1;
} else {
if (match.getScore("mgcBeta") < 30.71) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 non lossy coverage") < 0.39) {
if (match.getSpectrum().getPrecurserMass() < 2749.02) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.48) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.35) {
return 1;
} else {
if (match.getScore("mgcBeta") < 9.5) {
return 1;
} else {
if (match.getScore("SpectraCoverageConservative") < 0.32) {
return 1;
} else {
return 0;
}
}
}
}
}
} else {
if (match.getScore("spectrum quality score") < 0.48) {
if (match.getScore("peptide2 unique matched conservative") < 4.5) {
if (match.getScore("peptide1 coverage") < 0.53) {
if (match.getScore("peptide2 matched") < 11.5) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("peptide2 unique matched lossy") < 9.5) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide1 unique matched non lossy coverage") < 0.36) {
if (match.getScore("peptide2 non lossy coverage") < 0.37) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.29) {
if (match.getScore("FragmentLibraryScoreLog") < 56.03) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getScore("1-ErrorRelative") < 0.82) {
return 1;
} else {
if (match.getScore("peptide2 unique matched non lossy") < 3.5) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 matched conservative") < 4.5) {
if (match.getSpectrum().getPrecurserMass() < 2362.17) {
if (match.getScore("fragment non lossy coverage") < 0.46) {
if (match.getScore("spectrum intensity coverage") < 0.57) {
return 1;
} else {
if (match.getScore("peptide2 unique matched non lossy") < 3.5) {
if (match.getCalcMass() < 2285.17) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getScore("mgcDelta") < 8.55) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 407.32) {
return 1;
} else {
if (match.getScore("spectra matched single%") < 0.49) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgcDelta") < 34.57) {
return 1;
} else {
if (match.getScore("spectra top40 matched%") < 0.19) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.51) {
if (match.getScore("fragment conservative coverage") < 0.45) {
if (match.getScore("mgxScore") < 126.46) {
return 1;
} else {
if (match.getScore("FragmentLibraryScoreLog") < 53.96) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgxDelta") < 8.17) {
return 1;
} else {
if (match.getScore("AverageMS2Error") < 3.45) {
return 0;
} else {
return 1;
}
}
}
} else {
if (match.getScore("total fragment matches") < 19.5) {
if (match.getScore("FragmentLibraryScoreLog") < 49.69) {
return 1;
} else {
if (match.getScore("peptide1 lossy coverage") < 0.33) {
return 1;
} else {
return 0;
}
}
} else {
return 1;
}
}
}
} else {
if (match.getScore("FragmentLibraryScore") < 1) {
if (match.getScore("fragment unique matched non lossy coverage") < 0.41) {
if (match.getScore("peptide2 unique matched") < 0.5) {
return 0;
} else {
if (match.getScore("spectrum intensity coverage") < 0.4) {
return 1;
} else {
if (match.getScore("peptide1 lossy matched") < 5.5) {
return 0;
} else {
return 1;
}
}
}
} else {
if (match.getScore("mgxScore") < 71.71) {
if (match.getScore("fragment unique matched non lossy") < 13.5) {
if (match.getCalcMass() < 2350.25) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("MeanSquareError") < 35.27) {
return 0;
} else {
if (match.getScore("spectrum peaks coverage") < 0.26) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("Pep2Score") < 0.3) {
if (match.getScore("mgcShiftedDelta") < 128.74) {
if (match.getScore("fragment non lossy matched") < 18.5) {
return 1;
} else {
if (match.getScore("mgcBeta") < 47.37) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 sequencetag coverage%") < 0.11) {
if (match.getScore("mgxDelta") < 3.86) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
return 0;
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.1) {
if (match.getScore("peptide1 matched") < 11.5) {
if (match.getScore("spectrum quality score") < 0.44) {
if (match.getScore("fragment conservative coverage") < 0.44) {
if (match.getScore("fragment lossy matched") < 12.5) {
return 1;
} else {
if (match.getScore("MeanSquareError") < 29.73) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2603.28) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getCalcMass() < 2316.66) {
if (match.getScore("fragment unique matched conservative coverage") < 0.43) {
if (match.getScore("peptide2 lossy matched") < 8) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("fragment conservative coverage") < 0.45) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("mgxScore") < 177.6) {
if (match.getScore("peptide2 non lossy matched") < 3.5) {
return 1;
} else {
if (match.getScore("betaCount") < 74.5) {
return 1;
} else {
if (match.getScore("fragment matched conservative") < 26.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("fragment non lossy matched") < 22.5) {
if (match.getScore("mgxRank") < 0.5) {
if (match.getScore("peptide2 lossy matched") < 5.5) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("peptide2 non lossy matched") < 3.5) {
return 1;
} else {
if (match.getScore("MeanSquareRootError") < 5.79) {
return 0;
} else {
return 1;
}
}
}
}
}
} else {
if (match.getScore("spectrum quality score") < 0.41) {
if (match.getScore("fragment matched conservative") < 13.5) {
if (match.getScore("fragment unique matched conservative") < 9.5) {
return 1;
} else {
if (match.getScore("fragment unique matched lossy coverage") < 0.28) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 468.31) {
return 0;
} else {
return 1;
}
} else {
if (match.getScore("mgcShiftedDelta") < 102.87) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide1 coverage") < 0.41) {
if (match.getScore("peptide2 non lossy matched") < 6.5) {
return 1;
} else {
if (match.getScore("AverageMS2Error") < 4.73) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("SpectraCoverageConservative") < 0.19) {
if (match.getScore("mgxScore") < 144.16) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("mgcAlpha") < 97.03) {
return 0;
} else {
return 1;
}
}
}
}
} else {
if (match.getScore("peptide2 unique matched") < 4.5) {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getScore("peptide2 unique matched conservative") < 2.5) {
return 1;
} else {
if (match.getScore("peptide1 non lossy coverage") < 0.38) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched lossy coverage") < 0.41) {
if (match.getScore("mgxDelta") < 0.43) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy") < 4.5) {
if (match.getScore("peptide2 lossy coverage") < 0.21) {
if (match.getScore("total fragment matches") < 27) {
return 1;
} else {
return 0;
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2689.94) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("fragment matched conservative") < 13.5) {
if (match.getScore("mgcDelta") < 22.24) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 699.37) {
return 1;
} else {
if (match.getScore("Pep2Score") < 0.43) {
if (match.getScore("PrecoursorCharge") < 3.5) {
return 1;
} else {
if (match.getCalcMass() < 2793.6) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 5.5) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 699.55) {
return 0;
} else {
return 1;
}
} else {
if (match.getScore("peptide2 conservative coverage") < 0.39) {
return 0;
} else {
return 1;
}
}
}
}
} else {
if (match.getScore("fragment non lossy coverage") < 0.45) {
if (match.getScore("fragment conservative coverage") < 0.38) {
return 1;
} else {
if (match.getScore("1-ErrorRelative") < 0.92) {
return 1;
} else {
if (match.getScore("peptide1 matched conservative") < 4.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("spectrum intensity coverage") < 0.24) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 763.85) {
if (match.getScore("PrecoursorCharge") < 4.5) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy") < 4.5) {
if (match.getScore("fragment coverage") < 0.42) {
if (match.getSpectrum().getPrecurserMass() < 2919.44) {
if (match.getScore("peptide1 lossy coverage") < 0.08) {
if (match.getScore("peptide1 unique matched conservative") < 12.5) {
return 1;
} else {
return 0;
}
} else {
if (match.getCalcMass() < 2336.28) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("SpectraCoverageConservative") < 0.59) {
return 1;
} else {
if (match.getScore("SpectraCoverageConservative") < 0.59) {
return 0;
} else {
return 1;
}
}
}
} else {
if (match.getScore("peptide2 unique matched conservative") < 2.5) {
if (match.getScore("PrecoursorCharge") < 4.5) {
return 1;
} else {
if (match.getScore("spectra matched single%") < 0.48) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 sequencetag coverage%") < 0.2) {
return 1;
} else {
if (match.getScore("mgxDelta") < 7.07) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.28) {
if (match.getScore("peptide2 unique matched non lossy") < 6.5) {
if (match.getScore("mgcDelta") < 32.58) {
return 1;
} else {
if (match.getScore("spectra top100 matched%") < 0.27) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("spectra matched isotop%") < 0.51) {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.26) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getScore("FragmentLibraryScore") < 1) {
if (match.getScore("spectrum intensity coverage") < 0.43) {
if (match.getScore("fragment unique matched non lossy coverage") < 0.44) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("mgcBeta") < 29.95) {
if (match.getScore("spectrum quality score") < 0.45) {
if (match.getScore("fragment conservative coverage") < 0.43) {
if (match.getScore("mgxScore") < 180.22) {
return 1;
} else {
if (match.getScore("peptide1 multimatched%") < 0.24) {
if (match.getScore("peptide1 conservative coverage") < 0.47) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
}
} else {
if (match.getScore("peptide2 unique matched conservative") < 3.5) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 698.63) {
return 1;
} else {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 932.1) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.23) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy") < 4.5) {
if (match.getScore("lossy fragment matches") < 13.5) {
if (match.getScore("fragment unique matched conservative") < 18.5) {
return 1;
} else {
if (match.getScore("peptide2 sequencetag coverage%") < 0.28) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 conservative coverage") < 0.39) {
if (match.getScore("spectrum quality score") < 0.81) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
}
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.23) {
return 1;
} else {
if (match.getScore("1-ErrorRelative") < 0.82) {
if (match.getScore("fragment unique matched non lossy coverage") < 0.38) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.31) {
if (match.getScore("fragment unique matched conservative") < 14.5) {
if (match.getScore("Pep1Score") < 0.45) {
if (match.getScore("PrecoursorCharge") < 3.5) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 1129.58) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("spectrum quality score") < 0.6) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched conservative") < 4.5) {
if (match.getScore("spectrum quality score") < 0.48) {
return 1;
} else {
if (match.getScore("MeanSquareRootError") < 4.3) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("mgcDelta") < 37) {
if (match.getScore("spectrum quality score") < 0.45) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("spectrum quality score") < 0.4) {
if (match.getScore("spectrum peaks coverage") < 0.22) {
if (match.getScore("fragment unique matched lossy coverage") < 0.4) {
if (match.getScore("fragment unique matched conservative coverage") < 0.43) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("MeanSquareError") < 52.95) {
if (match.getScore("peptide1 unique matched conservative") < 13.5) {
return 0;
} else {
return 1;
}
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 matched") < 4.5) {
if (match.getScore("mgxDelta") < 9.82) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("spectra top100 matched%") < 0.23) {
if (match.getScore("total fragment matches") < 15.5) {
if (match.getScore("fragment non lossy matched") < 10.5) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 684.09) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 2782.47) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.61) {
return 0;
} else {
return 1;
}
}
}
} else {
if (match.getScore("peptide2 conservative coverage") < 0.4) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.22) {
return 1;
} else {
if (match.getScore("mgcBeta") < 49.62) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgxDelta") < 5) {
return 1;
} else {
if (match.getScore("spectra matched isotop%") < 0.43) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("fragment unique matched non lossy coverage") < 0.44) {
if (match.getScore("mgcShiftedDelta") < 118.93) {
if (match.getScore("peptide2 matched") < 11.5) {
if (match.getScore("1-ErrorRelative") < 0.99) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("1-ErrorRelative") < 0.88) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgcBeta") < 37.2) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy") < 4.5) {
if (match.getScore("peptide2 conservative coverage") < 0.39) {
if (match.getScore("spectra matched single%") < 0.39) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("SpectraCoverageConservative") < 0.4) {
if (match.getScore("1-ErrorRelative") < 0.82) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.26) {
if (match.getScore("Pep2Score") < 0.23) {
if (match.getScore("total fragment matches") < 18.5) {
return 1;
} else {
if (match.getScore("peptide1 unique matched lossy coverage") < 0.05) {
if (match.getScore("mgcBeta") < 43.94) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
}
} else {
if (match.getScore("mgcBeta") < 49.15) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("spectra intensity nonlossy coverage") < 0.34) {
if (match.getScore("fragment matched conservative") < 12.5) {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 2166.13) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("mgcDelta") < 27.75) {
if (match.getScore("peptide2 matched") < 4.5) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getScore("mgxDelta") < 7.98) {
if (match.getScore("fragment non lossy matched") < 19.5) {
if (match.getScore("peptide2 lossy matched") < 3.5) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("peptide2 matched conservative") < 4.5) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("total fragment matches") < 15.5) {
if (match.getScore("peptide2 conservative coverage") < 0.39) {
if (match.getCalcMass() < 2390.24) {
if (match.getScore("mgxDelta") < 8.51) {
if (match.getSpectrum().getPrecurserMass() < 1901.89) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 1902.01) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("spectrum peaks coverage") < 0.21) {
return 1;
} else {
if (match.getScore("peptide2 unique matched conservative") < 2.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("fragment non lossy coverage") < 0.37) {
return 1;
} else {
if (match.getCalcMass() < 3168.68) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("total fragment matches") < 12.5) {
if (match.getScore("spectra matched single%") < 0.36) {
if (match.getScore("PrecoursorCharge") < 3.5) {
if (match.getScore("peptide1 sequencetag coverage%") < 0.47) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("peptide1 conservative coverage") < 0.41) {
return 1;
} else {
if (match.getScore("FragmentLibraryScore") < 1) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("mgxDelta") < 5.97) {
return 1;
} else {
if (match.getScore("spectrum peaks coverage") < 0.22) {
if (match.getScore("peptide1 unique matched") < 7.5) {
return 0;
} else {
return 1;
}
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 unique matched conservative") < 4.5) {
if (match.getScore("peptide2 matched") < 3.5) {
if (match.getScore("mgcBeta") < 31.41) {
if (match.getScore("PrecoursorCharge") < 5.5) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 2752.44) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("mgcShiftedDelta") < 182.37) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("SpectraCoverageConservative") < 0.42) {
if (match.getScore("peptide2 unique matched conservative") < 0.5) {
if (match.getScore("mgcBeta") < 28.09) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("mgxDelta") < 7.14) {
return 1;
} else {
if (match.getScore("peptide2 conservative coverage") < 0.28) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 conservative coverage") < 0.27) {
if (match.getScore("mgxDelta") < -0.02) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.19) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
});
randomTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("fragment unique matched non lossy") < 13.5) {
if (match.getScore("fragment sequencetag coverage%") < 0.34) {
if (match.getSpectrum().getPrecurserMass() < 2458.24) {
if (match.getScore("fragment unique matched conservative") < 9.5) {
if (match.getScore("peptide2 matched") < 5.5) {
return 1;
} else {
if (match.getScore("SpectraCoverageConservative") < 0.51) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched conservative") < 3.5) {
return 1;
} else {
if (match.getScore("fragment unique matched lossy") < 13.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy") < 6.5) {
return 1;
} else {
if (match.getScore("spectra matched single%") < 0.38) {
if (match.getScore("fragment conservative coverage") < 0.36) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 non lossy coverage") < 0.34) {
if (match.getSpectrum().getPrecurserMass() < 2791) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("mgcShiftedDelta") < 67.88) {
if (match.getScore("peptide2 lossy matched") < 3.5) {
if (match.getScore("PrecoursorCharge") < 4.5) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("peptide2 matched conservative") < 4.5) {
if (match.getScore("fragment unique matched non lossy") < 12.5) {
return 0;
} else {
return 1;
}
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("peptide2 matched conservative") < 4.5) {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.27) {
if (match.getScore("mgcDelta") < 26.14) {
if (match.getScore("mgcBeta") < 40.58) {
if (match.getScore("spectra intensity nonlossy coverage") < 0.67) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("peptide1 matched conservative") < 15.5) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.21) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("spectra top40 matched%") < 0.24) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 3.5) {
return 1;
} else {
if (match.getScore("peptide2 non lossy coverage") < 0.45) {
if (match.getScore("mgxDelta") < 7.76) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
}
}
} else {
if (match.getScore("mgcDelta") < 29.93) {
if (match.getScore("fragment unique matched conservative coverage") < 0.37) {
if (match.getScore("1-ErrorRelative") < 0.86) {
return 1;
} else {
if (match.getScore("peptide2 sequencetag coverage%") < 0.22) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgcBeta") < 19.76) {
if (match.getScore("fragment non lossy coverage") < 0.47) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.18) {
if (match.getScore("fragment non lossy matched") < 21) {
if ((match.getCalcMass()/match.getSpectrum().getPrecurserCharge())+Util.PROTON_MASS < 1037.68) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
        // REPTree #1 (machine-generated by Weka; do not edit the thresholds by
        // hand). Splits primarily on beta-peptide sequence-tag coverage,
        // precursor mass, spectrum quality and mgx/mgc delta scores.
        repTrees.add(new tree() {
            public int isFalsePositive(MatchedXlinkedPeptide match) {
                if (match.getScore("peptide2 sequencetag coverage%") < 0.09) {
                    if (match.getSpectrum().getPrecurserMass() < 2269.16) {
                        if (match.getSpectrum().getPrecurserMass() < 1974.06) {
                            if (match.getPeptide1().length() <= 5) {
                                return 1;
                            } else {
                                if (match.getScore("peptide2 non lossy matched") < 4.5) {
                                    return 1;
                                } else {
                                    if (match.getScore("spectrum quality score") < 0.51) {
                                        return 1;
                                    } else {
                                        return 0;
                                    }
                                }
                            }
                        } else {
                            if (match.getScore("peptide1 sequencetag coverage%") < 0.6) {
                                return 1;
                            } else {
                                if (match.getScore("peptide2 non lossy matched") < 3.5) {
                                    return 1;
                                } else {
                                    if (match.getScore("mgxDelta") < 4.8) {
                                        return 1;
                                    } else {
                                        return 0;
                                    }
                                }
                            }
                        }
                    } else {
                        if (match.getSpectrum().getPrecurserMass() < 2793.51) {
                            return 1;
                        } else {
                            if (match.getSpectrum().getPrecurserMass() < 2793.6) {
                                return 0;
                            } else {
                                return 1;
                            }
                        }
                    }
                } else {
                    if (match.getScore("spectrum quality score") < 0.41) {
                        if (match.getScore("fragment sequencetag coverage%") < 0.3) {
                            if (match.getSpectrum().getPrecurserMass() < 2360.73) {
                                return 1;
                            } else {
                                if (match.getScore("mgxDelta") < 23.58) {
                                    if (match.getScore("peptide2 non lossy matched") < 9.5) {
                                        return 1;
                                    } else {
                                        return 0;
                                    }
                                } else {
                                    return 0;
                                }
                            }
                        } else {
                            if (match.getScore("peptide2 non lossy matched") < 4.5) {
                                return 1;
                            } else {
                                if (match.getScore("1-ErrorRelative") < 0.85) {
                                    return 1;
                                } else {
                                    if (match.getScore("mgcScore") < 62.82) {
                                        return 1;
                                    } else {
                                        return 0;
                                    }
                                }
                            }
                        }
                    } else {
                        if (match.getScore("peptide2 non lossy matched") < 4.5) {
                            if (match.getScore("1-ErrorRelative") < 0.85) {
                                return 1;
                            } else {
                                if (match.getScore("mgxDelta") < 7.05) {
                                    return 1;
                                } else {
                                    return 0;
                                }
                            }
                        } else {
                            if (match.getScore("mgxDelta") < 0.68) {
                                if (match.getScore("1-ErrorRelative") < 0.78) {
                                    if (match.getScore("mgcDelta") < 41.08) {
                                        return 1;
                                    } else {
                                        return 0;
                                    }
                                } else {
                                    return 0;
                                }
                            } else {
                                return 0;
                            }
                        }
                    }
                }
            }
        });
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.11) {
if (match.getSpectrum().getPrecurserMass() < 2362.17) {
if (match.getSpectrum().getPrecurserMass() < 1898.09) {
return 1;
} else {
if (match.getScore("fragment matched conservative") < 13.5) {
if (match.getScore("mgcBeta") < 41.26) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("peptide2 matched conservative") < 4.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.51) {
if (match.getSpectrum().getPrecurserMass() < 2362.64) {
return 0;
} else {
return 1;
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.6) {
return 0;
} else {
if (match.getScore("total fragment matches") < 19.5) {
return 1;
} else {
if (match.getScore("mgcDelta") < 31.36) {
return 1;
} else {
return 0;
}
}
}
}
}
} else {
if (match.getScore("total fragment matches") < 15.5) {
if (match.getScore("fragment non lossy coverage") < 0.42) {
if (match.getScore("mgcBeta") < 54.66) {
if (match.getSpectrum().getPrecurserMass() < 2743.94) {
if (match.getScore("spectra matched single%") < 0.41) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
return 0;
}
} else {
if (match.getScore("spectra top100 matched%") < 0.21) {
if (match.getScore("peptide2 unique matched") < 6.5) {
return 1;
} else {
if (match.getScore("mgxDelta") < 3.57) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgxDelta") < 5.87) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.39) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("PrecoursorAbsoluteErrorRelative") < 0.24) {
if (match.getScore("peptide2 matched conservative") < 5.5) {
if (match.getScore("mgxDelta") < 6.96) {
if (match.getScore("spectra top100 matched%") < 0.29) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("mgcBeta") < 36.99) {
if (match.getScore("mgxDelta") < 0.5) {
return 1;
} else {
if (match.getScore("fragment non lossy coverage") < 0.47) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 unique matched conservative") < 4.5) {
if (match.getPeptide2().length() < 5.5) {
return 1;
} else {
if (match.getScore("peptide2 sequencetag coverage%") < 0.3) {
if (match.getSpectrum().getPrecurserMass() < 2384.2) {
if (match.getScore("fragment non lossy coverage") < 0.47) {
return 1;
} else {
if (match.getScore("spectra top100 matched%") < 0.24) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getSpectrum().getPrecurserMass() < 4766.05) {
return 1;
} else {
return 1;
}
}
} else {
if (match.getScore("spectrum intensity coverage") < 0.27) {
if (match.getPeptide2().length() < 7.5) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("1-ErrorRelative") < 0.7) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("spectrum intensity coverage") < 0.38) {
if (match.getScore("fragment non lossy coverage") < 0.43) {
if (match.getScore("mgcBeta") < 48.27) {
if (match.getScore("mgxDelta") < 20.59) {
if (match.getPeptide2().length() < 13.5) {
return 1;
} else {
return 1;
}
} else {
if (match.getScore("mgxScore") < 155.01) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
} else {
if (match.getScore("1-ErrorRelative") < 0.76) {
if (match.getScore("mgxDelta") < 18.4) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("spectra matched single%") < 0.18) {
if (match.getScore("mgcBeta") < 27.44) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 sequencetag coverage%") < 0.11) {
if (match.getScore("1-ErrorRelative") < 0.78) {
return 1;
} else {
if (match.getScore("mgxDelta") < 4.51) {
if (match.getScore("mgcBeta") < 50.44) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getScore("1-ErrorRelative") < 0.72) {
if (match.getScore("mgxDelta") < -0.19) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 unique matched conservative") < 4.5) {
if (match.getSpectrum().getPrecurserMass() < 2381.32) {
if (match.getScore("fragment sequencetag coverage%") < 0.36) {
return 1;
} else {
if (match.getScore("peptide2 non lossy matched") < 3.5) {
return 1;
} else {
if (match.getScore("spectra matched isotop%") < 0.41) {
return 1;
} else {
if (match.getScore("peptide2 unique matched conservative coverage") < 0.45) {
return 0;
} else {
return 1;
}
}
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getSpectrum().getPrecurserMass() < 2793.51) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.6) {
return 0;
} else {
if (match.getPeptide1().length() < 11.5) {
return 1;
} else {
return 1;
}
}
}
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.3) {
if (match.getScore("fragment coverage") < 0.56) {
if (match.getScore("peptide2 non lossy matched") < 9) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("1-ErrorRelative") < 0.76) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.2) {
if (match.getScore("spectra top100 matched%") < 0.3) {
if (match.getPeptide1().length() < 9.5) {
return 1;
} else {
if (match.getScore("mgcScore") < 114.73) {
return 1;
} else {
if (match.getScore("peptide2 unique matched conservative coverage") < 0.32) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("1-ErrorRelative") < 0.83) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("1-ErrorRelative") < 0.73) {
if (match.getScore("spectra intensity nonlossy coverage") < 0.39) {
if (match.getScore("peptide2 unique matched conservative") < 7.5) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("Pep2Score") < 0.34) {
if (match.getScore("spectra top100 matched%") < 0.27) {
if (match.getScore("mgcScore") < 115.82) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.11) {
if (match.getSpectrum().getPrecurserMZ() < 601.34) {
if (match.getSpectrum().getPrecurserMZ() < 394.02) {
return 1;
} else {
if (match.getScore("peptide2 matched conservative") < 4.5) {
return 1;
} else {
if (match.getScore("fragment sequencetag coverage%") < 0.2) {
return 1;
} else {
if (match.getScore("mgcBeta") < 27.04) {
return 1;
} else {
return 0;
}
}
}
}
} else {
if (match.getScore("mgcBeta") < 47.92) {
if (match.getSpectrum().getPrecurserMZ() < 699.39) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMZ() < 699.41) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("peptide2 matched conservative") < 5.5) {
if (match.getScore("peptide2 unique matched lossy coverage") < 0.21) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("spectrum quality score") < 0.43) {
if (match.getScore("mgxScore") < 115.88) {
if (match.getScore("fragment sequencetag coverage%") < 0.34) {
return 1;
} else {
if (match.getScore("mgcBeta") < 18.4) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgcBeta") < 29.87) {
if (match.getScore("fragment unique matched non lossy coverage") < 0.44) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("mgxDelta") < 9.78) {
if (match.getScore("spectra intensity nonlossy coverage") < 0.17) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getScore("mgxDelta") < 7.97) {
if (match.getScore("peptide2 non lossy matched") < 3.5) {
return 1;
} else {
if (match.getScore("spectra top100 matched%") < 0.28) {
return 1;
} else {
return 0;
}
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getPeptide1().length() < 11.5) {
if (match.getPeptide1().length() < 7.5) {
return 1;
} else {
if (match.getScore("PrecoursorCharge") < 3.5) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMZ() < 689.1) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMZ() < 699.65) {
return 0;
} else {
return 1;
}
}
}
}
} else {
return 1;
}
} else {
if (match.getScore("spectra intensity nonlossy coverage") < 0.3) {
if (match.getScore("fragment unique matched non lossy coverage") < 0.41) {
if (match.getScore("peptide2 non lossy matched") < 9.5) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("Precoursor Absolute Error") < 1.88) {
if (match.getScore("FragmentLibraryScoreExponential") < 0.98) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("peptide2 non lossy matched") < 8.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("Precoursor Absolute Error") < 1.08) {
if (match.getScore("peptide2 conservative coverage") < 0.23) {
if (match.getScore("spectra intensity nonlossy coverage") < 0.5) {
if (match.getScore("Precoursor Error") < 0.5) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("mgxRank") < 1.5) {
if (match.getScore("peptide2 coverage") < 0.42) {
if (match.getScore("mgxScore") < 146.38) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("fragment unique matched lossy coverage") < 0.29) {
return 1;
} else {
if (match.getScore("peptide2 non lossy matched") < 6.5) {
return 1;
} else {
return 0;
}
}
}
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.11) {
if (match.getPeptide2().length() < 6.5) {
if (match.getPeptide2().length() < 5.5) {
return 1;
} else {
if (match.getScore("peptide2 matched conservative") < 4.5) {
return 1;
} else {
if (match.getScore("mgxDelta") < 4.36) {
if (match.getScore("spectra top100 matched%") < 0.28) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("PrecoursorCharge") < 3.5) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMZ() < 580.76) {
return 1;
} else {
if (match.getPeptide2().length() < 14.5) {
return 1;
} else {
if (match.getScore("fragment lossy matched") < 22) {
return 1;
} else {
return 0;
}
}
}
}
}
} else {
if (match.getScore("SpectraCoverageConservative") < 0.32) {
if (match.getScore("fragment unique matched conservative coverage") < 0.42) {
if (match.getScore("mgxDelta") < 23.35) {
if (match.getScore("peptide2 lossy matched") < 10.5) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("fragment non lossy matched") < 10.5) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 matched") < 5.5) {
if (match.getScore("mgxDelta") < 13.88) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched conservative") < 4.5) {
if (match.getScore("mgxDelta") < 8.23) {
if (match.getPeptide2().length() < 5.5) {
return 1;
} else {
if (match.getScore("peptide2 sequencetag coverage%") < 0.32) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("fragment unique matched non lossy coverage") < 0.36) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("mgxRank") < 0.5) {
if (match.getScore("FragmentLibraryScoreLog") < 15.99) {
if (match.getScore("spectra top100 matched%") < 0.21) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.09) {
if (match.getSpectrum().getPrecurserMass() < 2269.16) {
if (match.getSpectrum().getPrecurserMass() < 1611.36) {
return 1;
} else {
if (match.getScore("SpectraCoverageConservative") < 0.48) {
return 1;
} else {
if (match.getScore("peptide2 unique matched non lossy") < 4.5) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.51) {
return 1;
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.61) {
return 0;
} else {
return 1;
}
}
}
} else {
if (match.getScore("SpectraCoverageConservative") < 0.32) {
if (match.getScore("SpectraCoverageConservative") < 0.25) {
return 1;
} else {
if (match.getScore("peptide2 unique matched non lossy") < 6.5) {
if (match.getScore("fragment sequencetag coverage%") < 0.3) {
return 1;
} else {
if (match.getScore("peptide2 matched") < 5.5) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("1-ErrorRelative") < 0.84) {
if (match.getScore("spectra top100 matched%") < 0.18) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy") < 4.5) {
if (match.getScore("1-ErrorRelative") < 0.82) {
if (match.getScore("FragmentLibraryScoreLog") < 58.46) {
return 1;
} else {
if (match.getScore("peptide1 coverage") < 0.52) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("spectra top100 matched%") < 0.3) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("1-ErrorRelative") < 0.74) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.34) {
if (match.getScore("mgcDelta") < 23.87) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 unique matched conservative") < 4.5) {
if (match.getSpectrum().getPrecurserMass() < 2283.27) {
if (match.getScore("spectrum quality score") < 0.45) {
return 1;
} else {
if (match.getScore("peptide2 matched conservative") < 3.5) {
if (match.getScore("spectra matched isotop%") < 0.38) {
return 0;
} else {
return 1;
}
} else {
if (match.getScore("1-ErrorRelative") < 0.73) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("fragment matched conservative") < 16.5) {
if (match.getSpectrum().getPrecurserMass() < 2793.51) {
if (match.getSpectrum().getPrecurserMZ() < 509.38) {
if (match.getSpectrum().getPrecurserMZ() < 508.87) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getSpectrum().getPrecurserMass() < 2793.6) {
return 0;
} else {
return 1;
}
}
} else {
if (match.getScore("peptide2 matched conservative") < 3.5) {
if (match.getPeptide1().length() < 19.5) {
if (match.getPeptide1().length() < 11.5) {
return 0;
} else {
return 1;
}
} else {
return 1;
}
} else {
if (match.getScore("1-ErrorRelative") < 0.73) {
return 1;
} else {
if (match.getScore("Pep2Score") < 0.31) {
return 1;
} else {
return 0;
}
}
}
}
}
} else {
if (match.getScore("spectrum quality score") < 0.42) {
if (match.getScore("fragment unique matched conservative coverage") < 0.41) {
if (match.getScore("peptide2 matched conservative") < 9.5) {
return 1;
} else {
return 0;
}
} else {
if (match.getScore("1-ErrorRelative") < 0.71) {
return 1;
} else {
return 0;
}
}
} else {
if (match.getScore("peptide2 unique matched non lossy coverage") < 0.26) {
if (match.getScore("spectrum peaks coverage") < 0.45) {
if (match.getScore("Precoursor Absolute Error") < 1.37) {
return 0;
} else {
return 1;
}
} else {
return 0;
}
} else {
if (match.getScore("Precoursor Absolute Error") < 1.68) {
return 0;
} else {
if (match.getScore("spectrum quality score") < 0.49) {
if (match.getScore("peptide2 unique matched conservative") < 7.5) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
}
}
}
}
}
});
repTrees.add(new tree() {
public int isFalsePositive(MatchedXlinkedPeptide match) {
if (match.getScore("peptide2 sequencetag coverage%") < 0.09) {
if (match.getPeptide2().length() < 6.5) {
return 1;
} else {
if (match.getPeptide1().length() < 8.5) {
return 1;
} else {
if (match.getPeptide1().length() < 14.5) {
if (match.getSpectrum().getPrecurserMZ() < 702.36) {
if (match.getSpectrum().getPrecurserMZ() < 699.37) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
return 1;
}
}
}
} else {
if (match.getScore("spectrum quality score") < 0.43) {
if (match.getScore("total fragment matches") < 14.5) {
return 1;
} else {
if (match.getScore("PrecoursorAbsoluteErrorRelative") < 0.17) {
if (match.getScore("mgxDelta") < 6.56) {
if (match.getScore("peptide1 sequencetag coverage%") < 0.27) {
return 1;
} else {
return 0;
}
} else {
return 0;
}
} else {
if (match.getScore("mgxDelta") < 22.37) {
return 1;
} else {
return 0;
}
}
}
} else {
if (match.getScore("peptide2 non lossy matched") < 4.5) {
if (match.getScore("PrecoursorAbsoluteErrorRelative") < 0.21) {
if (match.getPeptide2().length() < 5.5) {
return 1;
} else {
return 0;
}
} else {
return 1;
}
} else {
if (match.getScore("PrecoursorAbsoluteErrorRelative") < 0.31) {
return 0;
} else {
if (match.getScore("mgxDelta") < -0.19) {
return 1;
} else {
return 0;
}
}
}
}
}
}
});
}
@Override
public String[] scoreNames() {
    // This scorer publishes exactly one score, identified by the scorename field.
    final String[] names = { scorename };
    return names;
}
/**
 * Auto-validation score: 1 when the match passes both decision-tree ensembles,
 * 0 otherwise. A match passes an ensemble when at most 3 of its trees vote it
 * a false positive.
 *
 * @param match the cross-linked peptide match to evaluate
 * @return 1 if auto-validated, 0 otherwise
 */
public double score(MatchedXlinkedPeptide match) {
    if (match.getPeptides().length != 2) {
        // Only cross-linked (two-peptide) matches can be auto-validated.
        addScore(match, scorename, 0);
        return 0;
    }
    int falsePositiveVotesRandom = 0;
    for (tree t : randomTrees) {
        falsePositiveVotesRandom += t.isFalsePositive(match);
    }
    int falsePositiveVotesRep = 0;
    // BUG FIX: this loop previously iterated randomTrees a second time, so FPRep
    // duplicated FPRandom and the repTrees ensemble was never consulted.
    for (tree t : repTrees) {
        falsePositiveVotesRep += t.isFalsePositive(match);
    }
    // NOTE(review): the early exit above records under the `scorename` key while the
    // paths below use the literal "Autovalidation" — presumably these are the same
    // key; confirm against the field's declaration.
    if (falsePositiveVotesRep <= 3 && falsePositiveVotesRandom <= 3) {
        addScore(match, "Autovalidation", 1);
        return 1;
    }
    addScore(match, "Autovalidation", 0);
    return 0;
}
/**
 * Ordering value used to sequence this scorer relative to others.
 */
public double getOrder() {
    final double orderingWeight = 100001;
    return orderingWeight;
}
}
| apache-2.0 |
studiodev/archives | 2009 - Team D4 (IxGamer)/include/Editor/assetmanager/language/spanish/foldernew.js | 591 | function getText(s)
{
// Translate an English status message into its Spanish equivalent.
// Returns undefined for any message that has no translation entry.
switch(s)
{
case "Folder already exists.": return "La carpeta ya existe.";
case "Folder created.": return "Carpeta creada.";
case "Invalid input.":return "Nombre no valido.";
}
}
function loadText()
{
	// Localise the static UI: the caption text plus both button labels (Spanish).
	var labels = [
		["txtLang", "innerHTML", "Nombre nueva carpeta"],
		["btnCloseAndRefresh", "value", "Cerrar y actualizar"],
		["btnCreate", "value", "Crear"]
	];
	for (var i = 0; i < labels.length; i++)
	{
		document.getElementById(labels[i][0])[labels[i][1]] = labels[i][2];
	}
}
function writeTitle()
{
	// Emit the (Spanish) document title while the page is being parsed.
	var markup = "<title>" + "Crear Carpeta" + "</title>";
	document.write(markup);
}
| apache-2.0 |
benmfaul/XRTB | src/com/xrtb/tools/MaxLoad.java | 2313 | package com.xrtb.tools;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import com.xrtb.common.HttpPostGet;
/**
* Test program for loading maximum bids into a RTB4FREE bidder.
* @author Ben M. Faul
*
*/
public class MaxLoad implements Runnable {

	/** Sample bid request file whose contents are POSTed to the bidder. */
	String fileName = "SampleBids/nexage.txt";
	/** Target URL; rebuilt from host/port/exchange in the constructor. */
	String url = "http://localhost:8080/rtb/bids/nexage";
	HttpPostGet post = new HttpPostGet();
	/** Contents of the sample bid file, sent as the POST body. */
	String content;
	String host;
	Thread me;
	/**
	 * Requests completed since the last sampling window. Shared by all worker
	 * threads without synchronization; double += is not atomic, so the reported
	 * QPS is approximate.
	 */
	static double count = 0;

	/**
	 * Parses command line flags, spawns worker threads, and prints the observed
	 * request rate every two seconds.
	 */
	public static void main(String[] args) throws Exception {
		int threads = 10;
		int i = 0;
		String host = "localhost";
		String port = "8080";
		String exchange = "nexage";
		while (i < args.length) {
			switch (args[i]) {
			case "-h":
				System.out.println("-h [This message ]");
				System.out.println("-host host-or-ip [Where to send the bid (default is localhost]");
				System.out.println("-port n [Port number, default is 8080 ]");
				System.out.println("-exchange name [Name of exchange, default is nexage ]");
				System.out.println("-threads n [How many threads (default=10) ]");
				// BUG FIX: previously fell through into "-host" and misread the
				// following token as a host name; print usage and quit instead.
				System.exit(0);
			case "-host":
				host = args[i + 1];
				i += 2;
				break;
			case "-port":
				port = args[i + 1];
				i += 2;
				break;
			case "-exchange":
				exchange = args[i + 1];
				i += 2;
				break;
			case "-threads":
				threads = Integer.parseInt(args[i + 1]);
				i += 2;
				break;
			default:
				// BUG FIX: previously printed the message without advancing i,
				// looping forever on an unknown flag.
				System.err.println("Huh? " + args[i]);
				System.exit(1);
			}
		}
		i = 0;
		while (true) {
			if (i < threads) {
				new MaxLoad(host, port, exchange);
				i++;
			}
			count = 0;
			Thread.sleep(2000);
			// count now holds the requests completed during the 2 second window.
			System.out.println("Threads=" + i + ", QPS=" + count / 2);
		}
	}

	/**
	 * Builds the bid URL, loads the sample bid request, and starts this worker.
	 *
	 * @param host bidder host name or IP
	 * @param port bidder port
	 * @param exchange exchange name appended to the bid path
	 * @throws Exception if the sample bid file cannot be read
	 */
	public MaxLoad(String host, String port, String exchange) throws Exception {
		this.host = host;
		url = "http://" + host + ":" + port + "/rtb/bids/" + exchange;
		content = new String(Files.readAllBytes(Paths.get(fileName)), StandardCharsets.UTF_8);
		me = new Thread(this);
		me.start();
	}

	/**
	 * Hammers the bidder in a tight loop; each completed POST bumps the shared
	 * counter that main() samples for the QPS report. Errors are logged and the
	 * loop continues.
	 */
	public void run() {
		while (true) {
			try {
				post.sendPost(url, content, 1000, 1000);
				post = new HttpPostGet();
				count++;
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}
}
| apache-2.0 |
kuzmordas/akuznetsov | chapter_002/src/test/java/ru/job4j/shape/PaintTest.java | 1780 | package ru.job4j.shape;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* Paint test.
* @author Aleksandr Kuznetsov
* @version $Id$
* @since 0.1
*/
public class PaintTest {

    /**
     * Verifies that drawing a {@link Square} prints the expected ASCII picture.
     */
    @Test
    public void whenDrawSquare() {
        PrintStream stdout = System.out;
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        System.setOut(new PrintStream(out));
        try {
            new Paint().draw(new Square());
            assertThat(
                    // FIX: out.toString() is already a String; the extra
                    // new String(...) copy was redundant.
                    out.toString(),
                    is(
                            new StringBuilder()
                                    .append("+---+\n")
                                    .append("| |\n")
                                    .append("+---+\n")
                                    .append(System.lineSeparator())
                                    .toString()
                    )
            );
        } finally {
            // Restore stdout even if the assertion fails, so later tests
            // are not left writing into this buffer.
            System.setOut(stdout);
        }
    }

    /**
     * Verifies that drawing a {@link Triangle} prints the expected ASCII picture.
     */
    @Test
    public void whenDrawTriangle() {
        PrintStream stdout = System.out;
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        System.setOut(new PrintStream(out));
        try {
            new Paint().draw(new Triangle());
            assertThat(
                    out.toString(),
                    is(
                            new StringBuilder()
                                    .append(" * \n")
                                    .append(" *** \n")
                                    .append("*****\n")
                                    .append(System.lineSeparator())
                                    .toString()
                    )
            );
        } finally {
            System.setOut(stdout);
        }
    }
}
| apache-2.0 |
s-webber/projog | src/main/java/org/projog/core/predicate/builtin/io/Open.java | 2599 | /*
* Copyright 2013 S. Webber
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projog.core.predicate.builtin.io;
import static org.projog.core.term.TermUtils.getAtomName;
import org.projog.core.ProjogException;
import org.projog.core.predicate.AbstractSingleResultPredicate;
import org.projog.core.term.Atom;
import org.projog.core.term.Term;
/* TEST
%LINK prolog-io
*/
/**
* <code>open(X,Y,Z)</code> - opens a file.
* <p>
* <code>X</code> is an atom representing the name of the file to open. <code>Y</code> is an atom that should have
* either the value <code>read</code> to open the file for reading from or <code>write</code> to open the file for
* writing to. <code>Z</code> is instantiated by <code>open</code> to a special term that must be referred to in
* subsequent commands in order to access the stream.
* </p>
*/
public final class Open extends AbstractSingleResultPredicate {
   private static final String READ = "read";
   private static final String WRITE = "write";

   @Override
   protected boolean evaluate(Term fileNameAtom, Term operationAtom, Term variableToAssignTo) {
      // Resolve the atoms in the same order as before: operation first, then file name.
      final String operation = getAtomName(operationAtom);
      final String fileName = getAtomName(fileNameAtom);

      final Atom handle;
      switch (operation) {
         case READ:
            handle = openInput(fileName);
            break;
         case WRITE:
            handle = openOutput(fileName);
            break;
         default:
            throw new ProjogException("Second argument is not '" + READ + "' or '" + WRITE + "' but: " + operation);
      }

      variableToAssignTo.unify(handle);
      return true;
   }

   /** Opens the named file for reading and returns the stream handle. */
   private Atom openInput(String fileName) {
      try {
         return getFileHandles().openInput(fileName);
      } catch (Exception e) {
         throw new ProjogException("Unable to open input for: " + fileName, e);
      }
   }

   /** Opens the named file for writing and returns the stream handle. */
   private Atom openOutput(String fileName) {
      try {
         return getFileHandles().openOutput(fileName);
      } catch (Exception e) {
         throw new ProjogException("Unable to open output for: " + fileName + " " + e, e);
      }
   }
}
| apache-2.0 |
streamingpool/streamingpool-core | src/java/org/streamingpool/core/service/streamid/CompositionStreamId.java | 4390 | // @formatter:off
/**
*
* This file is part of streaming pool (http://www.streamingpool.org).
*
* Copyright (c) 2017-present, CERN. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// @formatter:on
package org.streamingpool.core.service.streamid;
import java.io.Serializable;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;

import org.reactivestreams.Publisher;
import org.streamingpool.core.service.StreamId;
/**
* Generic implementation of {@link StreamId} which in conjunction with the
* {@link org.streamingpool.core.service.streamfactory.CompositionStreamFactory} allows for the easy creation of
* general purpose streams based on composition of streams. This class is experimental.
*
* @param <X> The type of objects emitted by the source {@link org.reactivestreams.Publisher}s.
* @param <T> The type of objects emitted by the new created {@link org.reactivestreams.Publisher}.
* @author timartin
*/
@Deprecated
public final class CompositionStreamId<X, T> implements StreamId<T>, Serializable {
    private static final long serialVersionUID = 1L;

    /** Ids of the source streams fed to {@link #transformation}; stored (and exposed) without copying. */
    private final List<StreamId<X>> sourceStreamIds;
    /** Transformation applied to the publishers resolved from {@link #sourceStreamIds}. */
    private final Function<List<Publisher<X>>, Publisher<T>> transformation;

    /**
     * Creates a {@link CompositionStreamId} with the provided sourceStreamId and function.
     *
     * @param sourceStreamId A {@link StreamId} that identifies the {@link org.reactivestreams.Publisher} passed to the
     *            transformation function.
     * @param transformation The transformation {@link Function} to be used on the {@link org.reactivestreams.Publisher}
     *            identified by the provided {@link StreamId}.
     */
    public CompositionStreamId(StreamId<X> sourceStreamId, Function<List<Publisher<X>>, Publisher<T>> transformation) {
        this(Collections.singletonList(sourceStreamId), transformation);
    }

    /**
     * Creates a {@link CompositionStreamId} with the provided sourceStreamIds and function.
     *
     * @param sourceStreamIds A {@link List} of {@link StreamId}s that identifies the
     *            {@link org.reactivestreams.Publisher}s passed to the transformation function.
     * @param transformation The transformation {@link Function} to be used on the
     *            {@link org.reactivestreams.Publisher}s identified by the provided {@link List} of {@link StreamId}s.
     */
    public CompositionStreamId(List<StreamId<X>> sourceStreamIds,
            Function<List<Publisher<X>>, Publisher<T>> transformation) {
        this.sourceStreamIds = sourceStreamIds;
        this.transformation = transformation;
    }

    public List<StreamId<X>> sourceStreamIds() {
        return sourceStreamIds;
    }

    public Function<List<Publisher<X>>, Publisher<T>> transformation() {
        return transformation;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        CompositionStreamId<?, ?> that = (CompositionStreamId<?, ?>) o;
        // Objects.equals replaces the hand-rolled null-check chains; semantics are identical.
        return Objects.equals(sourceStreamIds, that.sourceStreamIds)
                && Objects.equals(transformation, that.transformation);
    }

    @Override
    public int hashCode() {
        // Objects.hash preserves the equals/hashCode contract; the numeric values differ
        // from the previous 31-based hand-rolled formula, which no caller may rely on.
        return Objects.hash(sourceStreamIds, transformation);
    }

    @Override
    public String toString() {
        return "CompositionStreamId{" +
                "sourceStreamIds=" + sourceStreamIds +
                ", transformation=" + transformation +
                '}';
    }
}
| apache-2.0 |
socibo/socibo-camel-loader | src/main/java/co/socibo/social/quickstart/user/SimpleConnectionSignUp.java | 1315 | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package co.socibo.social.quickstart.user;
import java.util.concurrent.atomic.AtomicLong;
import org.springframework.social.connect.Connection;
import org.springframework.social.connect.ConnectionSignUp;
/**
* Simple little {@link ConnectionSignUp} command that allocates new userIds in memory.
* Doesn't bother storing a user record in any local database, since this quickstart just stores the user id in a cookie.
* @author Keith Donald
*/
public final class SimpleConnectionSignUp implements ConnectionSignUp {

	/** Monotonically increasing source of user ids; first allocated id is 1. */
	private final AtomicLong userIdSequence = new AtomicLong();

	/**
	 * Allocates the next in-memory user id. The connection argument is not
	 * inspected; ids are purely sequential.
	 */
	public String execute(Connection<?> connection) {
		final long nextId = userIdSequence.incrementAndGet();
		return String.valueOf(nextId);
	}

}
| apache-2.0 |
romartin/kie-wb-common | kie-wb-common-widgets/kie-wb-common-ui/src/test/java/org/kie/workbench/common/widgets/client/assets/dropdown/KogitoKieAssetsDropdownTest.java | 5073 | /*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.widgets.client.assets.dropdown;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import com.google.gwtmockito.GwtMockitoTestRunner;
import elemental2.dom.HTMLElement;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.uberfire.mvp.Command;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
// Unit tests for KogitoKieAssetsDropdown: covers handler registration, asset
// loading through the data provider, element/value accessors and the
// asset-list consumer callback. Runs under GwtMockito so GWT widgets can be mocked.
@RunWith(GwtMockitoTestRunner.class)
public class KogitoKieAssetsDropdownTest extends AbstractKieAssetsDropdownTest {
    // Consumer expected to receive the items fetched by the data provider.
    @Mock
    private Consumer<List<KieAssetsDropdownItem>> kieAssetsConsumer;
    // Kogito-specific view implementation backing the dropdown under test.
    @Mock
    private KogitoKieAssetsDropdownView viewlocalMock;
    // System under test; spied so interactions can be verified.
    private KieAssetsDropdown dropdownLocal;
    @Before
    public void setup() {
        // The instance-initializer block wires the mocked change handler and
        // pre-populates the dropdown's internal asset list with the shared fixture.
        dropdownLocal = spy(new KogitoKieAssetsDropdown(viewlocalMock, dataProviderMock) {
            {
                onValueChangeHandler = onValueChangeHandlerMock;
                this.kieAssets.addAll(assetList);
            }
        });
        commonSetup();
    }
    // A registered change handler must be executed when the value changes.
    @Test
    public void testRegisterOnChangeHandler() {
        final Command command = mock(Command.class);
        getDropdown().registerOnChangeHandler(command);
        getDropdown().onValueChanged();
        verify(command).execute();
    }
    // loadAssets() must clear the dropdown, enable dropdown mode on the view and
    // delegate item retrieval to the data provider with the asset-list consumer.
    @Test
    public void testLoadAssetsWhenEnvIsNotKogito() {
        doReturn(kieAssetsConsumer).when((KogitoKieAssetsDropdown) getDropdown()).getAssetListConsumer();
        getDropdown().loadAssets();
        verify(getDropdown()).clear();
        verify(viewlocalMock).enableDropdownMode();
        verify(dataProviderMock).getItems(kieAssetsConsumer);
    }
    // initialize() must refresh the select-picker widget in the view.
    @Test
    public void testInitialize() {
        getDropdown().initialize();
        verify(getViewMock()).refreshSelectPicker();
    }
    // getElement() must expose the view's root element unchanged.
    @Test
    public void testGetElement() {
        final HTMLElement expectedElement = mock(HTMLElement.class);
        when(getViewMock().getElement()).thenReturn(expectedElement);
        final HTMLElement actualElement = getDropdown().getElement();
        assertEquals(expectedElement, actualElement);
    }
    // getValue() must return the item whose value matches the view's selection.
    @Test
    public void testGetValue() {
        final List<KieAssetsDropdownItem> kieAssets = IntStream.range(0, 4).mapToObj(i -> {
            final KieAssetsDropdownItem toReturn = mock(KieAssetsDropdownItem.class);
            when(toReturn.getValue()).thenReturn("item" + i);
            return toReturn;
        }).collect(Collectors.toList());
        when(getViewMock().getValue()).thenReturn("item2");
        ((KogitoKieAssetsDropdown) getDropdown()).kieAssets.clear();
        ((KogitoKieAssetsDropdown) getDropdown()).kieAssets.addAll(kieAssets);
        final Optional<KieAssetsDropdownItem> retrieved = getDropdown().getValue();
        assertTrue(retrieved.isPresent());
        assertEquals("item2", retrieved.get().getValue());
    }
    // With no assets present, getValue() must be empty rather than failing.
    @Test
    public void testGetValueWhenOptionDoesNotExist() {
        ((KogitoKieAssetsDropdown) getDropdown()).kieAssets.clear();
        assertFalse(getDropdown().getValue().isPresent());
    }
    // The consumer returned by getAssetListConsumer() must delegate to
    // assetListConsumerMethod exactly once.
    @Test
    public void getAssetListConsumer() {
        final List<KieAssetsDropdownItem> expectedDropdownItems = new ArrayList<>();
        ((KogitoKieAssetsDropdown) getDropdown()).getAssetListConsumer().accept(expectedDropdownItems);
        verify(((KogitoKieAssetsDropdown) getDropdown()), times(1)).assetListConsumerMethod(eq(expectedDropdownItems));
    }
    // assetListConsumerMethod must push every item to the view, then refresh
    // the select picker and re-initialize the view.
    @Test
    public void assetListConsumerMethod() {
        ((KogitoKieAssetsDropdown) getDropdown()).assetListConsumerMethod(assetList);
        assetList.forEach(item -> verify(getViewMock()).addValue(item));
        verify(getViewMock()).refreshSelectPicker();
        verify(getViewMock()).initialize();
    }
    // Hook for the base test class: the concrete dropdown under test.
    @Override
    protected KieAssetsDropdown getDropdown() {
        return dropdownLocal;
    }
    // Hook for the base test class: the concrete view mock.
    @Override
    protected KieAssetsDropdown.View getViewMock() {
        return viewlocalMock;
    }
}
| apache-2.0 |
macchina-io/macchina.io | platform/Util/testsuite/src/ConfigurationMapperTest.cpp | 6667 | //
// ConfigurationMapperTest.cpp
//
// Copyright (c) 2004-2006, Applied Informatics Software Engineering GmbH.
// and Contributors.
//
// SPDX-License-Identifier: BSL-1.0
//
#include "ConfigurationMapperTest.h"
#include "CppUnit/TestCaller.h"
#include "CppUnit/TestSuite.h"
#include "Poco/Util/ConfigurationMapper.h"
#include "Poco/Util/MapConfiguration.h"
#include "Poco/AutoPtr.h"
#include "Poco/Exception.h"
#include <algorithm>
using Poco::Util::AbstractConfiguration;
using Poco::Util::ConfigurationMapper;
using Poco::Util::MapConfiguration;
using Poco::AutoPtr;
// Test fixture for Poco::Util::ConfigurationMapper. Each testMapperN variant
// wraps the MapConfiguration built by AbstractConfigurationTest with a
// different (fromPrefix, toPrefix) pair and checks that key enumeration,
// reads, writes and removals are translated between the mapped view and the
// underlying configuration.
ConfigurationMapperTest::ConfigurationMapperTest(const std::string& name): AbstractConfigurationTest(name)
{
}
ConfigurationMapperTest::~ConfigurationMapperTest()
{
}
// Identity mapping: empty "from" and "to" prefixes expose the configuration
// unchanged; writes and removals pass straight through to the wrapped config.
void ConfigurationMapperTest::testMapper1()
{
	AutoPtr<AbstractConfiguration> pConf = createConfiguration();
	AutoPtr<AbstractConfiguration> pMapper = new ConfigurationMapper("", "", pConf);
	assertTrue (pMapper->hasProperty("prop5.string1"));
	assertTrue (pMapper->hasProperty("prop5.string1"));
	AbstractConfiguration::Keys keys;
	pMapper->keys(keys);
	assertTrue (keys.size() == 13);
	assertTrue (std::find(keys.begin(), keys.end(), "prop5") != keys.end());
	pMapper->keys("prop5", keys);
	assertTrue (keys.size() == 4);
	assertTrue (std::find(keys.begin(), keys.end(), "string1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "string2") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub2") != keys.end());
	assertTrue (pMapper->getString("prop5.string1") == "foo");
	assertTrue (pMapper->getString("prop5.sub1.string1") == "FOO");
	pMapper->setString("prop5.string3", "baz");
	assertTrue (pMapper->getString("prop5.string3") == "baz");
	assertTrue (pConf->getString("prop5.string3") == "baz");
	pMapper->remove("prop5.string3");
	assertTrue (!pMapper->hasProperty("prop5.string3"));
	assertTrue (!pConf->hasProperty("prop5.string3"));
}
// Maps subtree "prop5" of the wrapped config to appear under "root.conf".
void ConfigurationMapperTest::testMapper2()
{
	AutoPtr<AbstractConfiguration> pConf = createConfiguration();
	AutoPtr<AbstractConfiguration> pMapper = new ConfigurationMapper("prop5", "root.conf", pConf);
	assertTrue (pMapper->hasProperty("root.conf.string1"));
	assertTrue (pMapper->hasProperty("root.conf.string2"));
	AbstractConfiguration::Keys keys;
	pMapper->keys(keys);
	assertTrue (keys.size() == 1);
	assertTrue (std::find(keys.begin(), keys.end(), "root") != keys.end());
	pMapper->keys("root", keys);
	assertTrue (keys.size() == 1);
	assertTrue (std::find(keys.begin(), keys.end(), "conf") != keys.end());
	pMapper->keys("root.conf", keys);
	assertTrue (keys.size() == 4);
	assertTrue (std::find(keys.begin(), keys.end(), "string1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "string2") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub2") != keys.end());
	assertTrue (pMapper->getString("root.conf.string1") == "foo");
	assertTrue (pMapper->getString("root.conf.sub1.string1") == "FOO");
	pMapper->setString("root.conf.string3", "baz");
	assertTrue (pMapper->getString("root.conf.string3") == "baz");
	assertTrue (pConf->getString("prop5.string3") == "baz");
	pMapper->remove("root.conf.string3");
	assertTrue (!pMapper->hasProperty("root.conf.string3"));
	assertTrue (!pConf->hasProperty("prop5.string3"));
}
// Maps the whole wrapped config (empty "from" prefix) under a new "root" key.
void ConfigurationMapperTest::testMapper3()
{
	AutoPtr<AbstractConfiguration> pConf = createConfiguration();
	AutoPtr<AbstractConfiguration> pMapper = new ConfigurationMapper("", "root", pConf);
	assertTrue (pMapper->hasProperty("root.prop5.string1"));
	assertTrue (pMapper->hasProperty("root.prop5.string2"));
	AbstractConfiguration::Keys keys;
	pMapper->keys(keys);
	assertTrue (keys.size() == 1);
	assertTrue (std::find(keys.begin(), keys.end(), "root") != keys.end());
	pMapper->keys("root", keys);
	assertTrue (keys.size() == 13);
	assertTrue (std::find(keys.begin(), keys.end(), "prop5") != keys.end());
	pMapper->keys("root.prop5", keys);
	assertTrue (keys.size() == 4);
	assertTrue (std::find(keys.begin(), keys.end(), "string1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "string2") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub2") != keys.end());
	assertTrue (pMapper->getString("root.prop5.string1") == "foo");
	assertTrue (pMapper->getString("root.prop5.sub1.string1") == "FOO");
	pMapper->setString("root.prop5.string3", "baz");
	assertTrue (pMapper->getString("root.prop5.string3") == "baz");
	assertTrue (pConf->getString("prop5.string3") == "baz");
	pMapper->remove("root.prop5.string3");
	assertTrue (!pMapper->hasProperty("root.prop5.string3"));
	assertTrue (!pConf->hasProperty("prop5.string3"));
}
// Promotes subtree "prop5" to the top level (empty "to" prefix).
void ConfigurationMapperTest::testMapper4()
{
	AutoPtr<AbstractConfiguration> pConf = createConfiguration();
	AutoPtr<AbstractConfiguration> pMapper = new ConfigurationMapper("prop5", "", pConf);
	assertTrue (pMapper->hasProperty("string1"));
	assertTrue (pMapper->hasProperty("string2"));
	AbstractConfiguration::Keys keys;
	pMapper->keys(keys);
	assertTrue (keys.size() == 4);
	assertTrue (std::find(keys.begin(), keys.end(), "string1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "string2") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub1") != keys.end());
	assertTrue (std::find(keys.begin(), keys.end(), "sub2") != keys.end());
	assertTrue (pMapper->getString("string1") == "foo");
	assertTrue (pMapper->getString("sub1.string1") == "FOO");
	pMapper->setString("string3", "baz");
	assertTrue (pMapper->getString("string3") == "baz");
	assertTrue (pConf->getString("prop5.string3") == "baz");
	pMapper->remove("string3");
	assertTrue (!pMapper->hasProperty("string3"));
	assertTrue (!pConf->hasProperty("prop5.string3"));
}
// Supplies the concrete configuration type exercised by the base-class tests.
AbstractConfiguration::Ptr ConfigurationMapperTest::allocConfiguration() const
{
	return new MapConfiguration;
}
void ConfigurationMapperTest::setUp()
{
}
void ConfigurationMapperTest::tearDown()
{
}
// Registers the shared AbstractConfigurationTest cases plus the four
// mapper-specific tests.
CppUnit::Test* ConfigurationMapperTest::suite()
{
	CppUnit::TestSuite* pSuite = new CppUnit::TestSuite("ConfigurationMapperTest");
	AbstractConfigurationTest_addTests(pSuite, ConfigurationMapperTest);
	CppUnit_addTest(pSuite, ConfigurationMapperTest, testMapper1);
	CppUnit_addTest(pSuite, ConfigurationMapperTest, testMapper2);
	CppUnit_addTest(pSuite, ConfigurationMapperTest, testMapper3);
	CppUnit_addTest(pSuite, ConfigurationMapperTest, testMapper4);
	return pSuite;
}
| apache-2.0 |
rabsouza/arcadia-caller | src/main/java/br/com/battista/arcadia/caller/validator/EntityValidator.java | 1348 | package br.com.battista.arcadia.caller.validator;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import br.com.battista.arcadia.caller.exception.ValidatorException;
import br.com.battista.arcadia.caller.model.BaseEntity;
import lombok.extern.slf4j.Slf4j;
@Component
@Slf4j
public class EntityValidator {

    @Autowired
    @Qualifier("hibernateValidator")
    private Validator validator;

    /**
     * Validates the given entity against its Bean Validation constraints.
     *
     * @param entity the entity to validate; a {@code null} entity is logged
     *               and silently ignored
     * @throws ValidatorException if any constraint violation is found
     */
    public void validate(BaseEntity entity) {
        if (entity == null) {
            // javax.validation.Validator#validate rejects a null object with
            // an IllegalArgumentException, so bail out after warning instead
            // of forwarding null to it (the original code did forward it).
            log.warn("Validating a null entity!");
            return;
        }
        Set<ConstraintViolation<BaseEntity>> violations = validator.validate(entity);
        // NOTE: entity is known non-null here, so the former
        // "entity != null && entity.getClass() != null" guard was dead code.
        if (CollectionUtils.isNotEmpty(violations)) {
            String message = String.format("Constraint validation error to entity: %s",
                    entity.getClass().getSimpleName());
            log.error(message);
            log.error("Violations: [{}]", violations);
            throw new ValidatorException(violations, message);
        }
    }
}
| apache-2.0 |
digitalocean/doctl | commands/balance_test.go | 1306 | /*
Copyright 2018 The Doctl Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package commands
import (
"testing"
"time"
"github.com/digitalocean/doctl/do"
"github.com/digitalocean/godo"
"github.com/stretchr/testify/assert"
)
// testBalance is the canned balance returned by the mocked balance service.
var testBalance = &do.Balance{
	Balance: &godo.Balance{
		MonthToDateBalance: "23.44",
		AccountBalance: "12.23",
		MonthToDateUsage: "11.21",
		GeneratedAt: time.Date(2018, 6, 21, 8, 44, 38, 0, time.UTC),
	},
}

// TestBalanceCommand checks that the balance command tree exists and exposes
// exactly the "get" subcommand.
func TestBalanceCommand(t *testing.T) {
	acctCmd := Balance()
	assert.NotNil(t, acctCmd)
	assertCommandNames(t, acctCmd, "get")
}

// TestBalanceGet verifies that RunBalanceGet fetches the balance from the
// service without error.
func TestBalanceGet(t *testing.T) {
	withTestClient(t, func(config *CmdConfig, tm *tcMocks) {
		tm.balance.EXPECT().Get().Return(testBalance, nil)
		err := RunBalanceGet(config)
		assert.NoError(t, err)
	})
}
| apache-2.0 |
Happypig375/InnoTech-eLearning | InnoTecheLearning/InnoTecheLearning/InnoTecheLearning/Components/StreamPlayerLegacy.cs | 3969 | using System.IO;
using System.Threading.Tasks;
using static InnoTecheLearning.Utils;
namespace InnoTecheLearning
{
/// <summary>
/// Used to be the <see cref="StreamPlayer"/> between 0.10.0a51 to 0.10.0a64
/// </summary>
public class StreamPlayerLegacy : ISoundPlayer, System.IDisposable
{
public enum Sounds : byte
{Violin_G,
Violin_D,
Violin_A,
Violin_E,
Cello_C,
Cello_G,
Cello_D,
Cello_A}
public static StreamPlayerLegacy PlayAsync(Sounds Sound, double Volume = 1)
{
string Name = "";
switch (Sound)
{
case Sounds.Violin_G:
Name = "ViolinG.wav";
break;
case Sounds.Violin_D:
Name = "ViolinD.wav";
break;
case Sounds.Violin_A:
Name = "ViolinA.wav";
break;
case Sounds.Violin_E:
Name = "ViolinE.wav";
break;
case Sounds.Cello_C:
Name = "CelloCC.wav";
break;
case Sounds.Cello_G:
Name = "CelloGG.wav";
break;
case Sounds.Cello_D:
Name = "CelloD.wav";
break;
case Sounds.Cello_A:
Name = "CelloA.wav";
break;
default:
break;
}
return Create(Resources.GetStream("Sounds." + Name), true, Volume);
}
public static StreamPlayerLegacy Play(Sounds Sound, double Volume = 1)
{ return Play(Sound, Volume);}
private StreamPlayerLegacy() { }
SoundPlayer _Player;
string File;
public static StreamPlayerLegacy Create(Stream Stream, bool Loop = false, double Volume = 1)
{
var Return = new StreamPlayerLegacy();
Return.Init(Stream, Loop, Volume);
return Return;
}
protected void Init(Stream Stream, bool Loop, double Volume)
{
File = Temp.TempFile;
Temp.SaveStream(File, Stream);
_Player = SoundPlayer.Create(File, Loop, Volume);
}
public void Play()
{ _Player.Play(); }
public void Pause()
{ _Player.Pause(); }
public void Stop()
{ _Player.Stop(); }
public event System.EventHandler Complete
{ add { _Player.Complete += value; } remove { _Player.Complete -= value; } }
#region IDisposable Support
private bool disposedValue = false; // To detect redundant calls
protected virtual void Dispose(bool disposing)
{
if (!disposedValue)
{
if (disposing)
{
// TODO: dispose managed state (managed objects).
}
// TODO: free unmanaged resources (unmanaged objects) and override a finalizer below.
Temp.Delete(File);
// TODO: set large fields to null.
_Player = null;
disposedValue = true;
}
}
// TODO: override a finalizer only if Dispose(bool disposing) above has code to free unmanaged resources.
~StreamPlayerLegacy() {
// Do not change this code. Put cleanup code in Dispose(bool disposing) above.
Dispose(false);
}
// This code added to correctly implement the disposable pattern.
public void Dispose()
{
// Do not change this code. Put cleanup code in Dispose(bool disposing) above.
Dispose(true);
// TODO: uncomment the following line if the finalizer is overridden above.
// System.GC.SuppressFinalize(this);
}
#endregion
}
} | apache-2.0 |
y3seker/EgeYemekhaneMobil | app/src/main/java/com/y3seker/egeyemekhanemobil/utils/ParseUtils.java | 3454 | /*
* Copyright 2015 Yunus Emre Şeker. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.y3seker.egeyemekhanemobil.utils;
import com.y3seker.egeyemekhanemobil.constants.ParseConstants;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import java.util.HashMap;
/**
* Created by Yunus Emre Şeker on 2.11.2015.
* -
*/
public final class ParseUtils {

    /** Utility class; not meant to be instantiated. */
    private ParseUtils() {
    }

    // ---- From document ----

    /**
     * Extracts the ASP.NET view-state hidden fields from the given page.
     *
     * @param document parsed page
     * @return map of view-state field name to value
     * @throws NullPointerException if an expected hidden field is missing
     */
    public static HashMap<String, String> extractViewState(Document document) throws NullPointerException {
        HashMap<String, String> result = new HashMap<>();
        extractViewState(result, document);
        return result;
    }

    /**
     * Fills {@code viewStates} with the view-state hidden fields of the page.
     * The event-validation field is optional and skipped when absent.
     */
    public static void extractViewState(HashMap<String, String> viewStates, Document document) throws NullPointerException {
        viewStates.put(ParseConstants.VIEW_STATE, document.getElementById(ParseConstants.VIEW_STATE).val());
        viewStates.put(ParseConstants.VIEW_STATE_GEN, document.getElementById(ParseConstants.VIEW_STATE_GEN).val());
        Element eventVal = document.getElementById(ParseConstants.EVENT_VAL);
        if (eventVal == null) return;
        // Reuse the element already looked up instead of a second
        // getElementById() call (the original re-queried the document).
        viewStates.put(ParseConstants.EVENT_VAL, eventVal.val());
    }

    public static String getUserName(Document document) {
        return document.getElementById(ParseConstants.USERS_NAME).text();
    }

    public static boolean isLoginPage(Document doc) {
        return doc.getElementById(ParseConstants.LOGIN) != null;
    }

    public static boolean isBlockedPaged(Document doc) {
        return doc.getElementById("box") != null;
    }

    public static boolean isOrderWarningPage(Document doc) {
        // NOTE(review): throws NPE if the page has no <form>; callers are
        // assumed to pass pages that always contain one — confirm.
        return doc.select("form").first().attr("action").equals("./hata.aspx?no=1");
    }

    public static boolean isLoginSucceed(Document doc) {
        // Consistent with isLoginSuccess(String): success == not a login page.
        return !isLoginPage(doc);
    }

    // ---- From string ----

    /**
     * Same as {@link #extractViewState(Document)} but parses the raw HTML
     * first; returns {@code null} when a required field is missing.
     */
    public static HashMap<String, String> extractViewState(String rawHTML) {
        HashMap<String, String> result = new HashMap<>();
        Document doc = Jsoup.parse(rawHTML);
        try {
            result.put(ParseConstants.VIEW_STATE, doc.getElementById(ParseConstants.VIEW_STATE).val());
            result.put(ParseConstants.VIEW_STATE_GEN, doc.getElementById(ParseConstants.VIEW_STATE_GEN).val());
            result.put(ParseConstants.EVENT_VAL, doc.getElementById(ParseConstants.EVENT_VAL).val());
        } catch (NullPointerException e) {
            return null;
        }
        return result;
    }

    public static boolean isLoginSuccess(String rawHTML) {
        return !isLoginPage(rawHTML);
    }

    public static boolean isLoginPage(String rawHTML) {
        return isLoginPage(Jsoup.parse(rawHTML));
    }

    public static String getUserName(String rawHTML) {
        return Jsoup.parse(rawHTML).getElementById(ParseConstants.USERS_NAME).text();
    }
}
| apache-2.0 |
MegafonWebLab/histone-java2 | spring/src/test/java/ru/histone/v2/spring/resource/loader/ServletContextLoaderTest.java | 3905 | package ru.histone.v2.spring.resource.loader;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.AnnotationConfigWebContextLoader;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.web.context.WebApplicationContext;
import ru.histone.v2.evaluator.resource.Resource;
import ru.histone.v2.evaluator.resource.loader.Loader;
import ru.histone.v2.spring.view.config.HistoneTestConfig;
import javax.annotation.PostConstruct;
import java.net.URI;
import java.util.Collections;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
import static ru.histone.v2.spring.resource.loader.ClassPathLoaderTest.createCtx;
/**
* @author Aleksander Melnichnikov
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = HistoneTestConfig.class, loader = AnnotationConfigWebContextLoader.class)
@WebAppConfiguration("/target/test-classes/webapp")
// Verifies that ServletContextLoader resolves templates relative to the
// servlet context (with or without a leading slash) and ignores URIs that
// carry a scheme (classPath:, file:) — those belong to other loaders.
public class ServletContextLoaderTest {
    @Autowired
    protected WebApplicationContext webApplicationContext;
    private Loader servletContextLoader;
    @PostConstruct
    public void initLoader() {
        // Loader under test, bound to the test webapp's servlet context.
        servletContextLoader = new ServletContextLoader(webApplicationContext.getServletContext());
    }
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadResource_existing_0() throws Exception {
        // Absolute (leading-slash) context path.
        URI existingResourceURI = URI.create("/WEB-INF/templates/testTemplate.tpl");
        Resource existingResource = (Resource) servletContextLoader.loadResource(createCtx(), existingResourceURI,
                Collections.EMPTY_MAP).join();
        assertNotNull(existingResource);
    }
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadResource_existing_1() throws Exception {
        // Relative (no leading slash) context path must work as well.
        URI existingResourceURI = URI.create("WEB-INF/templates/testTemplate.tpl");
        Resource existingResource = (Resource) servletContextLoader.loadResource(createCtx(), existingResourceURI,
                Collections.EMPTY_MAP).join();
        assertNotNull(existingResource);
    }
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadResource_notExisting() throws Exception {
        // A missing resource surfaces as an exception from the joined future.
        URI notExistingResourceURI = URI.create("servletContextResourceNotExist.tpl");
        try {
            servletContextLoader.loadResource(createCtx(), notExistingResourceURI,
                    Collections.EMPTY_MAP).join();
            fail("Exception should be thrown");
        } catch (Exception ex) {
            assertThat(ex.getMessage(),
                    is("Error with ResourceInputStream for file 'servletContextResourceNotExist.tpl'"));
        }
    }
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadResource_badSchema_0() throws Exception {
        // URIs with a scheme are not handled by this loader: null result.
        URI badSchemaURI = URI.create("classPath:/WEB-INF/templates/testTemplate.tpl");
        Resource resource = (Resource) servletContextLoader.loadResource(createCtx(), badSchemaURI,
                Collections.EMPTY_MAP).join();
        assertNull(resource);
    }
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadResource_badSchema_1() throws Exception {
        URI badSchemaURI = URI.create("file:/WEB-INF/templates/testTemplate.tpl");
        Resource resource = (Resource) servletContextLoader.loadResource(createCtx(), badSchemaURI,
                Collections.EMPTY_MAP).join();
        assertNull(resource);
    }
}
lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_3891.java | 151 | package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
/** Generated empty class used to stress-test annotation processing of {@code @Annotation_001}. */
@Annotation_001
public class Class_3891 {
}
| apache-2.0 |
dialectsoftware/DialectSoftware.Networking | Examples/BroadcastClient/Program.cs | 1035 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Net;
using System.Threading;
using DialectSoftware.Networking;
using System.Net.NetworkInformation;
namespace DialectSoftware
{
    // Console demo: reads keystrokes and broadcasts each one on UDP port 1300
    // until Escape is pressed.
    class Program
    {
        static DialectSoftware.Networking.BroadcastClient client = new Networking.BroadcastClient();
        static void Main(string[] args)
        {
            Console.WriteLine("//****This program accepts console input***//");
            // Receive handler intentionally left disabled; enable to echo
            // incoming broadcasts via client_Receive below.
            //client.Receive += new Networking.AsyncNetworkCallBack(client_Receive);
            ConsoleKeyInfo result;
            while ((result = Console.ReadKey()).Key != ConsoleKey.Escape)
            {
                // Broadcast the single typed character as ASCII bytes.
                client.Send(1300, System.Text.ASCIIEncoding.ASCII.GetBytes(result.KeyChar.ToString()), -1);
            }
        }
        // Callback for the (currently disabled) Receive subscription above.
        static void client_Receive(Networking.AsyncNetworkAdapter adapter)
        {
            Console.WriteLine(System.Text.ASCIIEncoding.ASCII.GetString(adapter.Buffer));
        }
    }
}
| apache-2.0 |
lsimons/phloc-schematron-standalone | phloc-commons/src/test/java/com/phloc/commons/charset/CharsetManagerTest.java | 9805 | /**
* Copyright (C) 2006-2013 phloc systems
* http://www.phloc.com
* office[at]phloc[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.phloc.commons.charset;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.Arrays;
import org.junit.Ignore;
import org.junit.Test;
import com.phloc.commons.io.streams.NonBlockingBufferedReader;
import com.phloc.commons.io.streams.NonBlockingByteArrayInputStream;
import com.phloc.commons.io.streams.StreamUtils;
import com.phloc.commons.random.VerySecureRandom;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
* Test class for class {@link CharsetManager}.
*
* @author Philip Helger
*/
public final class CharsetManagerTest
{
  // getCharsetFromName: valid names resolve; null, syntactically illegal and
  // unsupported names all raise IllegalArgumentException.
  @Test
  @SuppressFBWarnings (value = "NP_NONNULL_PARAM_VIOLATION")
  public void testCharsetFromName ()
  {
    assertNotNull (CharsetManager.getCharsetFromName ("UTF-8"));
    try
    {
      CharsetManager.getCharsetFromName (null);
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
    try
    {
      // Illegal name
      CharsetManager.getCharsetFromName ("does not exist");
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
    try
    {
      // Unsupported
      CharsetManager.getCharsetFromName ("bla");
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
  }
  // getAsBytes with a charset *name*: "äbc" is 3 bytes in Latin-1 but 4 in
  // UTF-8 (ä takes two bytes); null/empty/unknown charset names are rejected.
  @SuppressWarnings ("deprecation")
  @Test
  @SuppressFBWarnings ("NP_NONNULL_PARAM_VIOLATION")
  public void testGetAsBytes ()
  {
    final String s = "äbc";
    assertEquals (3, CharsetManager.getAsBytes (s, CCharset.CHARSET_ISO_8859_1).length);
    assertEquals (4, CharsetManager.getAsBytes (s, CCharset.CHARSET_UTF_8).length);
    try
    {
      CharsetManager.getAsBytes (s, (String) null);
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
    try
    {
      CharsetManager.getAsBytes (s, "");
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
    try
    {
      CharsetManager.getAsBytes (s, "bla");
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
  }
  // getAsBytes with a Charset *object*: same byte counts; null throws NPE
  // (not IllegalArgumentException as in the name-based overload).
  @Test
  public void testGetAsBytesCharset ()
  {
    final String s = "äbc";
    assertEquals (3, CharsetManager.getAsBytes (s, CCharset.CHARSET_ISO_8859_1_OBJ).length);
    assertEquals (4, CharsetManager.getAsBytes (s, CCharset.CHARSET_UTF_8_OBJ).length);
    try
    {
      CharsetManager.getAsBytes (s, (Charset) null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
  }
  // Re-interpreting a string's bytes in another charset (name-based API).
  @SuppressWarnings ("deprecation")
  @Test
  public void testGetAsStringInOtherCharset ()
  {
    final String s = "äbc";
    assertEquals (3, CharsetManager.getAsStringInOtherCharset (s, CCharset.CHARSET_ISO_8859_1, CCharset.CHARSET_UTF_8)
                                   .length ());
    assertEquals (4, CharsetManager.getAsStringInOtherCharset (s, CCharset.CHARSET_UTF_8, CCharset.CHARSET_ISO_8859_1)
                                   .length ());
    assertNull (CharsetManager.getAsStringInOtherCharset (null, CCharset.CHARSET_ISO_8859_1, CCharset.CHARSET_UTF_8));
    assertEquals (s,
                  CharsetManager.getAsStringInOtherCharset (s, CCharset.CHARSET_ISO_8859_1, CCharset.CHARSET_ISO_8859_1));
    assertEquals (s, CharsetManager.getAsStringInOtherCharset (s, CCharset.CHARSET_UTF_8, CCharset.CHARSET_UTF_8));
    try
    {
      CharsetManager.getAsStringInOtherCharset (s, null, CCharset.CHARSET_UTF_8);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
    try
    {
      CharsetManager.getAsStringInOtherCharset (s, CCharset.CHARSET_ISO_8859_1, null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
  }
  // Same conversions using Charset objects instead of names.
  @Test
  public void testGetAsStringInOtherCharsetCharset ()
  {
    final String s = "äbc";
    assertEquals (3,
                  CharsetManager.getAsStringInOtherCharset (s,
                                                            CCharset.CHARSET_ISO_8859_1_OBJ,
                                                            CCharset.CHARSET_UTF_8_OBJ).length ());
    assertEquals (4,
                  CharsetManager.getAsStringInOtherCharset (s,
                                                            CCharset.CHARSET_UTF_8_OBJ,
                                                            CCharset.CHARSET_ISO_8859_1_OBJ).length ());
    assertNull (CharsetManager.getAsStringInOtherCharset (null,
                                                          CCharset.CHARSET_ISO_8859_1_OBJ,
                                                          CCharset.CHARSET_UTF_8_OBJ));
    assertEquals (s, CharsetManager.getAsStringInOtherCharset (s,
                                                               CCharset.CHARSET_ISO_8859_1_OBJ,
                                                               CCharset.CHARSET_ISO_8859_1_OBJ));
    assertEquals (s,
                  CharsetManager.getAsStringInOtherCharset (s, CCharset.CHARSET_UTF_8_OBJ, CCharset.CHARSET_UTF_8_OBJ));
    try
    {
      CharsetManager.getAsStringInOtherCharset (s, null, CCharset.CHARSET_UTF_8_OBJ);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
    try
    {
      CharsetManager.getAsStringInOtherCharset (s, CCharset.CHARSET_ISO_8859_1_OBJ, null);
      fail ();
    }
    catch (final NullPointerException ex)
    {}
  }
  // Round-trip of a non-Latin (Greek alpha) string through UTF-8 bytes,
  // both via byte[] conversion and via an InputStreamReader.
  @SuppressWarnings ("deprecation")
  @Test
  public void testGreek () throws Exception
  {
    final String sAlpha = "?\u03B1";
    byte [] b = CharsetManager.getAsBytes (sAlpha, CCharset.CHARSET_UTF_8);
    assertEquals (sAlpha, CharsetManager.getAsString (b, CCharset.CHARSET_UTF_8));
    assertEquals (sAlpha, CharsetManager.getAsString (b, CCharset.CHARSET_UTF_8_OBJ));
    try
    {
      CharsetManager.getAsString (b, "charset bla");
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
    b = CharsetManager.getAsBytes (sAlpha, CCharset.CHARSET_UTF_8_OBJ);
    assertEquals (sAlpha, CharsetManager.getAsString (b, CCharset.CHARSET_UTF_8));
    assertEquals (sAlpha, CharsetManager.getAsString (b, CCharset.CHARSET_UTF_8_OBJ));
    NonBlockingBufferedReader aReader = new NonBlockingBufferedReader (new InputStreamReader (new NonBlockingByteArrayInputStream (b),
                                                                                              CCharset.CHARSET_UTF_8));
    assertEquals (sAlpha, aReader.readLine ());
    StreamUtils.close (aReader);
    aReader = new NonBlockingBufferedReader (new InputStreamReader (new NonBlockingByteArrayInputStream (b),
                                                                    CCharset.CHARSET_UTF_8_OBJ));
    assertEquals (sAlpha, aReader.readLine ());
    StreamUtils.close (aReader);
  }
  // Guard test: "ä" must be a single char, otherwise the sources were not
  // compiled with UTF-8 and every other test here is meaningless.
  @Test
  public void testJavaCompiledAsUTF8 ()
  {
    final String s = "ä";
    if (s.length () != 1)
      throw new IllegalStateException ("Seems like the Java Source files were not compiled with UTF-8 encoding!");
  }
  // UTF-8 byte counting for strings, chars and code points, including the
  // surrogate range (which counts as 0) and out-of-range code points.
  @Test
  public void testGetUTF8ByteCount ()
  {
    assertEquals (0, CharsetManager.getUTF8ByteCount ((String) null));
    assertEquals (0, CharsetManager.getUTF8ByteCount ((char []) null));
    assertEquals (2, CharsetManager.getUTF8ByteCount ("\0"));
    assertEquals (2, CharsetManager.getUTF8ByteCount ("ä"));
    assertEquals (2, CharsetManager.getUTF8ByteCount ('ä'));
    assertEquals (0, CharsetManager.getUTF8ByteCount (""));
    assertEquals (3, CharsetManager.getUTF8ByteCount ("abc"));
    assertEquals (9, CharsetManager.getUTF8ByteCount ("abcäöü"));
    assertEquals (3, CharsetManager.getUTF8ByteCount ("\ud7ff"));
    assertEquals (0, CharsetManager.getUTF8ByteCount ("\udfff"));
    assertEquals (0, CharsetManager.getUTF8ByteCount ("\ue000"));
    assertEquals (0, CharsetManager.getUTF8ByteCount ("\uffff"));
    try
    {
      CharsetManager.getUTF8ByteCount (0x110000);
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
    try
    {
      CharsetManager.getUTF8ByteCount (0x10000);
      fail ();
    }
    catch (final IllegalArgumentException ex)
    {}
  }
  // Property-style test (disabled): counted UTF-8 length must equal the
  // length of the actually encoded bytes for random BMP strings.
  @Test
  @Ignore
  public void testGetUTF8ByteCountRandom ()
  {
    for (int i = 0; i < 1000; i++)
    {
      // Build random String with 20 chars
      final int nStringLen = 20;
      final StringBuilder aSB = new StringBuilder ();
      for (int x = 0; x < nStringLen; ++x)
      {
        final int c = VerySecureRandom.getInstance ().nextInt (Character.MIN_HIGH_SURROGATE);
        aSB.append ((char) c);
      }
      // Count
      final int nCounted = CharsetManager.getUTF8ByteCount (aSB.toString ());
      assertTrue (nCounted >= nStringLen);
      // Convert and count
      final byte [] b = CharsetManager.getAsBytes (aSB.toString (), CCharset.CHARSET_UTF_8_OBJ);
      assertTrue (b.length >= nStringLen);
      // Must be equals
      assertEquals (Arrays.toString (b), nCounted, b.length);
    }
  }
  // Smoke test for the charset enumeration API.
  @Test
  public void testBasic ()
  {
    assertNotNull (CharsetManager.getAllCharsets ());
    assertTrue (CharsetManager.getAllCharsets ().size () > 0);
  }
}
| apache-2.0 |
support-project/knowledge | src/main/java/org/support/project/knowledge/dao/MailPostsDao.java | 605 | package org.support.project.knowledge.dao;
import org.support.project.di.Container;
import org.support.project.di.DI;
import org.support.project.di.Instance;
import org.support.project.knowledge.dao.gen.GenMailPostsDao;
/**
 * DAO for knowledge entries posted via e-mail.
 * (Original Japanese comment: "メールから投稿" = "posted from mail".)
 */
@DI(instance = Instance.Singleton)
public class MailPostsDao extends GenMailPostsDao {
    /** SerialVersion */
    private static final long serialVersionUID = 1L;
    /**
     * Get instance from DI container.
     * @return instance
     */
    public static MailPostsDao get() {
        return Container.getComp(MailPostsDao.class);
    }
}
| apache-2.0 |
JeroMiya/hifi-typescript | HiFi/examples/crazylegs.js | 1448 | //
// crazylegs.js
// examples
//
// Created by Andrzej Kapolka on 3/6/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Oscillation speed (radians/sec multiplier) and swing amplitude in degrees.
var FREQUENCY = 5.0;
var AMPLITUDE = 45.0;
// Accumulated animation time, advanced every update tick.
var cumulativeTime = 0.0;
// Dump the avatar's joint list to the log as "jointIndex = <name> = <index>"
// mappings, bracketed by start/end markers for easy copy/paste.
var jointList = MyAvatar.getJointNames();
var jointMappings = "\n# Joint list start";
for (var i = 0; i < jointList.length; i++) {
    jointMappings = jointMappings + "\njointIndex = " + jointList[i] + " = " + i;
}
print(jointMappings + "\n# Joint list end");
Script.update.connect(function(deltaTime) {
    cumulativeTime += deltaTime;
    // Upper legs swing in opposite phase (walking motion)...
    MyAvatar.setJointData("RightUpLeg", Quat.fromPitchYawRollDegrees(AMPLITUDE * Math.sin(cumulativeTime * FREQUENCY), 0.0, 0.0));
    MyAvatar.setJointData("LeftUpLeg", Quat.fromPitchYawRollDegrees(-AMPLITUDE * Math.sin(cumulativeTime * FREQUENCY), 0.0, 0.0));
    // ...while lower legs bend in the 0..2*AMPLITUDE range (knees only bend
    // one way), offset by +1 so the angle never goes negative.
    MyAvatar.setJointData("RightLeg", Quat.fromPitchYawRollDegrees(
        AMPLITUDE * (1.0 + Math.sin(cumulativeTime * FREQUENCY)),0.0, 0.0));
    MyAvatar.setJointData("LeftLeg", Quat.fromPitchYawRollDegrees(
        AMPLITUDE * (1.0 - Math.sin(cumulativeTime * FREQUENCY)),0.0, 0.0));
});
// Restore default joint animation when the script stops.
Script.scriptEnding.connect(function() {
    MyAvatar.clearJointData("RightUpLeg");
    MyAvatar.clearJointData("LeftUpLeg");
    MyAvatar.clearJointData("RightLeg");
    MyAvatar.clearJointData("LeftLeg");
});
| apache-2.0 |
JonasSyrstad/Stardust.Interstellar.Rest | Stardust.Interstellar.Test/App_Start/FilterConfig.cs | 281 | using System.Web;
using System.Web.Mvc;
namespace Stardust.Interstellar.Test
{
    /// <summary>MVC filter configuration for the test web application.</summary>
    public class FilterConfig
    {
        /// <summary>
        /// Registers application-wide MVC filters; invoked once at startup.
        /// </summary>
        /// <param name="filters">The global filter collection to populate.</param>
        public static void RegisterGlobalFilters(GlobalFilterCollection filters)
        {
            // Route unhandled controller exceptions to the standard error view.
            filters.Add(new HandleErrorAttribute());
        }
    }
}
| apache-2.0 |
EpicMinecraftModders/EquivalentExperienceExchange | src/main/java/src/com/github/epicminecraftmodders/eee/TabEEE.java | 323 | package com.github.epicminecraftmodders.eee;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.Item;
/** Creative-mode inventory tab for the Equivalent Experience Exchange mod. */
public class TabEEE extends CreativeTabs {

    /**
     * @param index position of the tab in the creative GUI
     * @param label unlocalized label key for the tab
     */
    public TabEEE(int index, String label) {
        super(index, label);
    }

    // Icon rendered on the tab: the mod's experience diamond item.
    @Override
    public Item getTabIconItem() {
        return ModEEE.experienceDiamond;
    }
}
| apache-2.0 |
InjectMe/InjectMe | src/InjectMe.Tests/Activation/TransientHierarchicalActivationTests.cs | 1239 | using InjectMe.Construction;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace InjectMe.Tests.Activation
{
[TestClass]
public class TransientHierarchicalActivationTests : TestBase
{
[TestMethod]
public void HierarchicalTransientServicesShouldActivateTheSameInstance()
{
// Arrange
var container = Container.Create(c =>
{
c.RegisterInstance(new ConstructionFactorySettings { UsePropertyInjection = true });
c.RegisterTransient<Foo>();
c.RegisterTransient<Bar>();
c.RegisterTransient<Baz>();
});
// Act
var foo = container.ServiceLocator.Resolve<Foo>();
var bar = container.ServiceLocator.Resolve<Bar>();
var baz = container.ServiceLocator.Resolve<Baz>();
// Assert
Assert.AreSame(foo.Baz, foo.Bar.Baz);
Assert.AreNotSame(bar.Baz, baz);
}
class Foo
{
public Bar Bar { get; set; }
public Baz Baz { get; set; }
}
class Bar
{
public Baz Baz { get; set; }
}
class Baz
{
}
}
} | apache-2.0 |
bmwcarit/joynr | java/core/libjoynr/src/main/java/io/joynr/proxy/DiscoverySettingsStorage.java | 2864 | /*
* #%L
* %%
* Copyright (C) 2020 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.joynr.proxy;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import io.joynr.messaging.ConfigurableMessagingSettings;
import io.joynr.util.ObjectMapper;
import joynr.system.DiscoveryAsync;
/**
 * Immutable holder bundling the collaborators and timing defaults needed for
 * proxy building with discovery support; populated once via constructor injection.
 */
public class DiscoverySettingsStorage {
    private final ProxyBuilderFactory proxyBuilderFactory;
    private final ObjectMapper objectMapper;
    private final DiscoveryAsync localDiscoveryAggregator;
    // Upper bound (ms) allowed for a message TTL.
    private final long maxMessagingTtl;
    // Default discovery timeout (ms) used when callers do not override it.
    private final long defaultDiscoveryTimeoutMs;
    // Default interval (ms) between discovery retry attempts.
    private final long defaultDiscoveryRetryIntervalMs;

    /**
     * Injected constructor; the three long values are bound to the named
     * messaging configuration properties.
     */
    @Inject
    // CHECKSTYLE IGNORE ParameterNumber FOR NEXT 1 LINES
    public DiscoverySettingsStorage(ProxyBuilderFactory proxyBuilderFactory,
                                    ObjectMapper objectMapper,
                                    DiscoveryAsync localDiscoveryAggregator,
                                    @Named(ConfigurableMessagingSettings.PROPERTY_MESSAGING_MAXIMUM_TTL_MS) long maxMessagingTtl,
                                    @Named(ConfigurableMessagingSettings.PROPERTY_DISCOVERY_DEFAULT_TIMEOUT_MS) long defaultDiscoveryTimeoutMs,
                                    @Named(ConfigurableMessagingSettings.PROPERTY_DISCOVERY_DEFAULT_RETRY_INTERVAL_MS) long defaultDiscoveryRetryIntervalMs) {
        this.proxyBuilderFactory = proxyBuilderFactory;
        this.objectMapper = objectMapper;
        this.localDiscoveryAggregator = localDiscoveryAggregator;
        this.maxMessagingTtl = maxMessagingTtl;
        this.defaultDiscoveryTimeoutMs = defaultDiscoveryTimeoutMs;
        this.defaultDiscoveryRetryIntervalMs = defaultDiscoveryRetryIntervalMs;
    }

    /** @return factory used to create proxy builders */
    public ProxyBuilderFactory getProxyBuilderFactory() {
        return proxyBuilderFactory;
    }

    /** @return JSON object mapper shared by proxy machinery */
    public ObjectMapper getObjectMapper() {
        return objectMapper;
    }

    /** @return aggregator handling local discovery requests */
    public DiscoveryAsync getLocalDiscoveryAggregator() {
        return localDiscoveryAggregator;
    }

    /** @return maximum allowed messaging TTL in milliseconds */
    public long getMaxMessagingTtl() {
        return maxMessagingTtl;
    }

    /** @return default discovery timeout in milliseconds */
    public long getDefaultDiscoveryTimeoutMs() {
        return defaultDiscoveryTimeoutMs;
    }

    /** @return default discovery retry interval in milliseconds */
    public long getDefaultDiscoveryRetryIntervalMs() {
        return defaultDiscoveryRetryIntervalMs;
    }
}
| apache-2.0 |
Nodstuff/hapi-fhir | hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/valuesets/AuditSourceTypeEnumFactory.java | 3278 | package org.hl7.fhir.instance.model.valuesets;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Sat, Aug 22, 2015 23:00-0400 for FHIR v0.5.0
import org.hl7.fhir.instance.model.EnumFactory;
public class AuditSourceTypeEnumFactory implements EnumFactory<AuditSourceType> {

  /** Codes of the audit-source-type value set, index-aligned with {@link #TYPES}. */
  private static final String[] CODES = { "1", "2", "3", "4", "5", "6", "7", "8", "9" };

  /** Enum constants, index-aligned with {@link #CODES}. */
  private static final AuditSourceType[] TYPES = {
      AuditSourceType._1, AuditSourceType._2, AuditSourceType._3,
      AuditSourceType._4, AuditSourceType._5, AuditSourceType._6,
      AuditSourceType._7, AuditSourceType._8, AuditSourceType._9 };

  /**
   * Parses a value-set code into its enum constant.
   * Returns null for a null/empty code; throws for an unrecognized one.
   */
  public AuditSourceType fromCode(String codeString) throws IllegalArgumentException {
    if (codeString == null || "".equals(codeString))
      return null;
    for (int i = 0; i < CODES.length; i++) {
      if (CODES[i].equals(codeString))
        return TYPES[i];
    }
    throw new IllegalArgumentException("Unknown AuditSourceType code '"+codeString+"'");
  }

  /**
   * Maps an enum constant back to its value-set code.
   * Unknown (or null) constants yield "?", matching the generated fallback.
   */
  public String toCode(AuditSourceType code) {
    for (int i = 0; i < TYPES.length; i++) {
      if (code == TYPES[i])
        return CODES[i];
    }
    return "?";
  }

}
| apache-2.0 |
metaborg/spoofax | org.metaborg.spoofax.core/src/main/java/org/metaborg/spoofax/core/syntax/SourceAttachment.java | 2488 | package org.metaborg.spoofax.core.syntax;
import static org.spoofax.jsglr.client.imploder.ImploderAttachment.hasImploderOrigin;
import static org.spoofax.terms.attachments.OriginAttachment.getOrigin;
import org.apache.commons.vfs2.FileObject;
import org.metaborg.core.resource.IResourceService;
import org.spoofax.interpreter.terms.ISimpleTerm;
import org.spoofax.jsglr.client.imploder.ImploderAttachment;
import org.spoofax.terms.attachments.AbstractTermAttachment;
import org.spoofax.terms.attachments.ParentAttachment;
import org.spoofax.terms.attachments.TermAttachmentType;
import org.spoofax.terms.attachments.VolatileTermAttachmentType;
/**
 * A tree-wide source resource and parse controller attachment.
 *
 * Uses {@link ParentAttachment} to identify the root of a tree, where this attachment is stored.
 */
public class SourceAttachment extends AbstractTermAttachment {
    private static final long serialVersionUID = -8114392265614382463L;

    /** Attachment type key under which the source resource is stored on the root term. */
    public static final TermAttachmentType<SourceAttachment> TYPE = new VolatileTermAttachmentType<>(
        SourceAttachment.class);

    // Not serialized: VFS file objects are runtime handles.
    private transient final FileObject resource;

    private SourceAttachment(FileObject resource) {
        this.resource = resource;
    }

    /** @return the source file this tree was parsed from */
    public FileObject getFile() {
        return resource;
    }

    public TermAttachmentType<SourceAttachment> getAttachmentType() {
        return TYPE;
    }

    /**
     * Resolves the source resource of a term tree.
     *
     * First checks for an explicit {@link SourceAttachment} on the tree root; failing
     * that, falls back to locating an imploder attachment (descending into first
     * subterms, then following the origin term) and resolving its filename.
     *
     * @return the resource, or null if no imploder/origin information is available
     */
    public static FileObject getResource(ISimpleTerm term, IResourceService resourceService) {
        final SourceAttachment resource = ParentAttachment.getRoot(term).getAttachment(TYPE);
        if(resource != null) {
            return resource.resource;
        }
        // No explicit attachment: walk down the leftmost spine until a term with
        // imploder origin information is found (or we run out of subterms).
        while(!hasImploderOrigin(term) && term.getSubtermCount() > 0) {
            term = term.getSubterm(0);
        }
        // If the term itself lacks an imploder attachment, try its origin term.
        if(term.getAttachment(ImploderAttachment.TYPE) == null) {
            term = getOrigin(term);
        }
        if(term == null || term.getAttachment(ImploderAttachment.TYPE) == null) {
            return null;
        }
        final String fileName = ImploderAttachment.getFilename(term);
        return resourceService.resolve(fileName);
    }

    /**
     * Sets the resource for a term tree. Should only be applied to the root of a tree.
     */
    public static void putSource(ISimpleTerm term, FileObject resource) {
        ISimpleTerm root = ParentAttachment.getRoot(term);
        assert term == root;
        root.putAttachment(new SourceAttachment(resource));
    }
}
| apache-2.0 |
karussell/fastutil | src/it/unimi/dsi/fastutil/bytes/ByteHeapIndirectPriorityQueue.java | 8290 | /* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/*
* Copyright (C) 2003-2013 Paolo Boldi and Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.bytes;
import it.unimi.dsi.fastutil.ints.IntArrays;
import java.util.NoSuchElementException;
/** A type-specific heap-based indirect priority queue.
 *
 * <P>Instances of this class use an additional <em>inversion array</em>, of the same length of the reference array,
 * to keep track of the heap position containing a given element of the reference array. The priority queue is
 * represented using a heap. The heap is enlarged as needed, but it is never
 * shrunk. Use the {@link #trim()} method to reduce its size, if necessary.
 *
 * <P>This implementation does <em>not</em> allow one to enqueue several times the same index.
 */
public class ByteHeapIndirectPriorityQueue extends ByteHeapSemiIndirectPriorityQueue {
    /** The inversion array. Invariant: inv[i] is the heap position of index i, or -1 if i is not queued. */
    protected int inv[];

    /** Creates a new empty queue with a given capacity and comparator.
     *
     * @param refArray the reference array.
     * @param capacity the initial capacity of this queue.
     * @param c the comparator used in this queue, or <code>null</code> for the natural order.
     */
    public ByteHeapIndirectPriorityQueue( byte[] refArray, int capacity, ByteComparator c ) {
        super( refArray, capacity, c );
        if ( capacity > 0 ) this.heap = new int[ capacity ];
        this.refArray = refArray;
        this.c = c;
        this.inv = new int[ refArray.length ];
        // -1 marks "not in the queue" for every index.
        IntArrays.fill( inv, -1 );
    }

    /** Creates a new empty queue with a given capacity and using the natural order.
     *
     * @param refArray the reference array.
     * @param capacity the initial capacity of this queue.
     */
    public ByteHeapIndirectPriorityQueue( byte[] refArray, int capacity ) {
        this( refArray, capacity, null );
    }

    /** Creates a new empty queue with capacity equal to the length of the reference array and a given comparator.
     *
     * @param refArray the reference array.
     * @param c the comparator used in this queue, or <code>null</code> for the natural order.
     */
    public ByteHeapIndirectPriorityQueue( byte[] refArray, ByteComparator c ) {
        this( refArray, refArray.length, c );
    }

    /** Creates a new empty queue with capacity equal to the length of the reference array and using the natural order.
     * @param refArray the reference array.
     */
    public ByteHeapIndirectPriorityQueue( byte[] refArray ) {
        this( refArray, refArray.length, null );
    }

    /** Wraps a given array in a queue using a given comparator.
     *
     * <P>The queue returned by this method will be backed by the given array.
     * The first <code>size</code> element of the array will be rearranged so to form a heap (this is
     * more efficient than enqueing the elements of <code>a</code> one by one).
     *
     * @param refArray the reference array.
     * @param a an array of indices into <code>refArray</code>.
     * @param size the number of elements to be included in the queue.
     * @param c the comparator used in this queue, or <code>null</code> for the natural order.
     */
    public ByteHeapIndirectPriorityQueue( final byte[] refArray, final int[] a, final int size, final ByteComparator c ) {
        this( refArray, 0, c );
        this.heap = a;
        this.size = size;
        int i = size;
        // Build the inversion array, rejecting duplicate indices (not allowed here).
        while( i-- != 0 ) {
            if ( inv[ a[ i ] ] != -1 ) throw new IllegalArgumentException( "Index " + a[ i ] + " appears twice in the heap" );
            inv[ a[ i ] ] = i;
        }
        ByteIndirectHeaps.makeHeap( refArray, a, inv, size, c );
    }

    /** Wraps a given array in a queue using a given comparator.
     *
     * <P>The queue returned by this method will be backed by the given array.
     * The elements of the array will be rearranged so to form a heap (this is
     * more efficient than enqueing the elements of <code>a</code> one by one).
     *
     * @param refArray the reference array.
     * @param a an array of indices into <code>refArray</code>.
     * @param c the comparator used in this queue, or <code>null</code> for the natural order.
     */
    public ByteHeapIndirectPriorityQueue( final byte[] refArray, final int[] a, final ByteComparator c ) {
        this( refArray, a, a.length, c );
    }

    /** Wraps a given array in a queue using the natural order.
     *
     * <P>The queue returned by this method will be backed by the given array.
     * The first <code>size</code> element of the array will be rearranged so to form a heap (this is
     * more efficient than enqueing the elements of <code>a</code> one by one).
     *
     * @param refArray the reference array.
     * @param a an array of indices into <code>refArray</code>.
     * @param size the number of elements to be included in the queue.
     */
    public ByteHeapIndirectPriorityQueue( final byte[] refArray, final int[] a, int size ) {
        this( refArray, a, size, null );
    }

    /** Wraps a given array in a queue using the natural order.
     *
     * <P>The queue returned by this method will be backed by the given array.
     * The elements of the array will be rearranged so to form a heap (this is
     * more efficient than enqueing the elements of <code>a</code> one by one).
     *
     * @param refArray the reference array.
     * @param a an array of indices into <code>refArray</code>.
     */
    public ByteHeapIndirectPriorityQueue( final byte[] refArray, final int[] a ) {
        this( refArray, a, a.length );
    }

    /** Enqueues an index; rejects indices already present (no duplicates allowed). */
    @SuppressWarnings("unchecked")
    public void enqueue( final int x ) {
        if ( inv[ x ] >= 0 ) throw new IllegalArgumentException( "Index " + x + " belongs to the queue" );
        if ( size == heap.length ) heap = IntArrays.grow( heap, size + 1 );
        // Place x at the end, record its heap position, then sift it up.
        inv[ heap[ size ] = x ] = size++;
        ByteIndirectHeaps.upHeap( refArray, heap, inv, size, size - 1, c );
    }

    /** @return whether the given reference-array index is currently queued. */
    public boolean contains( final int index ) {
        return inv[ index ] >= 0;
    }

    /** Removes and returns the first (top-priority) index; restores the heap afterwards. */
    public int dequeue() {
        if ( size == 0 ) throw new NoSuchElementException();
        final int result = heap[ 0 ];
        // Move the last element to the root and mark the removed index as absent.
        if ( --size != 0 ) inv[ heap[ 0 ] = heap[ size ] ] = 0;
        inv[ result ] = -1;
        if ( size != 0 ) ByteIndirectHeaps.downHeap( refArray, heap, inv, size, 0, c );
        return result;
    }

    /** Signals that the priority of the first element has changed. */
    public void changed() {
        ByteIndirectHeaps.downHeap( refArray, heap, inv, size, 0, c );
    }

    /** Signals that the priority of the given queued index has changed; re-heapifies in both directions. */
    public void changed( final int index ) {
        final int pos = inv[ index ];
        if ( pos < 0 ) throw new IllegalArgumentException( "Index " + index + " does not belong to the queue" );
        final int newPos = ByteIndirectHeaps.upHeap( refArray, heap, inv, size, pos, c );
        ByteIndirectHeaps.downHeap( refArray, heap, inv, size, newPos, c );
    }

    /** Rebuilds this heap in a bottom-up fashion.
     */
    public void allChanged() {
        ByteIndirectHeaps.makeHeap( refArray, heap, inv, size, c );
    }

    /** Removes a queued index; returns false if it was not present. */
    public boolean remove( final int index ) {
        final int result = inv[ index ];
        if ( result < 0 ) return false;
        inv[ index ] = -1;
        // Fill the freed slot with the last element and restore heap order around it.
        if ( result < --size ) {
            inv[ heap[ result ] = heap[ size ] ] = result;
            final int newPos = ByteIndirectHeaps.upHeap( refArray, heap, inv, size, result, c );
            ByteIndirectHeaps.downHeap( refArray, heap, inv, size, newPos, c );
        }
        return true;
    }

    /** Empties the queue and resets all inversion entries. */
    public void clear() {
        size = 0;
        IntArrays.fill( inv, -1 );
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-mq/src/main/java/com/amazonaws/services/mq/model/transform/DeleteTagsResultJsonUnmarshaller.java | 1558 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.mq.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.mq.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * DeleteTagsResult JSON Unmarshaller
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteTagsResultJsonUnmarshaller implements Unmarshaller<DeleteTagsResult, JsonUnmarshallerContext> {

    // DeleteTags has no response payload, so unmarshalling just produces an
    // empty result object; the context is intentionally unused.
    public DeleteTagsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        DeleteTagsResult deleteTagsResult = new DeleteTagsResult();

        return deleteTagsResult;
    }

    private static DeleteTagsResultJsonUnmarshaller instance;

    // NOTE(review): unsynchronized lazy init — at worst two threads each build
    // an instance; benign because the unmarshaller is stateless. This is the
    // standard pattern in generated SDK code.
    public static DeleteTagsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DeleteTagsResultJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
whisher/angular-expresso | gulp/tasks/serve.js | 912 | 'use strict';
var gulp = require('gulp');
var browserSync = require('browser-sync');
var nodemon = require('gulp-nodemon');

// Delay before reloading browsers, giving the restarted server time to come up.
var BROWSER_SYNC_RELOAD_DELAY = 3000;

gulp.task('nodemon', function (cb) {
    var started = false;

    // nodemon fires 'start' on every restart; signal gulp completion only once.
    function handleStart() {
        if (!started) {
            started = true;
            cb();
        }
    }

    // Reload connected browsers after a slight delay on each server restart.
    function handleRestart() {
        setTimeout(function () {
            browserSync.reload({
                stream: false
            });
        }, BROWSER_SYNC_RELOAD_DELAY);
    }

    return nodemon({
        // nodemon our expressjs server
        script: 'server.js',
        // watch core server file(s) that require server restart on change
        watch: ['server.js']
    })
        .on('start', handleStart)
        .on('restart', handleRestart);
});

// Proxy the express server through BrowserSync on port 3001.
module.exports = gulp.task('serve', ['nodemon'], function () {
    browserSync({
        proxy: 'http://localhost:3000',
        port: 3001,
        browser: ['firefox']
    });
}); | apache-2.0 |
kibertoad/swampmachine | swampmachine-api/src/main/java/net/kiberion/swampmachine/api/elements/ButtonContainer.java | 451 | package net.kiberion.swampmachine.api.elements;
import java.util.Collection;
/**
 * A UI element that can host a set of buttons.
 */
public interface ButtonContainer {

    /**
     * Replaces the currently displayed buttons with the given entries.
     * A {@code null} source simply clears the container.
     *
     * @param entrySource button entries to show, may be {@code null}
     */
    public default void setButtons(Collection<ButtonEntry> entrySource) {
        clear();
        if (entrySource == null) {
            return;
        }
        for (ButtonEntry entry : entrySource) {
            addButton(entry);
        }
    }

    /** Appends a single button entry to this container. */
    public void addButton(ButtonEntry buttonEntry);

    /** Removes all buttons from this container. */
    public void clear();
}
| apache-2.0 |
chinnurtb/OpenAdStack | External/HtmlScripts/uploadify/uploadify.js | 12717 | /*
Uploadify v2.1.0
Release Date: August 24, 2009
Copyright (c) 2009 Ronnie Garcia, Travis Nickels
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
// jQuery plugin registration guard: only install the plugin when jQuery exists.
if(jQuery)(
function(jQuery){
    jQuery.extend(jQuery.fn,{
        // Turns the selected element(s) into a flash (SWF) based upload button
        // and binds all uploadify lifecycle events.
        uploadify:function(options) {
            jQuery(this).each(function(){
                // NOTE(review): 'settings' is assigned without 'var', so it leaks to
                // the global scope and is shared by every uploadify instance on
                // the page. Preserved here; flagged for a future fix.
                settings = jQuery.extend({
                    id             : jQuery(this).attr('id'), // The ID of the object being Uploadified
                    uploader       : 'uploadify.swf', // The path to the uploadify swf file
                    script         : 'uploadify.php', // The path to the uploadify backend upload script
                    expressInstall : null, // The path to the express install swf file
                    folder         : '', // The path to the upload folder
                    height         : 30, // The height of the flash button
                    width          : 110, // The width of the flash button
                    cancelImg      : 'cancel.png', // The path to the cancel image for the default file queue item container
                    wmode          : 'opaque', // The wmode of the flash file
                    scriptAccess   : 'sameDomain', // Set to "always" to allow script access across domains
                    fileDataName   : 'Filedata', // The name of the file collection object in the backend upload script
                    method         : 'POST', // The method for sending variables to the backend upload script
                    queueSizeLimit : 999, // The maximum size of the file queue
                    simUploadLimit : 1, // The number of simultaneous uploads allowed
                    queueID        : false, // The optional ID of the queue container
                    displayData    : 'percentage', // Set to "speed" to show the upload speed in the default queue item
                    onInit         : function() {}, // Function to run when uploadify is initialized
                    onSelect       : function() {}, // Function to run when a file is selected
                    onQueueFull    : function() {}, // Function to run when the queue reaches capacity
                    onCheck        : function() {}, // Function to run when script checks for duplicate files on the server
                    onCancel       : function() {}, // Function to run when an item is cleared from the queue
                    onError        : function() {}, // Function to run when an upload item returns an error
                    onProgress     : function() {}, // Function to run each time the upload progress is updated
                    onComplete     : function() {}, // Function to run when an upload is completed
                    onAllComplete  : function() {} // Function to run when all uploads are completed
                }, options);

                // Directory portion of the current page URL; prepended to
                // relative folders when checking for duplicates server-side.
                var pagePath = location.pathname;
                pagePath = pagePath.split('/');
                pagePath.pop();
                pagePath = pagePath.join('/') + '/';

                // Flatten settings into flashvars for the SWF; optional values
                // are only included when set, string values are URL-escaped.
                var data = {};
                data.uploadifyID = settings.id;
                data.pagepath = pagePath;
                if (settings.buttonImg) data.buttonImg = escape(settings.buttonImg);
                if (settings.buttonText) data.buttonText = escape(settings.buttonText);
                if (settings.rollover) data.rollover = true;
                data.script = settings.script;
                data.folder = escape(settings.folder);
                if (settings.scriptData) {
                    // Serialize scriptData as an escaped query string (leading '&' stripped).
                    var scriptDataString = '';
                    for (var name in settings.scriptData) {
                        scriptDataString += '&' + name + '=' + settings.scriptData[name];
                    }
                    data.scriptData = escape(scriptDataString.substr(1));
                }
                data.width = settings.width;
                data.height = settings.height;
                data.wmode = settings.wmode;
                data.method = settings.method;
                data.queueSizeLimit = settings.queueSizeLimit;
                data.simUploadLimit = settings.simUploadLimit;
                if (settings.hideButton) data.hideButton = true;
                if (settings.fileDesc) data.fileDesc = settings.fileDesc;
                if (settings.fileExt) data.fileExt = settings.fileExt;
                if (settings.multi) data.multi = true;
                if (settings.auto) data.auto = true;
                if (settings.sizeLimit) data.sizeLimit = settings.sizeLimit;
                if (settings.checkScript) data.checkScript = settings.checkScript;
                if (settings.fileDataName) data.fileDataName = settings.fileDataName;
                if (settings.queueID) data.queueID = settings.queueID;

                // onInit may veto initialization by returning exactly false.
                if (settings.onInit() !== false) {
                    // Hide the original element and embed the SWF in its place.
                    jQuery(this).css('display','none');
                    jQuery(this).after('<div id="' + jQuery(this).attr('id') + 'Uploader"></div>');
                    swfobject.embedSWF(settings.uploader, settings.id + 'Uploader', settings.width, settings.height, '9.0.24', settings.expressInstall, data, {'quality':'high','wmode':settings.wmode,'allowScriptAccess':settings.scriptAccess});
                    // Create a default queue container unless the caller supplied one.
                    if (settings.queueID == false) {
                        jQuery("#" + jQuery(this).attr('id') + "Uploader").after('<div id="' + jQuery(this).attr('id') + 'Queue" class="uploadifyQueue"></div>');
                    }
                }
                if (typeof(settings.onOpen) == 'function') {
                    jQuery(this).bind("uploadifyOpen", settings.onOpen);
                }

                // File selected: render a queue item with name, size and cancel link.
                jQuery(this).bind("uploadifySelect", {'action': settings.onSelect, 'queueID': settings.queueID}, function(event, ID, fileObj) {
                    if (event.data.action(event, ID, fileObj) !== false) {
                        // Format the size as KB or MB with at most two decimals.
                        var byteSize = Math.round(fileObj.size / 1024 * 100) * .01;
                        var suffix = 'KB';
                        if (byteSize > 1000) {
                            byteSize = Math.round(byteSize *.001 * 100) * .01;
                            suffix = 'MB';
                        }
                        var sizeParts = byteSize.toString().split('.');
                        if (sizeParts.length > 1) {
                            byteSize = sizeParts[0] + '.' + sizeParts[1].substr(0,2);
                        } else {
                            byteSize = sizeParts[0];
                        }
                        // NOTE(review): 'fileName' and 'queue' below are implicit
                        // globals (missing 'var'); preserved as-is.
                        if (fileObj.name.length > 20) {
                            fileName = fileObj.name.substr(0,20) + '...';
                        } else {
                            fileName = fileObj.name;
                        }
                        queue = '#' + jQuery(this).attr('id') + 'Queue';
                        if (event.data.queueID) {
                            queue = '#' + event.data.queueID;
                        }
                        jQuery(queue).append('<div id="' + jQuery(this).attr('id') + ID + '" class="uploadifyQueueItem">\
<div class="cancel">\
<a href="javascript:jQuery(\'#' + jQuery(this).attr('id') + '\').uploadifyCancel(\'' + ID + '\')"><img src="' + settings.cancelImg + '" border="0" /></a>\
</div>\
<span class="fileName">' + fileName + ' (' + byteSize + suffix + ')</span><span class="percentage"></span>\
<div class="uploadifyProgress">\
<div id="' + jQuery(this).attr('id') + ID + 'ProgressBar" class="uploadifyProgressBar"><!--Progress Bar--></div>\
</div>\
</div>');
                    }
                });
                if (typeof(settings.onSelectOnce) == 'function') {
                    jQuery(this).bind("uploadifySelectOnce", settings.onSelectOnce);
                }
                jQuery(this).bind("uploadifyQueueFull", {'action': settings.onQueueFull}, function(event, queueSizeLimit) {
                    if (event.data.action(event, queueSizeLimit) !== false) {
                        // alert('The queue is full. The maximum number of creatives you can upload at once is ' + queueSizeLimit + '.');
                    }
                });

                // Duplicate check: POST the queue to checkScript; for each
                // reported existing file ask the user whether to replace it,
                // then start the (remaining) uploads.
                jQuery(this).bind("uploadifyCheckExist", {'action': settings.onCheck}, function(event, checkScript, fileQueueObj, folder, single) {
                    var postData = new Object();
                    postData = fileQueueObj;
                    postData.folder = pagePath + folder;
                    if (single) {
                        // Single-file mode: the loop captures the (only) key.
                        for (var ID in fileQueueObj) {
                            var singleFileID = ID;
                        }
                    }
                    jQuery.post(checkScript, postData, function(data) {
                        for(var key in data) {
                            if (event.data.action(event, checkScript, fileQueueObj, folder, single) !== false) {
                                var replaceFile = confirm("Do you want to replace the file " + data[key] + "?");
                                if (!replaceFile) {
                                    document.getElementById(jQuery(event.target).attr('id') + 'Uploader').cancelFileUpload(key, true,true);
                                }
                            }
                        }
                        if (single) {
                            document.getElementById(jQuery(event.target).attr('id') + 'Uploader').startFileUpload(singleFileID, true);
                        } else {
                            document.getElementById(jQuery(event.target).attr('id') + 'Uploader').startFileUpload(null, true);
                        }
                    }, "json");
                });

                // Cancel: fade out and remove the queue item (instantly when clearFast).
                jQuery(this).bind("uploadifyCancel", {'action': settings.onCancel}, function(event, ID, fileObj, data, clearFast) {
                    if (event.data.action(event, ID, fileObj, data, clearFast) !== false) {
                        var fadeSpeed = (clearFast == true) ? 0 : 250;
                        jQuery("#" + jQuery(this).attr('id') + ID).fadeOut(fadeSpeed, function() { jQuery(this).remove() });
                    }
                });
                if (typeof(settings.onClearQueue) == 'function') {
                    jQuery(this).bind("uploadifyClearQueue", settings.onClearQueue);
                }

                // Accumulates [ID, fileObj, errorObj] triples; reset on all-complete.
                var errorArray = [];
                jQuery(this).bind("uploadifyError", {'action': settings.onError}, function(event, ID, fileObj, errorObj) {
                    if (event.data.action(event, ID, fileObj, errorObj) !== false) {
                        var fileArray = new Array(ID, fileObj, errorObj);
                        errorArray.push(fileArray);
                        jQuery("#" + jQuery(this).attr('id') + ID + " .percentage").text(" - " + errorObj.type + " Error");
                        jQuery("#" + jQuery(this).attr('id') + ID).addClass('uploadifyError');
                    }
                });

                // Progress: grow the bar and show percentage/speed per displayData.
                // NOTE(review): 'displayData' below is an implicit global (missing 'var').
                jQuery(this).bind("uploadifyProgress", {'action': settings.onProgress, 'toDisplay': settings.displayData}, function(event, ID, fileObj, data) {
                    if (event.data.action(event, ID, fileObj, data) !== false) {
                        jQuery("#" + jQuery(this).attr('id') + ID + "ProgressBar").css('width', data.percentage + '%');
                        if (event.data.toDisplay == 'percentage') displayData = ' - ' + data.percentage + '%';
                        if (event.data.toDisplay == 'speed') displayData = ' - ' + data.speed + 'KB/s';
                        if (event.data.toDisplay == null) displayData = ' ';
                        jQuery("#" + jQuery(this).attr('id') + ID + " .percentage").text(displayData);
                    }
                });

                // Complete: mark the item done and remove it after a short fade.
                jQuery(this).bind("uploadifyComplete", {'action': settings.onComplete}, function(event, ID, fileObj, response, data) {
                    if (event.data.action(event, ID, fileObj, unescape(response), data) !== false) {
                        jQuery("#" + jQuery(this).attr('id') + ID + " .percentage").text(' - Completed');
                        jQuery("#" + jQuery(this).attr('id') + ID).fadeOut(250, function() { jQuery(this).remove()});
                    }
                });
                if (typeof(settings.onAllComplete) == 'function') {
                    jQuery(this).bind("uploadifyAllComplete", {'action': settings.onAllComplete}, function(event, uploadObj) {
                        if (event.data.action(event, uploadObj) !== false) {
                            errorArray = [];
                        }
                    });
                }
            });
        },

        // Gets or sets a setting on the embedded SWF. With a null settingValue
        // this acts as a getter (scriptData is decoded back into an object).
        uploadifySettings:function(settingName, settingValue, resetObject) {
            var returnValue = false;
            jQuery(this).each(function() {
                if (settingName == 'scriptData' && settingValue != null) {
                    // Merge (or replace, when resetObject) and re-encode scriptData.
                    if (resetObject) {
                        var scriptData = settingValue;
                    } else {
                        var scriptData = jQuery.extend(settings.scriptData, settingValue);
                    }
                    var scriptDataString = '';
                    for (var name in scriptData) {
                        scriptDataString += '&' + name + '=' + escape(scriptData[name]);
                    }
                    settingValue = scriptDataString.substr(1);
                }
                returnValue = document.getElementById(jQuery(this).attr('id') + 'Uploader').updateSettings(settingName, settingValue);
            });
            if (settingValue == null) {
                if (settingName == 'scriptData') {
                    // Decode the query-string form back into a plain object.
                    var returnSplit = unescape(returnValue).split('&');
                    var returnObj = new Object();
                    for (var i = 0; i < returnSplit.length; i++) {
                        var iSplit = returnSplit[i].split('=');
                        returnObj[iSplit[0]] = iSplit[1];
                    }
                    returnValue = returnObj;
                }
                return returnValue;
            }
        },

        // Starts uploading the given queue item (or the whole queue when ID is null).
        uploadifyUpload:function(ID) {
            jQuery(this).each(function() {
                document.getElementById(jQuery(this).attr('id') + 'Uploader').startFileUpload(ID, false);
            });
        },

        // Cancels the given queue item inside the SWF.
        uploadifyCancel:function(ID) {
            jQuery(this).each(function() {
                document.getElementById(jQuery(this).attr('id') + 'Uploader').cancelFileUpload(ID, true, false);
            });
        },

        // Clears the SWF's entire upload queue.
        uploadifyClearQueue:function() {
            jQuery(this).each(function() {
                document.getElementById(jQuery(this).attr('id') + 'Uploader').clearFileUploadQueue(false);
            });
        }
    })
})(jQuery); | apache-2.0 |
tascape/reactor | src/main/java/com/tascape/reactor/Utils.java | 19629 | /*
* Copyright (c) 2015 - present Nebula Bay.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tascape.reactor;
import java.awt.AWTException;
import java.awt.Desktop;
import java.awt.Rectangle;
import java.awt.Robot;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.imageio.ImageIO;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Collection of utility methods.
* <p>
* @author linsong wang
*/
public class Utils {
    private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
    // Marker values appended to a command's captured output when the expected
    // pass/fail phrase is found; callers can look for these in the returned list.
    public static final String PASS = Utils.class + "_PASS";
    public static final String FAIL = Utils.class + "_FAIL";
    private static final String DATE_TIME_FORMAT = "yyyy-MM-dd' 'HH:mm:ss.SSS";
    private static final String TIME_FORMAT = "HH:mm:ss.SSS";
    // Underscore-separated variant, safe for use in file names.
    private static final String DATE_TIME_STRING = "yyyy_MM_dd_HH_mm_ss_SSS";
    /**
     * system specific file path separator, "/" for Linux, and "\" for Windows, etc
     */
    public static final String FS = System.getProperty("file.separator");
    /**
     * Opens the given file with the platform's default desktop application.
     * Failures are logged as warnings and otherwise ignored.
     *
     * @param file file to open
     */
    public static void openFile(File file) {
        Desktop desktop = Desktop.getDesktop();
        try {
            desktop.open(file);
        } catch (IOException ex) {
            LOG.warn("Cannot open file {}", file, ex);
        }
    }
    // Utility class - not instantiable.
    private Utils() throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Please use Utils.method");
    }
    // Formats epoch milliseconds using DATE_TIME_FORMAT.
    public static String formatDateTime(long milliSinceEpoch) {
        return new SimpleDateFormat(DATE_TIME_FORMAT).format(new Date(milliSinceEpoch));
    }
    // Formats epoch milliseconds using TIME_FORMAT (time of day only).
    public static String formatTime(long milliSinceEpoch) {
        return new SimpleDateFormat(TIME_FORMAT).format(new Date(milliSinceEpoch));
    }
    // Formats epoch milliseconds with a caller-supplied SimpleDateFormat pattern.
    public static String formatTime(long milliSinceEpoch, String format) {
        DateFormat formatter = new SimpleDateFormat(format);
        return formatter.format(new Date(milliSinceEpoch));
    }
    // Current time as an underscore-separated, filesystem-safe string.
    public static String getCurrentTime() {
        return new SimpleDateFormat(DATE_TIME_STRING).format(System.currentTimeMillis());
    }
    public static String getCurrentTime(String format) {
        DateFormat formatter = new SimpleDateFormat(format);
        return formatter.format(new Date());
    }
    /**
     * Executes command, and waits for the expected phrase in console printout.
     *
     * @param command command line
     *
     * @return console output as a list of strings
     *
     * @throws IOException for command error
     * @throws InterruptedException when interrupted
     */
    public static List<String> cmd(String command) throws IOException, InterruptedException {
        // NOTE(review): naive split on single spaces; quoted arguments
        // containing spaces are not supported by this overload
        return cmd(command.split(" "));
    }
    /**
     * Executes command, and waits for the expected phrase in console printout.
     *
     * @param command command line
     *
     * @return console output as a list of strings
     *
     * @throws IOException for command error
     * @throws InterruptedException when interrupted
     */
    public static List<String> cmd(String[] command) throws IOException, InterruptedException {
        // default: no pass/fail phrase, 5 minute timeout, inherited working dir
        return cmd(command, null, null, 300000L, null);
    }
    /**
     * Executes command, and waits for the expected phrase in console printout.
     *
     * @param command command line
     * @param workindDir working directory
     * @param timeout in millisecond
     *
     * @return console output as a list of strings
     *
     * @throws IOException for command error
     * @throws InterruptedException when interrupted
     */
    public static List<String> cmdWithWorkingDir(String[] command, String workindDir, final long timeout)
        throws IOException, InterruptedException {
        return cmd(command, null, null, timeout, workindDir);
    }
    /**
     * Executes command, and waits for the expected phrase in console printout.
     * <p>
     * @param command command line
     * @param expected some expected output
     *
     * @return console output as a list of strings
     *
     * @throws IOException for command error
     * @throws InterruptedException when interrupted
     */
    public static List<String> cmd(String[] command, String expected) throws IOException, InterruptedException {
        return cmd(command, expected, null, 300000L, null);
    }
    /**
     * Executes command, and waits for the expected pass/fail phrase in console printout within timeout,
     *
     * @param command command line
     * @param pass skip checking if null
     * @param fail skip checking if null
     * @param timeout set 0 for not to check the output message, otherwise, waiting for timeout
     * @param workingDir working directory
     *
     * @return console output as a list of strings
     *
     * @throws IOException for command error
     * @throws InterruptedException when interrupted
     */
    public static List<String> cmd(String[] command, final String pass, final String fail, final long timeout,
        final String workingDir) throws IOException, InterruptedException {
        ProcessBuilder pb = new ProcessBuilder(command);
        if (workingDir != null) {
            pb.directory(new File(workingDir));
        }
        // merge stderr into stdout so a single reader sees everything
        pb.redirectErrorStream(true);
        LOG.debug("Running command: " + pb.command().toString().replace(",", ""));
        final Process p = pb.start();
        Thread thread;
        final List<String> output = new ArrayList<>();
        if (timeout == 0) {
            // fire-and-forget mode: do not read or wait for the process at all
            LOG.debug("This is a start-and-exit command");
            output.add(PASS);
            return output;
        } else {
            // watchdog thread: destroys the subprocess if it is still alive
            // after the given timeout
            thread = new Thread() {
                @Override
                public void run() {
                    try {
                        LOG.debug("Command timeouts in {} ms", timeout);
                        Thread.sleep(timeout);
                        try {
                            p.exitValue();
                        } catch (IllegalThreadStateException ex) {
                            // exitValue() throwing means the process is still running
                            LOG.debug("killing subprocess {} - {}", p, ex.getMessage());
                            p.destroy();
                        }
                    } catch (InterruptedException ex) {
                        // normal path: main flow interrupts us once the process ends
                        LOG.trace(ex.getMessage());
                    }
                }
            };
            thread.setName(Thread.currentThread().getName() + "-" + p.hashCode());
            thread.setDaemon(true);
            thread.start();
        }
        // drain the combined stdout/stderr into the output list
        BufferedReader stdIn = new BufferedReader(new InputStreamReader(p.getInputStream()));
        String c = p + " - ";
        for (String line = stdIn.readLine(); line != null;) {
            LOG.trace("{}{}", c, line);
            output.add(line);
            try {
                line = stdIn.readLine();
            } catch (IOException ex) {
                // stream may break when the watchdog destroys the process
                LOG.warn(ex.getMessage());
                break;
            }
        }
        LOG.debug("Command exit code {}", p.waitFor());
        // process finished: cancel the watchdog
        thread.interrupt();
        try {
            stdIn.close();
        } catch (IOException ex) {
            LOG.warn("", ex);
        }
        // scan the captured output for the pass/fail phrase; the PASS/FAIL
        // marker is appended while iterating, which is safe only because the
        // loop breaks immediately after the add
        for (String s : output) {
            if (pass != null && (s.contains(pass) || s.matches(pass))) {
                output.add(PASS);
                break;
            } else if (fail != null && s.contains(fail)) {
                output.add(FAIL);
                break;
            }
        }
        return output;
    }
    // Starts the command without reading its output or waiting for it.
    public static Process cmdAsync(String[] commands) throws IOException {
        ProcessBuilder pb = new ProcessBuilder(commands);
        pb.redirectErrorStream(true);
        LOG.debug("Running command {}", pb.command().toString().replaceAll(",", ""));
        return pb.start();
    }
    public static Process cmd(String[] commands, final File file) throws IOException {
        return cmd(commands, file, null, new String[0]);
    }
    public static Process cmd(String[] commands, final File file, final File workingDir) throws IOException {
        return cmd(commands, file, workingDir, new String[0]);
    }
    /**
     * Starts the command and streams its console output into the given file on a
     * background daemon thread, skipping lines that match/contain any ignoreRegex.
     * A JVM shutdown hook destroys the subprocess so it cannot outlive the JVM.
     */
    public static Process cmd(String[] commands, final File file, final File workingDir, final String... ignoreRegex)
        throws IOException {
        FileUtils.touch(file);
        LOG.debug("Saving console output to {}", file.getAbsolutePath());
        ProcessBuilder pb = new ProcessBuilder(commands);
        pb.redirectErrorStream(true);
        pb.directory(workingDir);
        LOG.debug("Running command {}: {}", workingDir == null ? "" : workingDir.getAbsolutePath(),
            pb.command().toString().replaceAll(",", ""));
        final Process p = pb.start();
        Thread t = new Thread(Thread.currentThread().getName() + "-" + p.hashCode()) {
            @Override
            public void run() {
                BufferedReader stdIn = new BufferedReader(new InputStreamReader(p.getInputStream()));
                String console = "console-" + stdIn.hashCode();
                try (PrintWriter pw = new PrintWriter(file)) {
                    for (String line = stdIn.readLine(); line != null;) {
                        LOG.trace("{}: {}", console, line);
                        if (null == ignoreRegex || ignoreRegex.length == 0) {
                            pw.println(line);
                        } else {
                            // drop the line if it contains or matches any filter
                            boolean ignore = false;
                            for (String regex : ignoreRegex) {
                                if (!regex.isEmpty() && (line.contains(regex) || line.matches(regex))) {
                                    ignore = true;
                                    break;
                                }
                            }
                            if (!ignore) {
                                pw.println(line);
                            }
                        }
                        pw.flush();
                        line = stdIn.readLine();
                    }
                } catch (IOException ex) {
                    LOG.warn(ex.getMessage());
                }
                LOG.trace("command is done");
            }
        };
        t.setDaemon(true);
        t.start();
        // make sure the subprocess does not outlive the JVM
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                if (p != null) {
                    p.destroy();
                }
            }
        });
        return p;
    }
    /**
     * Blocks until the process exits; a daemon watchdog thread destroys the
     * process if it is still running after timeoutSeconds.
     */
    public static void waitForProcess(final Process process, final int timeoutSeconds) {
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    Thread.sleep(timeoutSeconds * 1000);
                    // NOTE(review): "Timeed" is a typo in this log message
                    LOG.warn("Timeed out in {} seconds", timeoutSeconds);
                } catch (InterruptedException ex) {
                    LOG.warn(ex.getMessage());
                } finally {
                    if (process != null) {
                        process.destroy();
                    }
                }
            }
        };
        t.setDaemon(true);
        t.start();
        if (process != null) {
            int exitValue;
            try {
                exitValue = process.waitFor();
                LOG.debug("process {} exits with {}", process, exitValue);
            } catch (InterruptedException ex) {
                LOG.warn(ex.getMessage());
            }
        }
    }
    // Drains the process output (logging each line) until EOF or timeout.
    public static void waitForOutput(final Process process, final int timeoutSeconds) throws IOException {
        waitForOutputLine(process, null, timeoutSeconds);
    }
    /**
     * Reads the process output line by line until lineExpected is seen, EOF is
     * reached, or the watchdog kills the process after timeoutSeconds.
     *
     * @return true if a line containing lineExpected was found
     */
    public static boolean waitForOutputLine(final Process process, String lineExpected, final int timeoutSeconds)
        throws IOException {
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    Thread.sleep(timeoutSeconds * 1000);
                    LOG.warn("Timed out in {} seconds", timeoutSeconds);
                } catch (InterruptedException ex) {
                    LOG.warn(ex.getMessage());
                } finally {
                    if (process != null) {
                        process.destroy();
                    }
                }
            }
        };
        if (process == null) {
            return false;
        }
        t.setName(Thread.currentThread().getName() + "-" + t.hashCode());
        t.setDaemon(true);
        t.start();
        try (BufferedReader stdIn = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            for (String line = stdIn.readLine(); line != null;) {
                LOG.debug(line);
                if (StringUtils.isNotBlank(lineExpected) && line.contains(lineExpected)) {
                    LOG.info("Found expected line '{}'", line);
                    return true;
                }
                line = stdIn.readLine();
            }
        } finally {
            // stop the watchdog once we are done reading
            t.interrupt();
        }
        return false;
    }
    /**
     * Schedules the file for deletion after the given number of minutes (and on
     * JVM exit). NOTE(review): minutes * 60000 is int arithmetic and overflows
     * for very large values - confirm callers stay well below ~35k minutes.
     */
    public static void deleteFileAfterMinutes(final File file, final int minutes) {
        file.deleteOnExit();
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    Thread.sleep(minutes * 60000);
                    FileUtils.deleteQuietly(file);
                } catch (InterruptedException ex) {
                    LOG.trace(ex.getMessage());
                }
            }
        };
        t.setDaemon(true);
        t.start();
    }
    public static String getUniqueId() {
        return getUniqueId("");
    }
    // Random UUID with '-' replaced by '_', optionally prefixed.
    public static String getUniqueId(String prefix) {
        return new StringBuilder(prefix).append(UUID.randomUUID()).toString().replaceAll("-", "_");
    }
    public static void sleep(long millis, String message) throws InterruptedException {
        sleep(millis, 5000, message);
    }
    /**
     * Sleeps for millis in total, logging the message once per interval so the
     * logs show the wait is still in progress.
     */
    public static void sleep(long millis, int interval, String message) throws InterruptedException {
        long end = System.currentTimeMillis() + millis;
        while (true) {
            LOG.debug("{}", message);
            long t = end - System.currentTimeMillis();
            if (t > interval) {
                Thread.sleep(interval);
            } else if (t > 0) {
                Thread.sleep(t);
                break;
            } else {
                break;
            }
        }
    }
    public static String getKeepAliveFileName(String fileName) {
        return SystemConfiguration.CONSTANT_LOG_KEEP_ALIVE_PREFIX + fileName;
    }
    // Sibling file in the same directory carrying the keep-alive prefix.
    public static File getKeepAliveFile(File file) {
        String name = Utils.getKeepAliveFileName(file.getName());
        return Paths.get(file.getParent(), name).toFile();
    }
    /**
     *
     * @param path directory to clean
     * @param keepAliveHour any file/directory having last modified time longer than keepAliveHour will be deleted
     * @param namePrefix file name prefix
     */
    public static void cleanDirectory(final String path, final float keepAliveHour, final String namePrefix) {
        final long intervalMillis = 3600000;
        final File dir = new File(path);
        if (!dir.exists()) {
            return;
        }
        // background daemon thread: scans hourly and deletes matching stale files
        Thread thread = new Thread() {
            @Override
            public void run() {
                while (true) {
                    long lastModifiedMillis = (long) (System.currentTimeMillis() - keepAliveHour * 3600000);
                    Collection<File> files = FileUtils.listFiles(dir, null, true);
                    for (File file : files) {
                        if (file.lastModified() < lastModifiedMillis && file.getName().startsWith(namePrefix)) {
                            LOG.debug("Delete {}", file);
                            if (!FileUtils.deleteQuietly(file)) {
                                LOG.debug("Cannot delete {}", file);
                            }
                        }
                    }
                    try {
                        LOG.debug("Waiting for next cleanup...");
                        Thread.sleep(intervalMillis);
                    } catch (InterruptedException ex) {
                        LOG.warn(ex.getMessage());
                        return;
                    }
                }
            }
        };
        thread.setName(Thread.currentThread().getName() + "-cleaning-" + thread.hashCode());
        thread.setDaemon(true);
        LOG.debug("Starting directory cleaning thread (scanning hourly), all files/directories in {} and older than {} "
            + "hour(s) and starts with {} will be deleted", dir, keepAliveHour, namePrefix);
        thread.start();
    }
    public static boolean isWindows() {
        String os = System.getProperty("os.name");
        return os.contains("Windows");
    }
    // Joins class path entries with the platform delimiter, normalizing '\' to '/'.
    public static String buildClassPath(String[] paths) {
        String classPathDelimiter = Utils.isWindows() ? ";" : ":";
        return StringUtils.join(paths, classPathDelimiter).replaceAll("\\\\", "/");
    }
    /**
     *
     * @param png picture file name
     *
     * @throws AWTException UI related exception
     * @throws IOException file IO issue
     */
    public static void captureScreen(File png) throws AWTException, IOException {
        Robot robot = new Robot();
        BufferedImage image = robot.createScreenCapture(new Rectangle(Toolkit.getDefaultToolkit().getScreenSize()));
        ImageIO.write(image, "png", png);
        LOG.debug("Save screenshot to {}", png.getAbsolutePath());
    }
    // Parses a timestamp string with the given pattern into epoch milliseconds.
    public static long getTime(String time, String format) throws ParseException {
        DateFormat formatter = new SimpleDateFormat(format);
        Date date = formatter.parse(time);
        return date.getTime();
    }
    // Ad-hoc manual check; requires a local ping.sh script.
    public static void main(String[] args) throws Exception {
        {
            Process p = cmdAsync(new String[]{"./ping.sh"});
            waitForOutput(p, 11);
        }
    }
}
| apache-2.0 |
abiosoft/caddy | replacer.go | 8469 | // Copyright 2015 Matthew Holt and The Caddy Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package caddy
import (
"fmt"
"os"
"path/filepath"
"runtime"
"strconv"
"strings"
"time"
)
// NewReplacer constructs a ready-to-use Replacer whose lookup order
// is: global default placeholders first, then statically-set values.
func NewReplacer() *Replacer {
	rep := &Replacer{static: make(map[string]interface{})}
	rep.providers = []ReplacerFunc{globalDefaultReplacements, rep.fromStatic}
	return rep
}
// Replacer can replace values in strings.
// A default/empty Replacer is not valid;
// use NewReplacer to make one.
type Replacer struct {
	// providers are consulted in registration order until one
	// recognizes a variable (see Get).
	providers []ReplacerFunc
	// static holds values registered via Set, served by fromStatic.
	static map[string]interface{}
}
// Map adds mapFunc to the list of value providers.
// mapFunc will be executed only at replace-time.
// Providers added later are consulted after earlier ones.
func (r *Replacer) Map(mapFunc ReplacerFunc) {
	r.providers = append(r.providers, mapFunc)
}
// Set sets a custom variable to a static value,
// replacing any value previously set for the same variable.
func (r *Replacer) Set(variable string, value interface{}) {
	r.static[variable] = value
}
// Get looks the variable up across all registered providers, in
// registration order, and returns the first match. The boolean
// reports whether any provider recognized the variable.
func (r *Replacer) Get(variable string) (interface{}, bool) {
	for _, provider := range r.providers {
		val, ok := provider(variable)
		if ok {
			return val, true
		}
	}
	return nil, false
}
// GetString behaves like Get but renders the value as a string
// (an unknown variable yields the empty string).
func (r *Replacer) GetString(variable string) (string, bool) {
	value, known := r.Get(variable)
	return toString(value), known
}
// Delete removes a variable with a static value
// that was created using Set. Deleting a variable
// that was never set is a no-op.
func (r *Replacer) Delete(variable string) {
	delete(r.static, variable)
}
// fromStatic is the provider backed by the r.static map
// (values registered through Set).
func (r *Replacer) fromStatic(key string) (interface{}, bool) {
	if val, ok := r.static[key]; ok {
		return val, true
	}
	return nil, false
}
// ReplaceOrErr is like ReplaceAll, but any placeholders
// that are empty or not recognized will cause an error to
// be returned, as selected by errOnEmpty / errOnUnknown.
func (r *Replacer) ReplaceOrErr(input string, errOnEmpty, errOnUnknown bool) (string, error) {
	return r.replace(input, "", false, errOnEmpty, errOnUnknown, nil)
}
// ReplaceKnown is like ReplaceAll but only replaces
// placeholders that are known (recognized). Unrecognized
// placeholders will remain in the output. Known values that
// evaluate to "" are substituted with empty.
func (r *Replacer) ReplaceKnown(input, empty string) string {
	out, _ := r.replace(input, empty, false, false, false, nil)
	return out
}
// ReplaceAll efficiently replaces placeholders in input with
// their values. All placeholders are replaced in the output
// whether they are recognized or not (unknown ones are treated
// as empty). Values that are empty
// string will be substituted with empty.
func (r *Replacer) ReplaceAll(input, empty string) string {
	out, _ := r.replace(input, empty, true, false, false, nil)
	return out
}
// ReplaceFunc is the same as ReplaceAll, but calls f for every
// replacement to be made, in case f wants to change or inspect
// the replacement. An error returned by f aborts the whole
// replacement and is returned to the caller.
func (r *Replacer) ReplaceFunc(input string, f ReplacementFunc) (string, error) {
	return r.replace(input, "", true, false, false, f)
}
// replace is the single scanning engine behind all the public
// Replace* methods. It walks input once, copying literal text and
// substituting each {key} placeholder; phEscape (backslash) before a
// brace makes that brace literal. The flags select the error/empty
// policy and f, when non-nil, may transform each value.
func (r *Replacer) replace(input, empty string,
	treatUnknownAsEmpty, errOnEmpty, errOnUnknown bool,
	f ReplacementFunc) (string, error) {
	if !strings.Contains(input, string(phOpen)) {
		return input, nil
	}
	var sb strings.Builder
	// it is reasonable to assume that the output
	// will be approximately as long as the input
	sb.Grow(len(input))
	// iterate the input to find each placeholder
	var lastWriteCursor int
scan:
	for i := 0; i < len(input); i++ {
		// check for escaped braces
		if i > 0 && input[i-1] == phEscape && (input[i] == phClose || input[i] == phOpen) {
			// flush text before the escape, dropping the escape byte itself
			sb.WriteString(input[lastWriteCursor : i-1])
			lastWriteCursor = i
			continue
		}
		if input[i] != phOpen {
			continue
		}
		// find the end of the placeholder
		end := strings.Index(input[i:], string(phClose)) + i
		if end < i {
			continue
		}
		// if necessary look for the first closing brace that is not escaped
		for end > 0 && end < len(input)-1 && input[end-1] == phEscape {
			nextEnd := strings.Index(input[end+1:], string(phClose))
			if nextEnd < 0 {
				continue scan
			}
			end += nextEnd + 1
		}
		// write the substring from the last cursor to this point
		sb.WriteString(input[lastWriteCursor:i])
		// trim opening bracket
		key := input[i+1 : end]
		// try to get a value for this key, handle empty values accordingly
		val, found := r.Get(key)
		if !found {
			// placeholder is unknown (unrecognized); handle accordingly
			if errOnUnknown {
				return "", fmt.Errorf("unrecognized placeholder %s%s%s",
					string(phOpen), key, string(phClose))
			} else if !treatUnknownAsEmpty {
				// if treatUnknownAsEmpty is true, we'll handle an empty
				// val later; so only continue otherwise
				lastWriteCursor = i
				continue
			}
		}
		// apply any transformations
		if f != nil {
			var err error
			val, err = f(key, val)
			if err != nil {
				return "", err
			}
		}
		// convert val to a string as efficiently as possible
		valStr := toString(val)
		// write the value; if it's empty, either return
		// an error or write a default value
		if valStr == "" {
			if errOnEmpty {
				return "", fmt.Errorf("evaluated placeholder %s%s%s is empty",
					string(phOpen), key, string(phClose))
			} else if empty != "" {
				sb.WriteString(empty)
			}
		} else {
			sb.WriteString(valStr)
		}
		// advance cursor to end of placeholder
		i = end
		lastWriteCursor = i + 1
	}
	// flush any unwritten remainder
	sb.WriteString(input[lastWriteCursor:])
	return sb.String(), nil
}
// toString converts val to its string representation as efficiently
// as possible, avoiding fmt for the common scalar types. nil becomes
// "", a byte/rune-like value becomes the corresponding character, and
// anything unrecognized falls back to fmt.Sprintf("%+v", ...).
func toString(val interface{}) string {
	switch v := val.(type) {
	case nil:
		return ""
	case string:
		return v
	case fmt.Stringer:
		return v.String()
	case byte:
		return string(v)
	case []byte:
		return string(v)
	case []rune:
		return string(v)
	case int:
		return strconv.Itoa(v)
	case int32:
		return strconv.Itoa(int(v))
	case int64:
		// FormatInt avoids truncation on platforms where int is 32-bit
		return strconv.FormatInt(v, 10)
	case uint:
		return strconv.FormatUint(uint64(v), 10)
	case uint32:
		// FormatUint: values above MaxInt32 would overflow a 32-bit int
		return strconv.FormatUint(uint64(v), 10)
	case uint64:
		// FormatUint: values above MaxInt64 would be corrupted by int(v)
		return strconv.FormatUint(v, 10)
	case float32:
		return strconv.FormatFloat(float64(v), 'f', -1, 32)
	case float64:
		return strconv.FormatFloat(v, 'f', -1, 64)
	case bool:
		if v {
			return "true"
		}
		return "false"
	default:
		return fmt.Sprintf("%+v", v)
	}
}
// ReplacerFunc is a function that returns a replacement
// for the given key along with true if the function is able
// to service that key (even if the value is blank). If the
// function does not recognize the key, false should be
// returned. ReplacerFuncs are registered via Map and are
// consulted in order by Get.
type ReplacerFunc func(key string) (interface{}, bool)
// globalDefaultReplacements services the placeholders that every
// Replacer supports out of the box: environment variables under the
// "env." prefix, basic system information, and the current time.
func globalDefaultReplacements(key string) (interface{}, bool) {
	// environment variables are exposed as {env.NAME}
	const envPrefix = "env."
	if strings.HasPrefix(key, envPrefix) {
		return os.Getenv(strings.TrimPrefix(key, envPrefix)), true
	}
	switch key {
	case "system.hostname":
		// a lookup error simply yields an empty hostname
		hostname, _ := os.Hostname()
		return hostname, true
	case "system.slash":
		return string(filepath.Separator), true
	case "system.os":
		return runtime.GOOS, true
	case "system.arch":
		return runtime.GOARCH, true
	case "time.now":
		return nowFunc(), true
	case "time.now.common_log":
		return nowFunc().Format("02/Jan/2006:15:04:05 -0700"), true
	case "time.now.year":
		return strconv.Itoa(nowFunc().Year()), true
	}
	return nil, false
}
// ReplacementFunc is a function that is called when a
// replacement is being performed. It receives the
// variable (i.e. placeholder name) and the value that
// will be the replacement, and returns the value that
// will actually be the replacement, or an error. Note
// that errors are sometimes ignored by replacers.
type ReplacementFunc func(variable string, val interface{}) (interface{}, error)
// nowFunc is a variable so tests can change it
// in order to obtain a deterministic time.
var nowFunc = time.Now
// ReplacerCtxKey is the context key for a replacer.
const ReplacerCtxKey CtxKey = "replacer"
// phOpen and phClose delimit a placeholder; phEscape before either
// brace makes that brace literal.
const phOpen, phClose, phEscape = '{', '}', '\\'
| apache-2.0 |
liuwharton/runc | libcontainer/cgroups/fs/net_cls.go | 832 | package fs
import (
"github.com/opencontainers/runc/libcontainer/cgroups"
"github.com/opencontainers/runc/libcontainer/configs"
)
// NetClsGroup handles the "net_cls" cgroup subsystem for a container.
// It is stateless; all context is carried in the method arguments.
type NetClsGroup struct {
}
// Name returns the subsystem name as used in cgroup paths.
func (s *NetClsGroup) Name() string {
	return "net_cls"
}
// Apply joins the "net_cls" cgroup for the container and then writes
// the configured settings to it via Set.
func (s *NetClsGroup) Apply(d *data) error {
	dir, err := d.join("net_cls")
	// NOTE(review): a not-found error from join is tolerated - presumably
	// the subsystem is not mounted on this host; confirm against d.join.
	if err != nil && !cgroups.IsNotFound(err) {
		return err
	}
	if err := s.Set(dir, d.c); err != nil {
		return err
	}
	return nil
}
// Set writes the configured class identifier to net_cls.classid
// under path. It is a no-op when no classid is configured.
func (s *NetClsGroup) Set(path string, cgroup *configs.Cgroup) error {
	if cgroup.NetClsClassid == "" {
		return nil
	}
	return writeFile(path, "net_cls.classid", cgroup.NetClsClassid)
}
// Remove deletes the container's "net_cls" cgroup path.
func (s *NetClsGroup) Remove(d *data) error {
	return removePath(d.path("net_cls"))
}
// GetStats is a no-op: this subsystem contributes no statistics.
func (s *NetClsGroup) GetStats(path string, stats *cgroups.Stats) error {
	return nil
}
| apache-2.0 |
csoroiu/untwist | src/main/java/ro/derbederos/untwist/sampling/ReverseGaussianSampler.java | 1452 | /*
* Copyright (c) 2017-2018 Claudiu Soroiu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.derbederos.untwist.sampling;
import org.apache.commons.rng.sampling.distribution.GaussianSampler;
import ro.derbederos.untwist.ReverseNormalizedGaussianSampler;
public class ReverseGaussianSampler extends GaussianSampler {
    // Kept in addition to the superclass reference so undoSample() can be
    // forwarded; GaussianSampler does not expose the normalized sampler.
    private final ReverseNormalizedGaussianSampler normalized;
    /**
     * @param normalized Generator of N(0,1) Gaussian distributed random numbers.
     * @param mean Mean of the Gaussian distribution.
     * @param standardDeviation Standard deviation of the Gaussian distribution.
     */
    public ReverseGaussianSampler(ReverseNormalizedGaussianSampler normalized, double mean, double standardDeviation) {
        super(normalized, mean, standardDeviation);
        this.normalized = normalized;
    }
    /**
     * Forwards to the underlying normalized sampler's {@code undoSample()},
     * reverting its most recent sample.
     */
    public void undoSample() {
        this.normalized.undoSample();
    }
}
| apache-2.0 |
consulo/consulo-bash | src/main/java/com/ansorgit/plugins/bash/editor/formatting/processor/BashSpacingProcessor.java | 4565 | /*******************************************************************************
* Copyright 2011 Joachim Ansorg, mail@ansorg-it.com
* File: BashSpacingProcessor.java, Class: BashSpacingProcessor
* Last modified: 2011-04-30 16:33
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.ansorgit.plugins.bash.editor.formatting.processor;
import com.ansorgit.plugins.bash.editor.formatting.BashBlock;
import com.ansorgit.plugins.bash.editor.formatting.SpacingUtil;
import com.ansorgit.plugins.bash.lang.psi.BashVisitor;
import com.intellij.formatting.Spacing;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.psi.PsiElement;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.impl.source.SourceTreeToPsiMap;
import com.intellij.psi.impl.source.tree.CompositeElement;
/**
* This code is based on code taken from the Groovy plugin.
*
* @author ilyas, jansorg
*/
public class BashSpacingProcessor extends BashVisitor {
    // Per-thread cached processor instance, reused across getSpacing calls so
    // the processor object is not reallocated for every spacing query.
    private static final ThreadLocal<BashSpacingProcessor> mySharedProcessorAllocator = new ThreadLocal<BashSpacingProcessor>();
    private static final Logger LOG = Logger.getInstance("#SpacingProcessor");
    private MyBashSpacingVisitor myBashSpacingVisitor;
    public BashSpacingProcessor(MyBashSpacingVisitor myBashSpacingVisitor) {
        this.myBashSpacingVisitor = myBashSpacingVisitor;
    }
    // Computes the spacing between two formatting blocks; only the second
    // block's node is actually consulted.
    public static Spacing getSpacing(BashBlock child1, BashBlock child2, CodeStyleSettings settings) {
        return getSpacing(child2.getNode(), settings);
    }
    private static Spacing getSpacing(ASTNode node, CodeStyleSettings settings) {
        BashSpacingProcessor spacingProcessor = mySharedProcessorAllocator.get();
        try {
            if (spacingProcessor == null) {
                // first use on this thread: create and cache a processor
                spacingProcessor = new BashSpacingProcessor(new MyBashSpacingVisitor(node, settings));
                mySharedProcessorAllocator.set(spacingProcessor);
            } else {
                // reuse the cached processor with a freshly-initialized visitor
                spacingProcessor.myBashSpacingVisitor = new MyBashSpacingVisitor(node, settings);
            }
            spacingProcessor.doInit();
            return spacingProcessor.getResult();
        } finally {
            if (spacingProcessor != null) {
                spacingProcessor.clear();
            }
        }
    }
    private void clear() {
        if (myBashSpacingVisitor != null) {
            myBashSpacingVisitor.clear();
        }
    }
    private Spacing getResult() {
        return myBashSpacingVisitor.getResult();
    }
    private void doInit() {
        myBashSpacingVisitor.doInit();
    }
    public void setVisitor(MyBashSpacingVisitor visitor) {
        myBashSpacingVisitor = visitor;
    }
    private static class MyBashSpacingVisitor extends BashVisitor {
        // result stays null unless a visit method sets it; none do here, so
        // getResult() currently always yields null (default spacing).
        private Spacing result;
        private CodeStyleSettings mySettings;
        private ASTNode myChild2;
        private ASTNode myChild1;
        private PsiElement myParent;
        public MyBashSpacingVisitor(ASTNode node, CodeStyleSettings settings) {
            mySettings = settings;
            init(node);
        }
        // Walks backwards from 'child' to its previous non-whitespace sibling;
        // when there is none, retries one level up from the parent node.
        private void init(final ASTNode child) {
            if (child == null) {
                return;
            }
            ASTNode treePrev = child.getTreePrev();
            while (treePrev != null && SpacingUtil.isWhiteSpace(treePrev)) {
                treePrev = treePrev.getTreePrev();
            }
            if (treePrev == null) {
                init(child.getTreeParent());
            } else {
                myChild2 = child;
                myChild1 = treePrev;
                final CompositeElement parent = (CompositeElement) treePrev.getTreeParent();
                myParent = SourceTreeToPsiMap.treeElementToPsi(parent);
            }
        }
        public void clear() {
        }
        public Spacing getResult() {
            return result;
        }
        public void doInit() {
        }
    }
}
| apache-2.0 |
rafizanbaharum/cfi-gov | src/main/java/net/canang/cfi/biz/jm/manager/workflow/task/ManualJournalApproveTask.java | 1587 | package net.canang.cfi.biz.jm.manager.workflow.task;
import net.canang.cfi.biz.Util;
import net.canang.cfi.core.dd.model.CfCostCenter;
import net.canang.cfi.core.dd.model.CfReferenceNoConstants;
import net.canang.cfi.core.jm.model.CfJournal;
import net.canang.cfi.core.so.model.CfFlowState;
import org.activiti.engine.impl.pvm.delegate.ActivityExecution;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.sql.Timestamp;
/**
 * Workflow service task executed when a manual journal reaches the
 * approval step of the Activiti process.
 */
@Component("manualJournal_approve_ST")
@Transactional
public class ManualJournalApproveTask extends JournalTaskSupport {
    private static final Logger log = Logger.getLogger(ManualJournalApproveTask.class);
    /**
     * Approves the journal referenced by the execution's "documentId"
     * variable: marks it APPROVED, stamps approver and approval time,
     * assigns a journal number, posts it, and persists the changes.
     *
     * @param execution current workflow execution carrying "documentId"
     * @throws Exception propagated from the finder/manager services
     */
    public void execute(ActivityExecution execution) throws Exception {
        log.debug("approving journal");
        // retrieve journal from variable
        Long journalId = (Long) execution.getVariable("documentId");
        CfJournal journal = jmFinder.findJournalById(journalId);
        // update flow state
        journal.getFlowdata().setState(CfFlowState.APPROVED);
        journal.getFlowdata().setApprovedDate(new Timestamp(System.currentTimeMillis()));
        journal.getFlowdata().setApprover(Util.getCurrentUser().getId());
        // journal number is generated against the requesting cost center
        CfCostCenter requester = journal.getRequester();
        journal.setJournalNo(jmManager.generateReferenceNo(CfReferenceNoConstants.JOURNAL_JOURNAL_NO, requester));
        // post to voucher and general ledger, then persist the journal
        vmManager.post(journal);
        glManager.post(journal);
        jmManager.updateJournal(journal);
    }
}
| apache-2.0 |
phatboyg/Machete | src/Machete.HL7Schema/Generated/V26/Groups/RCI_I05_OBSERVATION.cs | 715 | // This file was automatically generated and may be regenerated at any
// time. To ensure any changes are retained, modify the tool with any segment/component/group/field name
// or type changes.
namespace Machete.HL7Schema.V26
{
using HL7;
    /// <summary>
    /// RCI_I05_OBSERVATION (Group) - one observation entry of an RCI_I05
    /// message, pairing an OBR segment with its notes and results.
    /// </summary>
    public interface RCI_I05_OBSERVATION :
        HL7V26Layout
    {
        /// <summary>
        /// OBR - the observation request segment for this group
        /// </summary>
        Segment<OBR> OBR { get; }
        /// <summary>
        /// NTE - notes/comments segments attached to the observation
        /// </summary>
        SegmentList<NTE> NTE { get; }
        /// <summary>
        /// RESULTS - repeating results sub-groups for this observation
        /// </summary>
        LayoutList<RCI_I05_RESULTS> Results { get; }
    }
} | apache-2.0 |
astrapi69/wicket | wicket-util/src/main/java/org/apache/wicket/util/resource/ResourceUtils.java | 4659 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.util.resource;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.wicket.util.string.Strings;
/**
*
* @author Juergen Donnerstag
*/
public class ResourceUtils
{
/** The default postfix for minified names (ex: /css/mystyle.min.css) **/
public static final String MIN_POSTFIX_DEFAULT = "min";
/** Regular expression patter to extract the locale from the filename (ex: de_DE) **/
private static final Pattern LOCALE_PATTERN = Pattern.compile("_([a-z]{2})(_([A-Z]{2})(_([^_]+))?)?$");
/** Stores standard ISO country codes from {@code java.util.Locale} **/
private final static Set<String> isoCountries = new HashSet<>(
Arrays.asList(Locale.getISOCountries()));
/** Stores standard ISO language codes from {@code java.util.Locale} **/
private final static Set<String> isoLanguages = new HashSet<>(
Arrays.asList(Locale.getISOLanguages()));
/**
* Return the minified version for a given resource name.
* For example '/css/coolTheme.css' becomes '/css/coolTheme.min.css'
*
* @param name
* The original resource name
* @param minPostfix
* The postfix to use for minified name
* @return The minified resource name
*/
public static String getMinifiedName(String name, String minPostfix)
{
String minifiedName;
int idxOfExtension = name.lastIndexOf('.');
final String dottedPostfix = "." + minPostfix;
if (idxOfExtension > -1)
{
String extension = name.substring(idxOfExtension);
final String baseName = name.substring(0, name.length() - extension.length() + 1);
if (!dottedPostfix.equals(extension) && !baseName.endsWith(dottedPostfix + "."))
{
minifiedName = baseName + minPostfix + extension;
} else
{
minifiedName = name;
}
} else
{
minifiedName = name + dottedPostfix;
}
return minifiedName;
}
/**
* Extract the locale from the filename
*
* @param path
* The file path
* @return The updated path, without the locale
*/
public static PathLocale getLocaleFromFilename(String path)
{
String extension = "";
int pos = path.lastIndexOf('.');
if (pos != -1)
{
extension = path.substring(pos);
path = path.substring(0, pos);
}
String filename = Strings.lastPathComponent(path, '/');
Matcher matcher = LOCALE_PATTERN.matcher(filename);
if (matcher.find())
{
String language = matcher.group(1);
String country = matcher.group(3);
String variant = matcher.group(5);
// did we find a language?
if (language != null)
{
if (isoLanguages.contains(language) == false)
{
language = null;
country = null;
variant = null;
}
}
// did we find a country?
if ((language != null) && (country != null))
{
if (isoCountries.contains(country) == false)
{
country = null;
variant = null;
}
}
if (language != null)
{
pos = path.length() - filename.length() + matcher.start();
String basePath = path.substring(0, pos) + extension;
Locale locale = new Locale(language, country != null ? country : "",
variant != null ? variant : "");
return new PathLocale(basePath, locale);
}
} // else skip the whole thing... probably user specific underscores used
return new PathLocale(path + extension, null);
}
/**
*
*/
public static class PathLocale
{
/** */
public final String path;
/** */
public final Locale locale;
/**
* @param path
* @param locale
*/
public PathLocale(final String path, final Locale locale)
{
this.path = path;
this.locale = locale;
}
}
}
| apache-2.0 |
Blue-Whale-H/coolweather | app/src/main/java/com/coolweather/android/db/City.java | 895 | package com.coolweather.android.db;
import org.litepal.crud.DataSupport;
/**
* Created by blueWhale on 2017/6/19.
*/
public class City extends DataSupport {
private int id;
private String cityName;
private int cityCode;
private int provinceId;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getCityName() {
return cityName;
}
public void setCityName(String cityName) {
this.cityName = cityName;
}
public int getCityCode() {
return cityCode;
}
public void setCityCode(int cityCode) {
this.cityCode = cityCode;
}
public int getProvinceId() {
return provinceId;
}
public void setProvinceId(int provinceId) {
this.provinceId = provinceId;
}
}
| apache-2.0 |
dlwhitehurst/MusicRecital | src/test/java/org/musicrecital/service/MockUserDetailsService.java | 571 | package org.musicrecital.service;
import org.musicrecital.model.User;
import org.springframework.dao.DataAccessException;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
public class MockUserDetailsService implements UserDetailsService {
public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException, DataAccessException {
return new User("testuser");
}
}
| apache-2.0 |
adamdecaf/validate | src/main/scala/Validation.scala | 2013 | package com.decaf.validation
import scala.language.higherKinds
import scala.util.control.NonFatal
final case class Success[E, A](value: A) extends Validation[E, A]
final case class Failure[E, A](error: E) extends Validation[E, A]
sealed trait Validation[+E, +A] {
def isSuccess: Boolean = this match {
case Success(_) => true
case _ => false
}
def isFailure: Boolean = !isSuccess
def map[B](f: A => B): Validation[E, B] = this match {
case Success(a) => Success(f(a))
case Failure(e) => Failure(e)
}
def leftMap[B](f: E => B): Validation[B, A] = this match {
case Success(a) => Success(a)
case Failure(e) => Failure(f(e))
}
def flatMap[EE >: E, AA](f: A => Validation[EE, AA]): Validation[EE, AA] = this match {
case Success(a) => f(a)
case Failure(e) => Failure(e)
}
def bimap[EE, AA](f: E => EE, g: A => AA) = this match {
case Success(a) => Success(g(a))
case Failure(e) => Failure(f(e))
}
def forAll(f: A => Boolean): Boolean = this match {
case Success(a) => f(a)
case _ => false
}
def foreach[B](f: A => B): Unit = this match {
case Success(a) => f(a)
case Failure(_) => ()
}
def orElse[EE >: E, AA >: A](or: => Validation[EE, AA]): Validation[EE, AA] = this match {
case Success(a) => Success(a)
case Failure(_) => or
}
def swap: Validation[A, E] = this match {
case Success(a) => Failure(a)
case Failure(e) => Success(e)
}
def toEither: Either[E, A] = this match {
case Success(a) => Right(a)
case Failure(e) => Left(e)
}
def toList: List[A] = this match {
case Success(a) => List(a)
case _ => List.empty
}
def toOption: Option[A] = this match {
case Success(a) => Some(a)
case _ => None
}
}
object Validation {
def apply[E, A](v: => A) =
try {
Success(v)
} catch {
case NonFatal(e) => Failure(e)
}
def success[E, A](v: A): Validation[E, A] = Success(v)
def failure[E, A](v: E): Validation[E, A] = Failure(v)
}
| apache-2.0 |
droidranger/xygapp | xyg-library/src/test/java/com/ranger/xyg/xyg_library/ExampleUnitTest.java | 404 | package com.ranger.xyg.xyg_library;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | apache-2.0 |
spatstat/s2 | src/RcppExports.cpp | 13033 | // Generated by using Rcpp::compileAttributes() -> do not edit by hand
// Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393
#include <Rcpp.h>
using namespace Rcpp;
// S2CapFromAxisHeight
List S2CapFromAxisHeight(NumericVector axis, double height);
RcppExport SEXP _s2_S2CapFromAxisHeight(SEXP axisSEXP, SEXP heightSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericVector >::type axis(axisSEXP);
Rcpp::traits::input_parameter< double >::type height(heightSEXP);
rcpp_result_gen = Rcpp::wrap(S2CapFromAxisHeight(axis, height));
return rcpp_result_gen;
END_RCPP
}
// S2Cap_contains_point
LogicalVector S2Cap_contains_point(NumericMatrix points, List cap);
RcppExport SEXP _s2_S2Cap_contains_point(SEXP pointsSEXP, SEXP capSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericMatrix >::type points(pointsSEXP);
Rcpp::traits::input_parameter< List >::type cap(capSEXP);
rcpp_result_gen = Rcpp::wrap(S2Cap_contains_point(points, cap));
return rcpp_result_gen;
END_RCPP
}
// S2Cap_area
double S2Cap_area(List cap);
RcppExport SEXP _s2_S2Cap_area(SEXP capSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type cap(capSEXP);
rcpp_result_gen = Rcpp::wrap(S2Cap_area(cap));
return rcpp_result_gen;
END_RCPP
}
// S2Cap_GetRectBound
List S2Cap_GetRectBound(List x);
RcppExport SEXP _s2_S2Cap_GetRectBound(SEXP xSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
rcpp_result_gen = Rcpp::wrap(S2Cap_GetRectBound(x));
return rcpp_result_gen;
END_RCPP
}
// S2CellIdFromPoint
List S2CellIdFromPoint(NumericMatrix x, IntegerVector level);
RcppExport SEXP _s2_S2CellIdFromPoint(SEXP xSEXP, SEXP levelSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericMatrix >::type x(xSEXP);
Rcpp::traits::input_parameter< IntegerVector >::type level(levelSEXP);
rcpp_result_gen = Rcpp::wrap(S2CellIdFromPoint(x, level));
return rcpp_result_gen;
END_RCPP
}
// S2CellId_ToPoint
NumericMatrix S2CellId_ToPoint(List x);
RcppExport SEXP _s2_S2CellId_ToPoint(SEXP xSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
rcpp_result_gen = Rcpp::wrap(S2CellId_ToPoint(x));
return rcpp_result_gen;
END_RCPP
}
// S2CellId_ToString
CharacterVector S2CellId_ToString(std::vector<std::string> x);
RcppExport SEXP _s2_S2CellId_ToString(SEXP xSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< std::vector<std::string> >::type x(xSEXP);
rcpp_result_gen = Rcpp::wrap(S2CellId_ToString(x));
return rcpp_result_gen;
END_RCPP
}
// S2Cell_vertices_from_token
List S2Cell_vertices_from_token(std::vector<std::string> tokens);
RcppExport SEXP _s2_S2Cell_vertices_from_token(SEXP tokensSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< std::vector<std::string> >::type tokens(tokensSEXP);
rcpp_result_gen = Rcpp::wrap(S2Cell_vertices_from_token(tokens));
return rcpp_result_gen;
END_RCPP
}
// S2Cell_vertices_from_point
List S2Cell_vertices_from_point(NumericMatrix mat, int level);
RcppExport SEXP _s2_S2Cell_vertices_from_point(SEXP matSEXP, SEXP levelSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericMatrix >::type mat(matSEXP);
Rcpp::traits::input_parameter< int >::type level(levelSEXP);
rcpp_result_gen = Rcpp::wrap(S2Cell_vertices_from_point(mat, level));
return rcpp_result_gen;
END_RCPP
}
// S2Cell_grid_centers
NumericMatrix S2Cell_grid_centers(int level);
RcppExport SEXP _s2_S2Cell_grid_centers(SEXP levelSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< int >::type level(levelSEXP);
rcpp_result_gen = Rcpp::wrap(S2Cell_grid_centers(level));
return rcpp_result_gen;
END_RCPP
}
// S2Covering_internal
List S2Covering_internal(List x, std::string type, int max_cells, int min_level, int max_level, bool interior);
RcppExport SEXP _s2_S2Covering_internal(SEXP xSEXP, SEXP typeSEXP, SEXP max_cellsSEXP, SEXP min_levelSEXP, SEXP max_levelSEXP, SEXP interiorSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
Rcpp::traits::input_parameter< std::string >::type type(typeSEXP);
Rcpp::traits::input_parameter< int >::type max_cells(max_cellsSEXP);
Rcpp::traits::input_parameter< int >::type min_level(min_levelSEXP);
Rcpp::traits::input_parameter< int >::type max_level(max_levelSEXP);
Rcpp::traits::input_parameter< bool >::type interior(interiorSEXP);
rcpp_result_gen = Rcpp::wrap(S2Covering_internal(x, type, max_cells, min_level, max_level, interior));
return rcpp_result_gen;
END_RCPP
}
// S2LatLngRect
List S2LatLngRect(NumericVector lo, NumericVector hi);
RcppExport SEXP _s2_S2LatLngRect(SEXP loSEXP, SEXP hiSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericVector >::type lo(loSEXP);
Rcpp::traits::input_parameter< NumericVector >::type hi(hiSEXP);
rcpp_result_gen = Rcpp::wrap(S2LatLngRect(lo, hi));
return rcpp_result_gen;
END_RCPP
}
// S2LatLngRect_area
double S2LatLngRect_area(List x);
RcppExport SEXP _s2_S2LatLngRect_area(SEXP xSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
rcpp_result_gen = Rcpp::wrap(S2LatLngRect_area(x));
return rcpp_result_gen;
END_RCPP
}
// S2Point_interpolate
NumericMatrix S2Point_interpolate(NumericMatrix x, double eps);
RcppExport SEXP _s2_S2Point_interpolate(SEXP xSEXP, SEXP epsSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericMatrix >::type x(xSEXP);
Rcpp::traits::input_parameter< double >::type eps(epsSEXP);
rcpp_result_gen = Rcpp::wrap(S2Point_interpolate(x, eps));
return rcpp_result_gen;
END_RCPP
}
// S2PolygonBuild
List S2PolygonBuild(List x, bool validate, bool xor_edges, double vertex_merge_radius, double edge_splice_fraction, bool undirected_edges);
RcppExport SEXP _s2_S2PolygonBuild(SEXP xSEXP, SEXP validateSEXP, SEXP xor_edgesSEXP, SEXP vertex_merge_radiusSEXP, SEXP edge_splice_fractionSEXP, SEXP undirected_edgesSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
Rcpp::traits::input_parameter< bool >::type validate(validateSEXP);
Rcpp::traits::input_parameter< bool >::type xor_edges(xor_edgesSEXP);
Rcpp::traits::input_parameter< double >::type vertex_merge_radius(vertex_merge_radiusSEXP);
Rcpp::traits::input_parameter< double >::type edge_splice_fraction(edge_splice_fractionSEXP);
Rcpp::traits::input_parameter< bool >::type undirected_edges(undirected_edgesSEXP);
rcpp_result_gen = Rcpp::wrap(S2PolygonBuild(x, validate, xor_edges, vertex_merge_radius, edge_splice_fraction, undirected_edges));
return rcpp_result_gen;
END_RCPP
}
// S2Polygon_union
List S2Polygon_union(List x, List y);
RcppExport SEXP _s2_S2Polygon_union(SEXP xSEXP, SEXP ySEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
Rcpp::traits::input_parameter< List >::type y(ySEXP);
rcpp_result_gen = Rcpp::wrap(S2Polygon_union(x, y));
return rcpp_result_gen;
END_RCPP
}
// S2Polygon_intersection
List S2Polygon_intersection(List x, List y);
RcppExport SEXP _s2_S2Polygon_intersection(SEXP xSEXP, SEXP ySEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
Rcpp::traits::input_parameter< List >::type y(ySEXP);
rcpp_result_gen = Rcpp::wrap(S2Polygon_intersection(x, y));
return rcpp_result_gen;
END_RCPP
}
// S2Polygons_intersection
List S2Polygons_intersection(List x, List y);
RcppExport SEXP _s2_S2Polygons_intersection(SEXP xSEXP, SEXP ySEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
Rcpp::traits::input_parameter< List >::type y(ySEXP);
rcpp_result_gen = Rcpp::wrap(S2Polygons_intersection(x, y));
return rcpp_result_gen;
END_RCPP
}
// S2Polygons_intersect
List S2Polygons_intersect(List x, List y);
RcppExport SEXP _s2_S2Polygons_intersect(SEXP xSEXP, SEXP ySEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
Rcpp::traits::input_parameter< List >::type y(ySEXP);
rcpp_result_gen = Rcpp::wrap(S2Polygons_intersect(x, y));
return rcpp_result_gen;
END_RCPP
}
// S2Polygons_centroid
NumericMatrix S2Polygons_centroid(List x);
RcppExport SEXP _s2_S2Polygons_centroid(SEXP xSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
rcpp_result_gen = Rcpp::wrap(S2Polygons_centroid(x));
return rcpp_result_gen;
END_RCPP
}
// S2Polygons_area
NumericVector S2Polygons_area(List x);
RcppExport SEXP _s2_S2Polygons_area(SEXP xSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< List >::type x(xSEXP);
rcpp_result_gen = Rcpp::wrap(S2Polygons_area(x));
return rcpp_result_gen;
END_RCPP
}
// S2Polygon_contains_point
LogicalVector S2Polygon_contains_point(NumericMatrix points, List poly, bool approx);
RcppExport SEXP _s2_S2Polygon_contains_point(SEXP pointsSEXP, SEXP polySEXP, SEXP approxSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericMatrix >::type points(pointsSEXP);
Rcpp::traits::input_parameter< List >::type poly(polySEXP);
Rcpp::traits::input_parameter< bool >::type approx(approxSEXP);
rcpp_result_gen = Rcpp::wrap(S2Polygon_contains_point(points, poly, approx));
return rcpp_result_gen;
END_RCPP
}
// S2Polyline_dist
NumericVector S2Polyline_dist(NumericMatrix line, NumericMatrix x);
RcppExport SEXP _s2_S2Polyline_dist(SEXP lineSEXP, SEXP xSEXP) {
BEGIN_RCPP
Rcpp::RObject rcpp_result_gen;
Rcpp::RNGScope rcpp_rngScope_gen;
Rcpp::traits::input_parameter< NumericMatrix >::type line(lineSEXP);
Rcpp::traits::input_parameter< NumericMatrix >::type x(xSEXP);
rcpp_result_gen = Rcpp::wrap(S2Polyline_dist(line, x));
return rcpp_result_gen;
END_RCPP
}
static const R_CallMethodDef CallEntries[] = {
{"_s2_S2CapFromAxisHeight", (DL_FUNC) &_s2_S2CapFromAxisHeight, 2},
{"_s2_S2Cap_contains_point", (DL_FUNC) &_s2_S2Cap_contains_point, 2},
{"_s2_S2Cap_area", (DL_FUNC) &_s2_S2Cap_area, 1},
{"_s2_S2Cap_GetRectBound", (DL_FUNC) &_s2_S2Cap_GetRectBound, 1},
{"_s2_S2CellIdFromPoint", (DL_FUNC) &_s2_S2CellIdFromPoint, 2},
{"_s2_S2CellId_ToPoint", (DL_FUNC) &_s2_S2CellId_ToPoint, 1},
{"_s2_S2CellId_ToString", (DL_FUNC) &_s2_S2CellId_ToString, 1},
{"_s2_S2Cell_vertices_from_token", (DL_FUNC) &_s2_S2Cell_vertices_from_token, 1},
{"_s2_S2Cell_vertices_from_point", (DL_FUNC) &_s2_S2Cell_vertices_from_point, 2},
{"_s2_S2Cell_grid_centers", (DL_FUNC) &_s2_S2Cell_grid_centers, 1},
{"_s2_S2Covering_internal", (DL_FUNC) &_s2_S2Covering_internal, 6},
{"_s2_S2LatLngRect", (DL_FUNC) &_s2_S2LatLngRect, 2},
{"_s2_S2LatLngRect_area", (DL_FUNC) &_s2_S2LatLngRect_area, 1},
{"_s2_S2Point_interpolate", (DL_FUNC) &_s2_S2Point_interpolate, 2},
{"_s2_S2PolygonBuild", (DL_FUNC) &_s2_S2PolygonBuild, 6},
{"_s2_S2Polygon_union", (DL_FUNC) &_s2_S2Polygon_union, 2},
{"_s2_S2Polygon_intersection", (DL_FUNC) &_s2_S2Polygon_intersection, 2},
{"_s2_S2Polygons_intersection", (DL_FUNC) &_s2_S2Polygons_intersection, 2},
{"_s2_S2Polygons_intersect", (DL_FUNC) &_s2_S2Polygons_intersect, 2},
{"_s2_S2Polygons_centroid", (DL_FUNC) &_s2_S2Polygons_centroid, 1},
{"_s2_S2Polygons_area", (DL_FUNC) &_s2_S2Polygons_area, 1},
{"_s2_S2Polygon_contains_point", (DL_FUNC) &_s2_S2Polygon_contains_point, 3},
{"_s2_S2Polyline_dist", (DL_FUNC) &_s2_S2Polyline_dist, 2},
{NULL, NULL, 0}
};
RcppExport void R_init_s2(DllInfo *dll) {
R_registerRoutines(dll, NULL, CallEntries, NULL, NULL);
R_useDynamicSymbols(dll, FALSE);
}
| apache-2.0 |
nickmilon/milonpy | milonpy/utils/basic2.py | 14147 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#######################################################
'''
module: utilities.basic2
Created:Aug 21, 2012
author: nickmilon
Description: Description: Simple utilities (2) and Vars - Very Limited IMPORTS HERE !
'''
#######################################################
from sys import stdout
from datetime import datetime , timedelta
from basic import FMT_dtGen,FMT_tGen, color_txt ,color_switch_txt,dictDot
from time import sleep ,time,mktime
import re
from random import random
def re_is_sameLen(txt,rexp):return len(txt)==len(rexp.findall(txt))
def re_is_same(txt,rexp):return txt==u''.join(rexp.findall(txt))
def re_diff(txt,rexp):return ''.join(list(set([c for c in txt]) - set(rexp.findall(txt))))
#re_gr=re.compile(ur'[\u03AC-\u03CE]|[;\s]', re.IGNORECASE| re.VERBOSE| re.UNICODE |re.MULTILINE)
def lst_randomize(lst):
"returns list in random order"
return [i[1] for i in [[random(),i] for i in sorted(lst)] ]
def time_seconds_since_epoch(dt=None):
if dt is None:dt=datetime.utcnow()
return mktime(dt.timetuple())+1e-6*dt.microsecond
def autoRetry(exceptionOrTuple,retries=3,sleepSeconds=1, BackOfFactor=1,loggerFun=None):
""" exceptionOrTuple= exception or tuple of exceptions,BackOfFactor=factor to back off on each retry loggerFun i.e. logger.info """
def wrapper(func):
def fun_call(*args, **kwargs):
tries = 0
while tries < retries:
try:
return func(*args, **kwargs)
except exceptionOrTuple, e:
tries += 1
if loggerFun:loggerFun("exception [%s] e=[%s] handled tries :%d sleeping[%f]" % (exceptionOrTuple ,e,tries,sleepSeconds * tries * BackOfFactor) )
sleep(sleepSeconds * tries * BackOfFactor) #* tries)
raise
return fun_call
return wrapper
def parseJSfunFromFile(filepath,functionName):
"""
helper function to get a js function string from a file containing js functions. Function must be named starting in first column and file must end with //eof//
lazyloads re
"""
with open( filepath) as fin:
r=re.search("(^.*?)(?P<fun>function\s+?%s.*?)(^fun|//eof//)" % functionName,fin.read(),re.MULTILINE|re.DOTALL)
return r.group('fun').strip() if r else False
def stdout_switchColor(color):
stdout.write (color_switch_txt(color))
def stdout_write_flush(txt,stAfter="\r",color=None):
if color:txt= color_txt(color,txt)
stdout.write("%s%s" %(txt,stAfter) )
stdout.flush()
class timeElapsed(object):
""" overwrite str_dtimes str_deltas to return "" to exclude this form output string
@todo: logging handler
"""
def __init__(self, cnt_max=1,name_str=""):
self.name_str=name_str
self.cnt_max= cnt_max
self.dt_start=datetime.utcnow()
self.dt_last=self.dt_start
self.dt_current=self.dt_start
self.cnt=0
self.cnt_last=0
self.cnt_last_dif=0
self.perc_done=0.0
self.time_remaining=0
self.time_elapsed_since_start=timedelta(0)
self.time_elapsed_since_last=timedelta(0)
self.time_remaining =timedelta(0)
self.units=['sec','min','hour']
self.set_cnt_max(cnt_max)
def set_cnt_max(self,val):
self.cnt_max=val
self.frmt_str="%s%d%s" %("%",len(str(val)),"d" )
def set_auto_unit(self,velocity,unit_idx=0):
if velocity < 1 and unit_idx < 2:
velocity=velocity * 60
unit_idx+=1
return self.set_auto_unit(velocity, unit_idx)
else:
return velocity, self.units[unit_idx]
def frmt_max(self,val):
return self.frmt_str % val
def update(self,cur_val,getStr=True,):
cur_val=float(cur_val)
if cur_val > self.cnt_max:self.set_cnt_max(self.cnt_max+int(cur_val/10))
self.dt_current=datetime.utcnow()
self.time_elapsed_since_start = self.dt_current- self.dt_start
self.time_elapsed_since_last=self.dt_current- self.dt_last
self.cnt_last_dif=self.cnt_last-cur_val
self.perc_done=cur_val/self.cnt_max
self.time_remaining =timedelta(seconds=int ( self.time_elapsed_since_start.total_seconds() * ( (1-self.perc_done)/self.perc_done)))
self.cnt=cur_val
self.v_start= self.cnt/self.time_elapsed_since_start.total_seconds()
self.v_last= self.cnt_last_dif/self.time_elapsed_since_last.total_seconds()
self.dt_last=self.dt_current
self.cnt_last=cur_val
return self.toStr() if getStr else True
def update_last(self,cur_val,getStr=True):
self.cnt_max=cur_val
return self.update(cur_val,getStr)
def str_counters(self):
return u"|%s of %s" %(self.frmt_max(self.cnt), self.frmt_max(self.cnt_max))
def str_dtimes(self):
return u"⌚ %s %s %s" % (self.dt_start.strftime(FMT_dtGen),self.dt_current.strftime(FMT_tGen), (self.dt_current+self.time_remaining).strftime(FMT_tGen))
def str_tdeltas(self):
return u"⌛ %s %s %s" %(self._str_tdelta(self.time_elapsed_since_start),self._str_tdelta(self.time_elapsed_since_last), self._str_tdelta(self.time_remaining) )
@staticmethod
def _str_tdelta(tdelta):
str_td=str(tdelta)
tmp=str_td.find(".")
if tmp !=-1 : str_td= str_td[:tmp]
return u"%8s" % str_td
def toStr(self):
return u"[%s:%6.2f%%%s%s%s]" %(self.name_str,100* self.perc_done, self.str_counters(),
self.str_tdeltas(),self.str_dtimes() )
class SubToEvent(object):
''' lightwaight Event handler modeled after Peter Thatcher's http://www.valuedlessons.com/2008/04/events-in-python.html
usage:
watcher = SubToEvent()
def log_docs(doc):print doc
watcher += log_docs
watcher += lambda x:str(x)
watcher.stop()
'''
def __init__(self,channelName=''):
self.channelName=channelName
self.handlers = set()
def handle(self, handler):
self.handlers.add(handler)
return self
def unhandle(self, handler):
try:
self.handlers.remove(handler)
except:
raise ValueError("No_such_handler")
return self
def fire(self, *args, **kargs):
for handler in self.handlers:
handler(*args, **kargs)
def fireTopic(self,topic=None,verb=None,payload=None):
self.fire ((self.channelName,topic,verb,payload))
def getHandlerCount(self):
return len(self.handlers)
__iadd__ = handle
__isub__ = unhandle
__call__ = fire
__len__ = getHandlerCount
class multiOrderedDict(object):
'''
deletes can't be multi
'''
def __init__(self,lst):
self.lstDic=lst
def __getitem__ (self,key):
return self._getOrSetDictItem(key)
def __setitem__(self, key, val):
return self._getOrSetDictItem(key,True,val)
def __delitem__ (self, key):
return self._getOrSetDictItem(key,delete=True)
def get(self,key,orVal=None):
try:
return self[key]
except KeyError:
return orVal
def keys(self):
return[i[0] for i in self.lstDic if self.isKey(i[0])]
def values(self):
return [self[i] for i in self.keys()]
def isKey(self,k):
return True
def _getOrSetDictItem (self,key,setVal=False,newVal=None,multi=False,delete=False):
idx=[]
for n,i in enumerate(self.lstDic):
if i[0]==key and self.isKey(i[0]):
idx.append(n)
if setVal:self.lstDic[n]=[i[0],newVal]
if not multi: break
if len(idx)>0:
if delete:
self.lstDic.pop(idx[0]) #can't be multi
return None
rt= [self.lstDic[i][1:] for i in idx ]
if multi:
return rt
else:
return rt[0][0]
else:
if setVal:
self.lstDic.append([key,newVal])
return newVal
else:
raise KeyError (key)
def toDict(self):
return dict(zip(self.keys(),self.values()))
def toString(self):
return str(self.toDict())
__str__ = toString
class confFileDict(multiOrderedDict):
def __init__(self,path,skipBlanks=True,skipRemarks=True):
self.path=path
with open(self.path) as fin:
rlines=fin.readlines()
if skipBlanks:rlines=[i for i in rlines if not i=='\n']
if skipRemarks:rlines=[i for i in rlines if not i.startswith("#")]
lstDic=[ map(lambda x: x.strip(), i.split("=") ) for i in rlines]
super(confFileDict, self).__init__(lstDic)
def isKey(self,key):
return key !='' and not key.startswith("#")
def toStr(self):
s=''
for i in self.lstDic:
s+= "=".join(i)+'\n'
return s.rstrip()
def toFile(self,path=None):
if not path:path=self.path
with open(path, 'w') as fl:
fl.write(self.toStr)
def PrintTiming(func):
"""set up a decorator function for timing"""
def wrapper(*args, **kargs):
t1 = time.time()
res = func(*args, **kargs)
tel = time.time()-t1
timeformated = time.strftime( "%H:%M:%S",time.gmtime(tel))
print '-'*5 + '%s took %0.3f ms' % (func.func_name + str(kargs) + str(args), (tel)*1000.0) + '|' + timeformated + '|'+ '-'*10
return res
return wrapper
def totalsVertical(orgD,resD,funct,initFunc):
'''Apply funct to resD dict values by orgD values, creates keys in resD if do not exist
usufull for vertical persentage and totals
attention : it is ditractive replacing resD with results
i.e: to incr resD values by OrgD values resultAply(orgDict,resultsDict,lambda x,y:x+y, lambda x:0)
to find perc of org : .resultAply(res,dorg[0].value,lambda x,y:100*y/x if x!=0 else None,None)
'''
for k in orgD.keys():
if isinstance(orgD[k],dict):
if resD.get(k):
totalsVertical(orgD[k],resD[k],funct,initFunc)
else:
if initFunc:
resD[k]=totalsVertical(orgD[k],dictDot({}),funct,initFunc)
else: continue
elif isinstance(orgD[k],(float,int)):
if resD.get(k,False) is False :
if initFunc:
resD[k]=initFunc(orgD[k])
else:
continue
resD[k] = funct(orgD[k],resD[k])
else:
if initFunc:resD[k]=orgD[k]
return resD
def totalsVertSimple(orgD,resD,funct):
''' simplified and faster version of totalsVertical assumes all key/values of orgD are present in resD
'''
for k in orgD.keys():
if isinstance(orgD[k],dict):totalsVertSimple(orgD[k],resD[k],funct)
elif isinstance(orgD[k],(float,int)):orgD[k]=funct(orgD[k],resD[k])
return orgD
def totalsHorizontal(value,a_dict,funct=lambda x,y:100*x/y):
for k in a_dict.keys():
if isinstance(a_dict[k],dict):totalsHorizontal(value,a_dict[k])
elif isinstance(a_dict[k],(float,int)):a_dict[k]=funct(a_dict[k],value)
return a_dict
class TextWrapper(object):
''' http://jrgraphix.net/r/Unicode/ '''
elipsis=u"\u2026" # "…"
numbers=u"₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎"
def __init__(self, maxLen=140,minLen=100, contChr=u'⎘',inclNumbers=True,strFlag=u'',strFirst=u'',strRest=u'',strAll=u''):
self.contChr=contChr
self.inlNumbers=inclNumbers
self.strFlag=strFlag
self.strFirst=strFirst
self.strRest=strRest
self.strAll=strAll
self.maxLen=maxLen
self.minLen=minLen
def compineStr(self,s,cnt,totalCnt=None):
return "%s%s%s%s%s" %(self.strFlag,self.formatNumOfTotal(cnt+1,totalCnt) if self.inlNumbers else u'', self.strAll, self.strFirst if cnt==0 else self.strRest,s)
def splits(self,astr):
n=self.maxLen-1- len(self.contChr)
minLen=self.minLen
cnt=0
s=self.compineStr(astr, cnt)
while len(s) > n:
cnt+=1
rf=s[0:n].rfind(u'\n',minLen)
if rf == -1:rf=s[0:n].rfind(u'.',minLen)
if rf == -1:rf=s[0:n].rfind(u' ',minLen)
spltn = rf+1 if rf !=-1 else n
#print "(%3d) %3d %3d %3d [%s]" %(cnt, rf,n,spltn,s[0:n])
rt=s[:spltn].rstrip()
remainingStr=s[spltn:]
if self.contChr !=u'':
if len(remainingStr)>1:rt+=self.contChr
else:
rt+=remainingStr
remainingStr=u''
yield rt
s=self.compineStr(remainingStr, cnt) if remainingStr !=u'' else u''
yield s
def formatNumOfTotal(self,cnt, totalCnt=None):
return u"%s∕%s" %(self.formatNum(cnt),u'??' if totalCnt is None else self.formatNum(totalCnt)) #'∕' is not '/' but math '\u2215'
def formatNum(self,num):
header=map(int,str(num))
rt=[self.numbers[i] for i in header]
return ''.join(rt)
def format(self,text):
rt=[]
for i in self.splits(text):
if i !=u'':rt.append(i)
if self.inlNumbers:
rt2=[]
maxCnt=len(rt)
for cnt,vl in enumerate(rt):
old= self.formatNumOfTotal(cnt+1,None)
new= u'' if maxCnt == 1 else self.formatNumOfTotal(cnt+1,maxCnt)
if new !=u'':new += u' '* (len(old)-len(new))
rt2.append(vl.replace(old, new , 1))
return rt2
return rt
################## tests
def test_timeElapsed(x):
et=timeElapsed(x,"foo")
for i in range(1,x):
sleep(1)
print et.update(i, True)
print et.update_last(i)
###################
| apache-2.0 |
zeinsteinz/tacker | tacker/extensions/nfvo_plugins/network_service.py | 1605 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from tacker.common import exceptions
from tacker.services import service_base
@six.add_metaclass(abc.ABCMeta)
class NSPluginBase(service_base.NFVPluginBase):
@abc.abstractmethod
def create_nsd(self, context, nsd):
pass
@abc.abstractmethod
def delete_nsd(self, context, nsd_id):
pass
@abc.abstractmethod
def get_nsd(self, context, nsd_id, fields=None):
pass
@abc.abstractmethod
def get_nsds(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def create_ns(self, context, ns):
pass
@abc.abstractmethod
def get_nss(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_ns(self, context, ns_id, fields=None):
pass
@abc.abstractmethod
def delete_ns(self, context, ns_id):
pass
class NSDNotFound(exceptions.NotFound):
message = _('NSD %(nsd_id)s could not be found')
class NSNotFound(exceptions.NotFound):
message = _('NS %(ns_id)s could not be found')
| apache-2.0 |
mikethebeer/scraty | src/index.js | 1801 | import React from 'react'
import ReactDOM from 'react-dom'
import { Provider } from 'react-redux'
import { createStore } from 'redux'
import App from './components/App'
import registerServiceWorker from './registerServiceWorker'
import rootReducer from './reducers'
import { HTTP_BACKEND_URL, WS_BACKEND_URL } from './config/config'
import { addStory, deleteStory, updateStory } from './actions/story'
import { addTask, deleteTask, updateTask } from './actions/task'
const store = createStore(rootReducer)
function updateStoryView(action, story) {
switch (action) {
case 'added':
store.dispatch(addStory(story))
break
case 'deleted':
store.dispatch(deleteStory(story))
break
case 'updated':
store.dispatch(updateStory(story))
break
default:
console.warn("update story - no action found")
}
}
function updateTaskView(action, task) {
switch (action) {
case 'added':
store.dispatch(addTask(task))
break
case 'deleted':
store.dispatch(deleteTask(task))
break
case 'updated':
store.dispatch(updateTask(task))
break
default:
console.warn("update task - no action found")
}
}
fetch(HTTP_BACKEND_URL + '/api/stories/')
.then(response => response.json())
.then(data => data.stories.map(story => store.dispatch(addStory(story))))
var ws = new WebSocket(WS_BACKEND_URL + '/websocket')
ws.onmessage = (evt) => {
var data = JSON.parse(evt.data)
if (data.object_type === 'story') {
updateStoryView(data.action, data.object)
} else {
updateTaskView(data.action, data.object)
}
}
ReactDOM.render(
<Provider store={store}>
<App />
</Provider>,
document.getElementById('root')
)
registerServiceWorker()
| apache-2.0 |
aturon/rust | src/librustc/middle/trans/datum.rs | 21852 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* See the section on datums in `doc.rs` for an overview of what
* Datums are and how they are intended to be used.
*/
use lib;
use lib::llvm::ValueRef;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::common::*;
use middle::trans::cleanup;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::expr;
use middle::trans::glue;
use middle::trans::tvec;
use middle::trans::type_of;
use middle::ty;
use util::ppaux::{ty_to_str};
use syntax::ast;
/**
* A `Datum` encapsulates the result of evaluating an expression. It
* describes where the value is stored, what Rust type the value has,
* whether it is addressed by reference, and so forth. Please refer
* the section on datums in `doc.rs` for more details.
*/
#[deriving(Clone)]
pub struct Datum<K> {
/// The llvm value. This is either a pointer to the Rust value or
/// the value itself, depending on `kind` below.
pub val: ValueRef,
/// The rust type of the value.
pub ty: ty::t,
/// Indicates whether this is by-ref or by-value.
pub kind: K,
}
pub struct DatumBlock<'a, K> {
pub bcx: &'a Block<'a>,
pub datum: Datum<K>,
}
pub enum Expr {
/// a fresh value that was produced and which has no cleanup yet
/// because it has not yet "landed" into its permanent home
RvalueExpr(Rvalue),
/// `val` is a pointer into memory for which a cleanup is scheduled
/// (and thus has type *T). If you move out of an Lvalue, you must
/// zero out the memory (FIXME #5016).
LvalueExpr,
}
#[deriving(Clone)]
pub struct Lvalue;
pub struct Rvalue {
pub mode: RvalueMode
}
pub fn Rvalue(m: RvalueMode) -> Rvalue {
Rvalue { mode: m }
}
// Make Datum linear for more type safety.
impl Drop for Rvalue {
fn drop(&mut self) { }
}
#[deriving(Eq, TotalEq, Hash)]
pub enum RvalueMode {
/// `val` is a pointer to the actual value (and thus has type *T)
ByRef,
/// `val` is the actual value (*only used for immediates* like ints, ptrs)
ByValue,
}
pub fn Datum<K:KindOps>(val: ValueRef, ty: ty::t, kind: K) -> Datum<K> {
Datum { val: val, ty: ty, kind: kind }
}
pub fn DatumBlock<'a, K>(bcx: &'a Block<'a>,
datum: Datum<K>)
-> DatumBlock<'a, K> {
DatumBlock { bcx: bcx, datum: datum }
}
pub fn immediate_rvalue(val: ValueRef, ty: ty::t) -> Datum<Rvalue> {
return Datum(val, ty, Rvalue(ByValue));
}
pub fn immediate_rvalue_bcx<'a>(bcx: &'a Block<'a>,
val: ValueRef,
ty: ty::t)
-> DatumBlock<'a, Rvalue> {
return DatumBlock(bcx, immediate_rvalue(val, ty))
}
pub fn lvalue_scratch_datum<'a, A>(bcx: &'a Block<'a>,
ty: ty::t,
name: &str,
zero: bool,
scope: cleanup::ScopeId,
arg: A,
populate: |A, &'a Block<'a>, ValueRef|
-> &'a Block<'a>)
-> DatumBlock<'a, Lvalue> {
/*!
* Allocates temporary space on the stack using alloca() and
* returns a by-ref Datum pointing to it. The memory will be
* dropped upon exit from `scope`. The callback `populate` should
* initialize the memory. If `zero` is true, the space will be
* zeroed when it is allocated; this is not necessary unless `bcx`
* does not dominate the end of `scope`.
*/
let llty = type_of::type_of(bcx.ccx(), ty);
let scratch = alloca_maybe_zeroed(bcx, llty, name, zero);
// Subtle. Populate the scratch memory *before* scheduling cleanup.
let bcx = populate(arg, bcx, scratch);
bcx.fcx.schedule_drop_mem(scope, scratch, ty);
DatumBlock(bcx, Datum(scratch, ty, Lvalue))
}
pub fn rvalue_scratch_datum(bcx: &Block,
ty: ty::t,
name: &str)
-> Datum<Rvalue> {
/*!
* Allocates temporary space on the stack using alloca() and
* returns a by-ref Datum pointing to it. If `zero` is true, the
* space will be zeroed when it is allocated; this is normally not
* necessary, but in the case of automatic rooting in match
* statements it is possible to have temporaries that may not get
* initialized if a certain arm is not taken, so we must zero
* them. You must arrange any cleanups etc yourself!
*/
let llty = type_of::type_of(bcx.ccx(), ty);
let scratch = alloca_maybe_zeroed(bcx, llty, name, false);
Datum(scratch, ty, Rvalue(ByRef))
}
pub fn appropriate_rvalue_mode(ccx: &CrateContext, ty: ty::t) -> RvalueMode {
/*!
* Indicates the "appropriate" mode for this value,
* which is either by ref or by value, depending
* on whether type is immediate or not.
*/
if type_is_zero_size(ccx, ty) {
ByValue
} else if type_is_immediate(ccx, ty) {
ByValue
} else {
ByRef
}
}
fn add_rvalue_clean(mode: RvalueMode,
fcx: &FunctionContext,
scope: cleanup::ScopeId,
val: ValueRef,
ty: ty::t) {
match mode {
ByValue => { fcx.schedule_drop_immediate(scope, val, ty); }
ByRef => { fcx.schedule_drop_mem(scope, val, ty); }
}
}
pub trait KindOps {
/**
* Take appropriate action after the value in `datum` has been
* stored to a new location.
*/
fn post_store<'a>(&self,
bcx: &'a Block<'a>,
val: ValueRef,
ty: ty::t)
-> &'a Block<'a>;
/**
* True if this mode is a reference mode, meaning that the datum's
* val field is a pointer to the actual value
*/
fn is_by_ref(&self) -> bool;
/**
* Converts to an Expr kind
*/
fn to_expr_kind(self) -> Expr;
}
impl KindOps for Rvalue {
fn post_store<'a>(&self,
bcx: &'a Block<'a>,
_val: ValueRef,
_ty: ty::t)
-> &'a Block<'a> {
// No cleanup is scheduled for an rvalue, so we don't have
// to do anything after a move to cancel or duplicate it.
bcx
}
fn is_by_ref(&self) -> bool {
self.mode == ByRef
}
fn to_expr_kind(self) -> Expr {
RvalueExpr(self)
}
}
impl KindOps for Lvalue {
fn post_store<'a>(&self,
bcx: &'a Block<'a>,
val: ValueRef,
ty: ty::t)
-> &'a Block<'a> {
/*!
* If an lvalue is moved, we must zero out the memory in which
* it resides so as to cancel cleanup. If an @T lvalue is
* copied, we must increment the reference count.
*/
if ty::type_needs_drop(bcx.tcx(), ty) {
if ty::type_moves_by_default(bcx.tcx(), ty) {
// cancel cleanup of affine values by zeroing out
let () = zero_mem(bcx, val, ty);
bcx
} else {
// incr. refcount for @T or newtype'd @T
glue::take_ty(bcx, val, ty)
}
} else {
bcx
}
}
fn is_by_ref(&self) -> bool {
true
}
fn to_expr_kind(self) -> Expr {
LvalueExpr
}
}
impl KindOps for Expr {
fn post_store<'a>(&self,
bcx: &'a Block<'a>,
val: ValueRef,
ty: ty::t)
-> &'a Block<'a> {
match *self {
LvalueExpr => Lvalue.post_store(bcx, val, ty),
RvalueExpr(ref r) => r.post_store(bcx, val, ty),
}
}
fn is_by_ref(&self) -> bool {
match *self {
LvalueExpr => Lvalue.is_by_ref(),
RvalueExpr(ref r) => r.is_by_ref()
}
}
fn to_expr_kind(self) -> Expr {
self
}
}
impl Datum<Rvalue> {
pub fn add_clean(self,
fcx: &FunctionContext,
scope: cleanup::ScopeId)
-> ValueRef {
/*!
* Schedules a cleanup for this datum in the given scope.
* That means that this datum is no longer an rvalue datum;
* hence, this function consumes the datum and returns the
* contained ValueRef.
*/
add_rvalue_clean(self.kind.mode, fcx, scope, self.val, self.ty);
self.val
}
pub fn to_lvalue_datum_in_scope<'a>(self,
bcx: &'a Block<'a>,
name: &str,
scope: cleanup::ScopeId)
-> DatumBlock<'a, Lvalue> {
/*!
* Returns an lvalue datum (that is, a by ref datum with
* cleanup scheduled). If `self` is not already an lvalue,
* cleanup will be scheduled in the temporary scope for `expr_id`.
*/
let fcx = bcx.fcx;
match self.kind.mode {
ByRef => {
add_rvalue_clean(ByRef, fcx, scope, self.val, self.ty);
DatumBlock(bcx, Datum(self.val, self.ty, Lvalue))
}
ByValue => {
lvalue_scratch_datum(
bcx, self.ty, name, false, scope, self,
|this, bcx, llval| this.store_to(bcx, llval))
}
}
}
pub fn to_ref_datum<'a>(self, bcx: &'a Block<'a>) -> DatumBlock<'a, Rvalue> {
let mut bcx = bcx;
match self.kind.mode {
ByRef => DatumBlock(bcx, self),
ByValue => {
let scratch = rvalue_scratch_datum(bcx, self.ty, "to_ref");
bcx = self.store_to(bcx, scratch.val);
DatumBlock(bcx, scratch)
}
}
}
pub fn to_appropriate_datum<'a>(self,
bcx: &'a Block<'a>)
-> DatumBlock<'a, Rvalue> {
match self.appropriate_rvalue_mode(bcx.ccx()) {
ByRef => {
self.to_ref_datum(bcx)
}
ByValue => {
match self.kind.mode {
ByValue => DatumBlock(bcx, self),
ByRef => {
let llval = load(bcx, self.val, self.ty);
DatumBlock(bcx, Datum(llval, self.ty, Rvalue(ByValue)))
}
}
}
}
}
}
/**
* Methods suitable for "expr" datums that could be either lvalues or
* rvalues. These include coercions into lvalues/rvalues but also a number
* of more general operations. (Some of those operations could be moved to
* the more general `impl<K> Datum<K>`, but it's convenient to have them
* here since we can `match self.kind` rather than having to implement
* generic methods in `KindOps`.)
*/
impl Datum<Expr> {
fn match_kind<R>(self,
if_lvalue: |Datum<Lvalue>| -> R,
if_rvalue: |Datum<Rvalue>| -> R)
-> R {
let Datum { val, ty, kind } = self;
match kind {
LvalueExpr => if_lvalue(Datum(val, ty, Lvalue)),
RvalueExpr(r) => if_rvalue(Datum(val, ty, r)),
}
}
#[allow(dead_code)] // potentially useful
pub fn assert_lvalue(self, bcx: &Block) -> Datum<Lvalue> {
/*!
* Asserts that this datum *is* an lvalue and returns it.
*/
self.match_kind(
|d| d,
|_| bcx.sess().bug("assert_lvalue given rvalue"))
}
pub fn assert_rvalue(self, bcx: &Block) -> Datum<Rvalue> {
/*!
* Asserts that this datum *is* an lvalue and returns it.
*/
self.match_kind(
|_| bcx.sess().bug("assert_rvalue given lvalue"),
|r| r)
}
pub fn store_to_dest<'a>(self,
bcx: &'a Block<'a>,
dest: expr::Dest,
expr_id: ast::NodeId)
-> &'a Block<'a> {
match dest {
expr::Ignore => {
self.add_clean_if_rvalue(bcx, expr_id);
bcx
}
expr::SaveIn(addr) => {
self.store_to(bcx, addr)
}
}
}
pub fn add_clean_if_rvalue<'a>(self,
bcx: &'a Block<'a>,
expr_id: ast::NodeId) {
/*!
* Arranges cleanup for `self` if it is an rvalue. Use when
* you are done working with a value that may need drop.
*/
self.match_kind(
|_| { /* Nothing to do, cleanup already arranged */ },
|r| {
let scope = cleanup::temporary_scope(bcx.tcx(), expr_id);
r.add_clean(bcx.fcx, scope);
})
}
pub fn clean<'a>(self,
bcx: &'a Block<'a>,
name: &'static str,
expr_id: ast::NodeId)
-> &'a Block<'a> {
/*!
* Ensures that `self` will get cleaned up, if it is not an lvalue
* already.
*/
self.to_lvalue_datum(bcx, name, expr_id).bcx
}
pub fn to_lvalue_datum<'a>(self,
bcx: &'a Block<'a>,
name: &str,
expr_id: ast::NodeId)
-> DatumBlock<'a, Lvalue> {
self.match_kind(
|l| DatumBlock(bcx, l),
|r| {
let scope = cleanup::temporary_scope(bcx.tcx(), expr_id);
r.to_lvalue_datum_in_scope(bcx, name, scope)
})
}
pub fn to_rvalue_datum<'a>(self,
bcx: &'a Block<'a>,
name: &'static str)
-> DatumBlock<'a, Rvalue> {
/*!
* Ensures that we have an rvalue datum (that is, a datum with
* no cleanup scheduled).
*/
self.match_kind(
|l| {
let mut bcx = bcx;
match l.appropriate_rvalue_mode(bcx.ccx()) {
ByRef => {
let scratch = rvalue_scratch_datum(bcx, l.ty, name);
bcx = l.store_to(bcx, scratch.val);
DatumBlock(bcx, scratch)
}
ByValue => {
let v = load(bcx, l.val, l.ty);
bcx = l.kind.post_store(bcx, l.val, l.ty);
DatumBlock(bcx, Datum(v, l.ty, Rvalue(ByValue)))
}
}
},
|r| DatumBlock(bcx, r))
}
}
/**
* Methods suitable only for lvalues. These include the various
* operations to extract components out of compound data structures,
* such as extracting the field from a struct or a particular element
* from an array.
*/
impl Datum<Lvalue> {
pub fn to_llref(self) -> ValueRef {
/*!
* Converts a datum into a by-ref value. The datum type must
* be one which is always passed by reference.
*/
self.val
}
pub fn get_element(&self,
ty: ty::t,
gep: |ValueRef| -> ValueRef)
-> Datum<Lvalue> {
Datum {
val: gep(self.val),
kind: Lvalue,
ty: ty,
}
}
pub fn get_vec_base_and_len<'a>(&self, bcx: &'a Block<'a>) -> (ValueRef, ValueRef) {
//! Converts a vector into the slice pair.
tvec::get_base_and_len(bcx, self.val, self.ty)
}
}
fn load<'a>(bcx: &'a Block<'a>, llptr: ValueRef, ty: ty::t) -> ValueRef {
/*!
* Private helper for loading from a by-ref datum. Handles various
* special cases where the type gives us better information about
* what we are loading.
*/
if type_is_zero_size(bcx.ccx(), ty) {
C_undef(type_of::type_of(bcx.ccx(), ty))
} else if ty::type_is_bool(ty) {
LoadRangeAssert(bcx, llptr, 0, 2, lib::llvm::False)
} else if ty::type_is_char(ty) {
// a char is a unicode codepoint, and so takes values from 0
// to 0x10FFFF inclusive only.
LoadRangeAssert(bcx, llptr, 0, 0x10FFFF + 1, lib::llvm::False)
} else {
Load(bcx, llptr)
}
}
/**
* Generic methods applicable to any sort of datum.
*/
impl<K:KindOps> Datum<K> {
pub fn to_expr_datum(self) -> Datum<Expr> {
let Datum { val, ty, kind } = self;
Datum { val: val, ty: ty, kind: kind.to_expr_kind() }
}
pub fn store_to<'a>(self,
bcx: &'a Block<'a>,
dst: ValueRef)
-> &'a Block<'a> {
/*!
* Moves or copies this value into a new home, as appropriate
* depending on the type of the datum. This method consumes
* the datum, since it would be incorrect to go on using the
* datum if the value represented is affine (and hence the value
* is moved).
*/
self.shallow_copy(bcx, dst);
self.kind.post_store(bcx, self.val, self.ty)
}
fn shallow_copy<'a>(&self,
bcx: &'a Block<'a>,
dst: ValueRef)
-> &'a Block<'a> {
/*!
* Helper function that performs a shallow copy of this value
* into `dst`, which should be a pointer to a memory location
* suitable for `self.ty`. `dst` should contain uninitialized
* memory (either newly allocated, zeroed, or dropped).
*
* This function is private to datums because it leaves memory
* in an unstable state, where the source value has been
* copied but not zeroed. Public methods are `store_to` (if
* you no longer need the source value) or
* `shallow_copy_and_take` (if you wish the source value to
* remain valid).
*/
let _icx = push_ctxt("copy_to_no_check");
if type_is_zero_size(bcx.ccx(), self.ty) {
return bcx;
}
if self.kind.is_by_ref() {
memcpy_ty(bcx, dst, self.val, self.ty);
} else {
Store(bcx, self.val, dst);
}
return bcx;
}
pub fn shallow_copy_and_take<'a>(&self,
bcx: &'a Block<'a>,
dst: ValueRef)
-> &'a Block<'a> {
/*!
* Copies the value into a new location and runs any necessary
* take glue on the new location. This function always
* preserves the existing datum as a valid value. Therefore,
* it does not consume `self` and, also, cannot be applied to
* affine values (since they must never be duplicated).
*/
assert!(!ty::type_moves_by_default(bcx.tcx(), self.ty));
let mut bcx = bcx;
bcx = self.shallow_copy(bcx, dst);
glue::take_ty(bcx, dst, self.ty)
}
#[allow(dead_code)] // useful for debugging
pub fn to_str(&self, ccx: &CrateContext) -> StrBuf {
format_strbuf!("Datum({}, {}, {:?})",
ccx.tn.val_to_str(self.val),
ty_to_str(ccx.tcx(), self.ty),
self.kind)
}
pub fn appropriate_rvalue_mode(&self, ccx: &CrateContext) -> RvalueMode {
/*! See the `appropriate_rvalue_mode()` function */
appropriate_rvalue_mode(ccx, self.ty)
}
pub fn to_llscalarish<'a>(self, bcx: &'a Block<'a>) -> ValueRef {
/*!
* Converts `self` into a by-value `ValueRef`. Consumes this
* datum (i.e., absolves you of responsibility to cleanup the
* value). For this to work, the value must be something
* scalar-ish (like an int or a pointer) which (1) does not
* require drop glue and (2) is naturally passed around by
* value, and not by reference.
*/
assert!(!ty::type_needs_drop(bcx.tcx(), self.ty));
assert!(self.appropriate_rvalue_mode(bcx.ccx()) == ByValue);
if self.kind.is_by_ref() {
load(bcx, self.val, self.ty)
} else {
self.val
}
}
pub fn to_llbool<'a>(self, bcx: &'a Block<'a>) -> ValueRef {
assert!(ty::type_is_bool(self.ty) || ty::type_is_bot(self.ty))
let cond_val = self.to_llscalarish(bcx);
bool_to_i1(bcx, cond_val)
}
}
impl<'a, K:KindOps> DatumBlock<'a, K> {
pub fn to_expr_datumblock(self) -> DatumBlock<'a, Expr> {
DatumBlock(self.bcx, self.datum.to_expr_datum())
}
}
impl<'a> DatumBlock<'a, Expr> {
pub fn store_to_dest(self,
dest: expr::Dest,
expr_id: ast::NodeId) -> &'a Block<'a> {
let DatumBlock { bcx, datum } = self;
datum.store_to_dest(bcx, dest, expr_id)
}
pub fn to_llbool(self) -> Result<'a> {
let DatumBlock { datum, bcx } = self;
Result::new(bcx, datum.to_llbool(bcx))
}
}
| apache-2.0 |
googleapis/java-dataproc-metastore | proto-google-cloud-dataproc-metastore-v1alpha/src/main/java/com/google/cloud/metastore/v1alpha/ListMetadataImportsRequestOrBuilder.java | 5131 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/metastore/v1alpha/metastore.proto
package com.google.cloud.metastore.v1alpha;
public interface ListMetadataImportsRequestOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.metastore.v1alpha.ListMetadataImportsRequest)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Required. The relative resource name of the service whose metadata imports to
* list, in the following form:
* `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
java.lang.String getParent();
/**
*
*
* <pre>
* Required. The relative resource name of the service whose metadata imports to
* list, in the following form:
* `projects/{project_number}/locations/{location_id}/services/{service_id}/metadataImports`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
com.google.protobuf.ByteString getParentBytes();
/**
*
*
* <pre>
* Optional. The maximum number of imports to return. The response may contain less
* than the maximum number. If unspecified, no more than 500 imports are
* returned. The maximum value is 1000; values above 1000 are changed to 1000.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
int getPageSize();
/**
*
*
* <pre>
* Optional. A page token, received from a previous [DataprocMetastore.ListServices][google.cloud.metastore.v1alpha.DataprocMetastore.ListServices]
* call. Provide this token to retrieve the subsequent page.
* To retrieve the first page, supply an empty page token.
* When paginating, other parameters provided to
* [DataprocMetastore.ListServices][google.cloud.metastore.v1alpha.DataprocMetastore.ListServices] must match the call that provided the
* page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
java.lang.String getPageToken();
/**
*
*
* <pre>
* Optional. A page token, received from a previous [DataprocMetastore.ListServices][google.cloud.metastore.v1alpha.DataprocMetastore.ListServices]
* call. Provide this token to retrieve the subsequent page.
* To retrieve the first page, supply an empty page token.
* When paginating, other parameters provided to
* [DataprocMetastore.ListServices][google.cloud.metastore.v1alpha.DataprocMetastore.ListServices] must match the call that provided the
* page token.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
com.google.protobuf.ByteString getPageTokenBytes();
/**
*
*
* <pre>
* Optional. The filter to apply to list results.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
java.lang.String getFilter();
/**
*
*
* <pre>
* Optional. The filter to apply to list results.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
com.google.protobuf.ByteString getFilterBytes();
/**
*
*
* <pre>
* Optional. Specify the ordering of results as described in [Sorting
* Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
* If not specified, the results will be sorted in the default order.
* </pre>
*
* <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The orderBy.
*/
java.lang.String getOrderBy();
/**
*
*
* <pre>
* Optional. Specify the ordering of results as described in [Sorting
* Order](https://cloud.google.com/apis/design/design_patterns#sorting_order).
* If not specified, the results will be sorted in the default order.
* </pre>
*
* <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for orderBy.
*/
com.google.protobuf.ByteString getOrderByBytes();
}
| apache-2.0 |
bcbroussard/kubernetes-travis | pkg/kubectl/stop.go | 3205 | /*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubectl
import (
"fmt"
"time"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api"
"github.com/GoogleCloudPlatform/kubernetes/pkg/api/meta"
"github.com/GoogleCloudPlatform/kubernetes/pkg/client"
)
const (
Interval = time.Millisecond * 100
Timeout = time.Second * 20
)
// A Reaper handles terminating an object as gracefully as possible.
type Reaper interface {
Stop(namespace, name string, gracePeriod *api.DeleteOptions) (string, error)
}
type NoSuchReaperError struct {
kind string
}
func (n *NoSuchReaperError) Error() string {
return fmt.Sprintf("no reaper has been implemented for %q", n.kind)
}
func IsNoSuchReaperError(err error) bool {
_, ok := err.(*NoSuchReaperError)
return ok
}
func ReaperFor(kind string, c client.Interface) (Reaper, error) {
switch kind {
case "ReplicationController":
return &ReplicationControllerReaper{c, Interval, Timeout}, nil
case "Pod":
return &PodReaper{c}, nil
case "Service":
return &ServiceReaper{c}, nil
}
return nil, &NoSuchReaperError{kind}
}
type ReplicationControllerReaper struct {
client.Interface
pollInterval, timeout time.Duration
}
type PodReaper struct {
client.Interface
}
type ServiceReaper struct {
client.Interface
}
type objInterface interface {
Delete(name string) error
Get(name string) (meta.Interface, error)
}
func (reaper *ReplicationControllerReaper) Stop(namespace, name string, gracePeriod *api.DeleteOptions) (string, error) {
rc := reaper.ReplicationControllers(namespace)
resizer, err := ResizerFor("ReplicationController", NewResizerClient(*reaper))
if err != nil {
return "", err
}
retry := NewRetryParams(reaper.pollInterval, reaper.timeout)
waitForReplicas := NewRetryParams(reaper.pollInterval, reaper.timeout)
if err = resizer.Resize(namespace, name, 0, nil, retry, waitForReplicas); err != nil {
return "", err
}
if err := rc.Delete(name); err != nil {
return "", err
}
return fmt.Sprintf("%s stopped", name), nil
}
func (reaper *PodReaper) Stop(namespace, name string, gracePeriod *api.DeleteOptions) (string, error) {
pods := reaper.Pods(namespace)
_, err := pods.Get(name)
if err != nil {
return "", err
}
if err := pods.Delete(name, gracePeriod); err != nil {
return "", err
}
return fmt.Sprintf("%s stopped", name), nil
}
func (reaper *ServiceReaper) Stop(namespace, name string, gracePeriod *api.DeleteOptions) (string, error) {
services := reaper.Services(namespace)
_, err := services.Get(name)
if err != nil {
return "", err
}
if err := services.Delete(name); err != nil {
return "", err
}
return fmt.Sprintf("%s stopped", name), nil
}
| apache-2.0 |
divinespear/jpa-schema-gradle-plugin | src/functionalTest/resources/java/io/github/divinespear/model/ManyColumns.java | 6120 | package io.github.divinespear.model;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import javax.persistence.*;
/**
* Test JPA Model
*
* @author divinespear
*/
@Entity
@Access(AccessType.FIELD)
@Table(name = "MANY_COLUMN_TABLE")
public class ManyColumns {
@Id
@GeneratedValue
private Long id;
private String column00;
private String column01;
private String column02;
private String column03;
private String column04;
private String column05;
private String column06;
private String column07;
private String column08;
private String column09;
private String column10;
private String column11;
private String column12;
private String column13;
private String column14;
private String column15;
private String column16;
private String column17;
private String column18;
private String column19;
private String column20;
private String column21;
private String column22;
private String column23;
private String column24;
private String column25;
private String column26;
private String column27;
private String column28;
private String column29;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getColumn00() {
return column00;
}
public void setColumn00(String column00) {
this.column00 = column00;
}
public String getColumn01() {
return column01;
}
public void setColumn01(String column01) {
this.column01 = column01;
}
public String getColumn02() {
return column02;
}
// Boilerplate accessors for the raw String fields column02 through column29.
// Each getter/setter pair exposes one column verbatim: no validation,
// trimming, or type conversion is applied (null is stored and returned as-is).
public void setColumn02(String column02) {
this.column02 = column02;
}
public String getColumn03() {
return column03;
}
public void setColumn03(String column03) {
this.column03 = column03;
}
public String getColumn04() {
return column04;
}
public void setColumn04(String column04) {
this.column04 = column04;
}
public String getColumn05() {
return column05;
}
public void setColumn05(String column05) {
this.column05 = column05;
}
public String getColumn06() {
return column06;
}
public void setColumn06(String column06) {
this.column06 = column06;
}
public String getColumn07() {
return column07;
}
public void setColumn07(String column07) {
this.column07 = column07;
}
public String getColumn08() {
return column08;
}
public void setColumn08(String column08) {
this.column08 = column08;
}
public String getColumn09() {
return column09;
}
public void setColumn09(String column09) {
this.column09 = column09;
}
public String getColumn10() {
return column10;
}
public void setColumn10(String column10) {
this.column10 = column10;
}
public String getColumn11() {
return column11;
}
public void setColumn11(String column11) {
this.column11 = column11;
}
public String getColumn12() {
return column12;
}
public void setColumn12(String column12) {
this.column12 = column12;
}
public String getColumn13() {
return column13;
}
public void setColumn13(String column13) {
this.column13 = column13;
}
public String getColumn14() {
return column14;
}
public void setColumn14(String column14) {
this.column14 = column14;
}
public String getColumn15() {
return column15;
}
public void setColumn15(String column15) {
this.column15 = column15;
}
public String getColumn16() {
return column16;
}
public void setColumn16(String column16) {
this.column16 = column16;
}
public String getColumn17() {
return column17;
}
public void setColumn17(String column17) {
this.column17 = column17;
}
public String getColumn18() {
return column18;
}
public void setColumn18(String column18) {
this.column18 = column18;
}
public String getColumn19() {
return column19;
}
public void setColumn19(String column19) {
this.column19 = column19;
}
public String getColumn20() {
return column20;
}
public void setColumn20(String column20) {
this.column20 = column20;
}
public String getColumn21() {
return column21;
}
public void setColumn21(String column21) {
this.column21 = column21;
}
public String getColumn22() {
return column22;
}
public void setColumn22(String column22) {
this.column22 = column22;
}
public String getColumn23() {
return column23;
}
public void setColumn23(String column23) {
this.column23 = column23;
}
public String getColumn24() {
return column24;
}
public void setColumn24(String column24) {
this.column24 = column24;
}
public String getColumn25() {
return column25;
}
public void setColumn25(String column25) {
this.column25 = column25;
}
public String getColumn26() {
return column26;
}
public void setColumn26(String column26) {
this.column26 = column26;
}
public String getColumn27() {
return column27;
}
public void setColumn27(String column27) {
this.column27 = column27;
}
public String getColumn28() {
return column28;
}
public void setColumn28(String column28) {
this.column28 = column28;
}
public String getColumn29() {
return column29;
}
public void setColumn29(String column29) {
this.column29 = column29;
}
}
| apache-2.0 |
Symantec/nova-contrail-plugin | contrailrest.py | 6877 | # Copyright 2014 Symantec Corporation.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
__author__ = "chinmay_naik@symantec40.com"
import httplib2
import json
import uuid
from nova.openstack.common import log as logging
from oslo.config import cfg
# Module-level logger for this plugin.
LOG = logging.getLogger(__name__)
# Oslo config options for reaching the Contrail REST API.
# NOTE(review): the variable says "designate" but the option is for Contrail —
# presumably copied from a Designate plugin; kept as-is because it is a
# module-level name other code may reference.
designate_restopts = [
    cfg.StrOpt('contrail_endpoint',
               default='',
               help='Contrail API service endpoint url'),
]
CONF = cfg.CONF
# Makes the option available as CONF.contrail_endpoint.
CONF.register_opts(designate_restopts)
class ContrailRest():
    """Minimal client for the Contrail configuration REST API.

    Resolves the virtual-DNS domain name ("dns zone") configured for a
    tenant by chaining three lookups against the Contrail API:

        project record -> vDNS fq_name -> vDNS UUID -> vDNS domain name

    All methods log and return None on failure instead of raising, so
    callers must check for a None result.
    """

    def __init__(self, tenant_id, tenant_token=None):
        # Canonicalize the tenant id to dashed UUID form; raises ValueError
        # if tenant_id is not a valid UUID.
        self.tenant_id = str(uuid.UUID(tenant_id))
        # Optional auth token, forwarded to Contrail as X-Auth-Token.
        self.token = tenant_token

    def _request_headers(self):
        """Common JSON request headers, including the auth token if set."""
        req_headers = {"Content-Type": "application/json"}
        if self.token:
            req_headers["X-Auth-Token"] = str(self.token)
        return req_headers

    def _get_project_nw_info_by_tenant(self):
        """Fetch this tenant's Contrail project record.

        Returns the decoded "project" dict on HTTP 200, or None on any
        failure (missing endpoint, transport error, non-200 status).
        """
        try:
            req_headers = self._request_headers()
            contrail_ep = CONF.contrail_endpoint
            if contrail_ep:
                uri = contrail_ep + '/project/' + self.tenant_id
                LOG.debug("Contrail RESTAPI URI - %s" % uri)
            else:
                LOG.error("Contrail RESTAPI please provide a valid "
                          "contrail API endpoint")
                return None
            h = httplib2.Http()
            resp, content = h.request(uri, "GET", headers=req_headers)
            LOG.debug("Contrail RESTAPI Resp - %s" % str(resp))
            LOG.debug("Contrail RESTAPI Content - %s" % str(content))
            if content:
                content = json.loads(content)
            if 'status' in resp and resp['status'] in ['200']:
                return content["project"]
            else:
                LOG.error("Contrail RESTAPI generating project nw info from "
                          "Contrail failed")
        except Exception as e:
            error_dict = {'error': str(e), 'tenantID': str(self.tenant_id)}
            LOG.error("Contrail RESTAPI Error generating project nw info from "
                      "Contrail for tenant:%(tenantID)s . Error: %(error)s" % error_dict)
        return None

    def _generate_vDNS_fqname(self):
        """Build the vDNS fully-qualified name for this tenant's project.

        The vDNS name is "<project-name>-virtual-DNS" under the project's
        parent domain.  Returns [domain, vdns_name], or None on failure.
        """
        project_nw_info = self._get_project_nw_info_by_tenant()
        if project_nw_info:
            try:
                project_vDNS = str(project_nw_info['fq_name'][1]) + "-virtual-DNS"
                fq_name = [project_nw_info['fq_name'][0], project_vDNS]
                return fq_name
            except Exception as e:
                error_dict = {'error': str(e), 'tenantID': str(self.tenant_id)}
                LOG.error("Contrail RESTAPI Error generating vDNS fq name for tenant: "
                          "%(tenantID)s . Error: %(error)s" % error_dict)
        else:
            # (fixed: the two message fragments previously concatenated
            # without a space, yielding "generatingfq name.")
            LOG.error("Contrail RESTAPI Project network info is required for "
                      "generating fq name.")
        return None

    def _generate_vDNS_id_by_fqname(self):
        """Translate the vDNS fq_name into its UUID via /fqname-to-id.

        Returns the UUID string, or None on failure.
        """
        vDNS_fqname = self._generate_vDNS_fqname()
        if vDNS_fqname:
            try:
                req_headers = self._request_headers()
                contrail_ep = CONF.contrail_endpoint
                if contrail_ep:
                    uri = contrail_ep + '/fqname-to-id'
                    LOG.debug("Contrail RESTAPI URI - %s" % uri)
                else:
                    LOG.error("Contrail RESTAPI please provide a valid "
                              "contrail API endpoint")
                    return None
                req_body = {
                    "type": "virtual-DNS",
                    "fq_name": vDNS_fqname
                }
                req_body = json.dumps(req_body)
                h = httplib2.Http()
                resp, content = h.request(uri, "POST", headers=req_headers,
                                          body=req_body)
                LOG.debug("Contrail RESTAPI Resp - %s" % str(resp))
                LOG.debug("Contrail RESTAPI Content - %s" % str(content))
                if content:
                    content = json.loads(content)
                if 'status' in resp and resp['status'] in ['200']:
                    return content["uuid"]
                else:
                    LOG.error("Contrail RESTAPI generating vDNS ID from "
                              "Contrail failed")
            except Exception as e:
                error_dict = {'error': str(e), 'tenantID': str(self.tenant_id)}
                LOG.error("Contrail RESTAPI Error generating vDNS ID for tenant: "
                          "%(tenantID)s . Error: %(error)s" % error_dict)
        else:
            # (fixed: message previously read "generatingvDNS ID.")
            LOG.error("Contrail RESTAPI vDNS fqname is required for "
                      "generating vDNS ID.")
        return None

    def generate_tenant_dns_zone(self):
        """Return the tenant's vDNS domain name ("dns zone"), or None."""
        vDNS_id = self._generate_vDNS_id_by_fqname()
        if vDNS_id:
            try:
                req_headers = self._request_headers()
                contrail_ep = CONF.contrail_endpoint
                if contrail_ep:
                    uri = contrail_ep + '/virtual-DNS/' + vDNS_id
                    LOG.debug("Contrail RESTAPI URI - %s" % uri)
                else:
                    LOG.error("Contrail RESTAPI please provide a valid "
                              "contrail API endpoint")
                    return None
                h = httplib2.Http()
                resp, content = h.request(uri, "GET", headers=req_headers)
                LOG.debug("Contrail RESTAPI Resp - %s" % str(resp))
                LOG.debug("Contrail RESTAPI Content - %s" % str(content))
                if content:
                    content = json.loads(content)
                if 'status' in resp and resp['status'] in ['200']:
                    return str(content["virtual-DNS"]["virtual_DNS_data"]["domain_name"])
                else:
                    # (fixed: message was copy-pasted from the vDNS-ID path
                    # and wrongly claimed "generating vDNS ID" failed here)
                    LOG.error("Contrail RESTAPI generating dns zone from "
                              "Contrail failed")
            except Exception as e:
                error_dict = {'error': str(e), 'tenantID': str(self.tenant_id)}
                LOG.error("Contrail RESTAPI Error generating dns zone for tenant: "
                          "%(tenantID)s . Error: %(error)s" % error_dict)
        else:
            LOG.error("Contrail RESTAPI vDNS ID is required for generating tenant "
                      "dns zone.")
        return None
| apache-2.0 |
gustavolcorreia/uri | iniciante/exerc1094.py | 590 | # -*- coding: utf-8 -*-
def uri(x):
    """Read int(x) lines of "<qty> <kind>" pairs from stdin and print the
    lab-animal totals and percentages in the exact URI 1094 output format.

    Kind codes: C = coelhos (rabbits), R = ratos (rats), S = sapos (frogs);
    any other code on a line is ignored.
    """
    # Accumulated amounts per animal code.
    totals = {'C': 0, 'R': 0, 'S': 0}
    for _ in range(int(x)):
        amount, kind = input().split(' ')
        if kind in totals:
            totals[kind] += int(amount)
    c = totals['C']
    r = totals['R']
    s = totals['S']
    overall = c + r + s
    print('Total: {} cobaias\nTotal de coelhos: {}\nTotal de ratos: {}\nTotal de sapos: {}\nPercentual de coelhos: {:.2f} %\nPercentual de ratos: {:.2f} %\nPercentual de sapos: {:.2f} %'
          .format(overall, c, r, s, ((100 / overall) * c), ((100 / overall) * r), ((100 / overall) * s)))
.format(c + r + s, c, r, s, ((100 / (c + r + s)) * c), ((100 / (c + r + s)) * r), ((100 / (c + r + s)) * s)))
# Script entry point: the first stdin line is the number of records to read.
if __name__ == '__main__':
    uri(input())
| apache-2.0 |
burris/dwr | serverside/spring/main/java/org/directwebremoting/spring/SpringConfigurator.java | 9446 | /*
* Copyright 2005-2006 Joe Walker
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.directwebremoting.spring;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.directwebremoting.AjaxFilter;
import org.directwebremoting.Container;
import org.directwebremoting.extend.AccessControl;
import org.directwebremoting.extend.AjaxFilterManager;
import org.directwebremoting.extend.Configurator;
import org.directwebremoting.extend.Converter;
import org.directwebremoting.extend.ConverterManager;
import org.directwebremoting.extend.Creator;
import org.directwebremoting.extend.CreatorManager;
import org.directwebremoting.impl.SignatureParser;
import org.directwebremoting.util.LocalUtil;
import org.springframework.util.StringUtils;
/**
* @author Joe Walker [joe at getahead dot ltd dot uk]
*/
public class SpringConfigurator implements Configurator
{
/* (non-Javadoc)
* @see org.directwebremoting.Configurator#configure(org.directwebremoting.Container)
*/
public void configure(Container container)
{
AccessControl accessControl = container.getBean(AccessControl.class);
AjaxFilterManager ajaxFilterManager = container.getBean(AjaxFilterManager.class);
ConverterManager converterManager = container.getBean(ConverterManager.class);
CreatorManager creatorManager = container.getBean(CreatorManager.class);
// Configure the creator types
if (creatorTypes != null)
{
for (Entry<String, String> entry : creatorTypes.entrySet())
{
String typeName = entry.getKey();
String className = entry.getValue();
creatorManager.addCreatorType(typeName, className);
}
}
// Configure the converter types
if (converterTypes != null)
{
for (Entry<String, String> entry : converterTypes.entrySet())
{
String typeName = entry.getKey();
String className = entry.getValue();
converterManager.addConverterType(typeName, className);
}
}
// Configure the creators
if (creators != null)
{
try
{
for (Entry<String, CreatorConfig> entry : creators.entrySet())
{
String scriptName = entry.getKey();
CreatorConfig creatorConfig = entry.getValue();
if (creatorConfig.getCreator() != null)
{
Creator creator = creatorConfig.getCreator();
creatorManager.addCreator(scriptName, creator);
}
else
{
String creatorName = creatorConfig.getCreatorType();
Map<String, String> params = creatorConfig.getParams();
creatorManager.addCreator(scriptName, creatorName, params);
}
for (String exclude : creatorConfig.getExcludes())
{
accessControl.addExcludeRule(scriptName, exclude);
}
for (String include : creatorConfig.getIncludes())
{
accessControl.addIncludeRule(scriptName, include);
}
Map<String, List<String>> auth = creatorConfig.getAuth();
for (Entry<String, List<String>> constraint : auth.entrySet())
{
for (String role : constraint.getValue())
{
accessControl.addRoleRestriction(scriptName, constraint.getKey(), role);
}
}
List<?> filters = creatorConfig.getFilters();
for (Object obj : filters)
{
if (obj instanceof String)
{
String filterName = (String) obj;
AjaxFilter filter = LocalUtil.classNewInstance(filterName, filterName, AjaxFilter.class);
if (filter != null)
{
ajaxFilterManager.addAjaxFilter(filter, scriptName);
}
}
else if (obj instanceof AjaxFilter)
{
AjaxFilter filter = (AjaxFilter) obj;
ajaxFilterManager.addAjaxFilter(filter, scriptName);
}
else
{
throw new IllegalArgumentException("An invalid filter is added for script '" + scriptName + "'. It should either be the class name of the filter or an instantiated AjaxFilter, but was: '" + obj + "'.");
}
}
}
}
catch (Exception ex)
{
throw new IllegalArgumentException(ex);
}
}
// Configure the converters
if (converters != null)
{
try
{
for (Entry<String, ConverterConfig> entry : converters.entrySet())
{
String match = entry.getKey();
ConverterConfig converterConfig = entry.getValue();
Map<String, String> params = converterConfig.getParams();
if (!converterConfig.getIncludes().isEmpty())
{
params.put("include", StringUtils.collectionToCommaDelimitedString(converterConfig.getIncludes()));
}
if (!converterConfig.getExcludes().isEmpty())
{
params.put("exclude", StringUtils.collectionToCommaDelimitedString(converterConfig.getExcludes()));
}
// params.put("force", Boolean.valueOf(converterConfig.isForce()));
if (StringUtils.hasText(converterConfig.getJavascriptClassName()))
{
params.put("javascript", converterConfig.getJavascriptClassName());
}
String type = converterConfig.getType();
if(type.startsWith("preconfigured"))
{
converterManager.addConverter(match, (Converter) container.getBean(type.substring(14)));
}
else
{
converterManager.addConverter(match, type, params);
}
}
}
catch (Exception ex)
{
throw new IllegalArgumentException("An error occurred while configuring the converters.");
}
}
// Configure the signatures
if (StringUtils.hasText(signatures)) {
SignatureParser sigp = new SignatureParser(converterManager, creatorManager);
sigp.parse(signatures);
}
}
/**
* Setter for the map of Creator types
* @param creatorTypes The new creator types map
*/
public void setCreatorTypes(Map<String, String> creatorTypes)
{
this.creatorTypes = creatorTypes;
}
/**
* Setter for the map of Converter types
* @param converterTypes The new creator types map
*/
public void setConverterTypes(Map<String, String> converterTypes)
{
this.converterTypes = converterTypes;
}
/**
* Setter for the map of real Creators
* @param creators The new creator map
*/
public void setCreators(Map<String, CreatorConfig> creators)
{
this.creators = creators;
}
/**
* Setter for the map of real Converter
* @param converters The new creator map
*/
public void setConverters(Map<String, ConverterConfig> converters)
{
this.converters = converters;
}
/**
* @param signatures the signatures to set
*/
public void setSignatures(String signatures)
{
this.signatures = signatures;
}
/**
* @return the signatures
*/
public String getSignatures()
{
return signatures;
}
/**
* The map of Converter types
*/
private Map<String, String> creatorTypes;
/**
* The map of Converter types
*/
private Map<String, String> converterTypes;
/**
* The map of real Creators
*/
private Map<String, CreatorConfig> creators;
/**
* The map of real Converter
*/
private Map<String, ConverterConfig> converters;
/**
* The string of Signatures
*/
private String signatures;
} | apache-2.0 |
GoogleCloudPlatform/deploymentmanager-samples | examples/v2/gke/python/cluster.py | 4278 | # Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create configuration to deploy GKE cluster."""
import six
def GenerateConfig(context):
  """Return the Deployment Manager resource configuration for a GKE cluster
  plus a type provider exposing that cluster's Kubernetes API.

  Args:
    context: DM template context; reads env['deployment'], env['name'],
      properties['zone'] and properties['initialNodeCount'].
  """
  name_prefix = context.env['deployment'] + '-' + context.env['name']
  cluster_name = name_prefix
  type_name = name_prefix + '-type'

  # OAuth scopes granted to the cluster's node service accounts.
  oauth_scopes = [
      'https://www.googleapis.com/auth/' + suffix
      for suffix in ('compute', 'devstorage.read_only',
                     'logging.write', 'monitoring')
  ]

  cluster_resource = {
      'name': cluster_name,
      'type': 'gcp-types/container-v1:projects.zones.clusters',
      'properties': {
          'zone': context.properties['zone'],
          'cluster': {
              'name': cluster_name,
              'initialNodeCount': context.properties['initialNodeCount'],
              'nodeConfig': {
                  'oauthScopes': oauth_scopes,
              },
          },
      },
  }

  # Request rewrites so DM calls line up with Kubernetes API conventions:
  # metadata.name doubles as the path 'name' parameter
  # (https://github.com/kubernetes/community/blob/master
  #  /contributors/devel/api-conventions.md), auth uses a bearer token,
  # and resourceVersion is carried over on updates.
  input_mappings = [{
      'fieldName': 'name',
      'location': 'PATH',
      'methodMatch': '^(GET|DELETE|PUT|POST|PATCH)$',
      'value': '$.ifNull($.resource.properties.metadata.name, $.resource.name)',
  }, {
      'fieldName': 'metadata.name',
      'location': 'BODY',
      'methodMatch': '^(PUT|POST)$',
      'value': '$.ifNull($.resource.properties.metadata.name, $.resource.name)',
  }, {
      'fieldName': 'Authorization',
      'location': 'HEADER',
      'value': '$.concat("Bearer ",$.googleOauth2AccessToken())',
  }, {
      'fieldName': 'metadata.resourceVersion',
      'location': 'BODY',
      'methodMatch': '^(PUT|PATCH)$',
      'value': '$.resource.self.metadata.resourceVersion',
  }, {
      'fieldName': 'id',
      'location': 'PATH',
      'methodMatch': '^(GET|DELETE|PUT|POST|PATCH)$',
      'value': '$.resource.properties.id',
  }, {
      'fieldName': 'namespace',
      'location': 'PATH',
      'methodMatch': '^(GET|DELETE|PUT|POST|PATCH)$',
      'value': '$.resource.properties.namespace',
  }]

  type_provider = {
      'name': type_name,
      'type': 'deploymentmanager.v2beta.typeProvider',
      'properties': {
          'options': {
              'validationOptions': {
                  # Kubernetes annotates some int fields as string; report
                  # schema mismatches as warnings rather than failures.
                  # https://github.com/kubernetes/kubernetes/issues/2971
                  'schemaValidation': 'IGNORE_WITH_WARNINGS'
              },
              'inputMappings': input_mappings,
          },
          'descriptorUrl':
              'https://$(ref.' + cluster_name + '.endpoint)/openapi/v2',
      },
  }

  return {'resources': [cluster_resource, type_provider]}
| apache-2.0 |
caioximenes/AgoUnibratec | Source/AgoNegocio/MaoDeObraNE.cs | 884 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using AgoBasico;
using AgoDAO;
namespace AgoNegocio
{
public class MaoDeObraNE
{
    /// <summary>
    /// Business-layer facade for MaoDeObra queries; delegates to the DA layer.
    /// NOTE(review): Consultar instantiates MaoDeObraDA directly while
    /// ConsultarUmaMaoDeObra goes through DAFactory — presumably they should
    /// agree; left unchanged pending confirmation of the factory's behavior.
    /// </summary>
    public List<MaoDeObra> Consultar(MaoDeObra maoDeObra)
    {
        try
        {
            List<MaoDeObra> lista = new MaoDeObraDA().ConsultarMaoDeObra(maoDeObra);
            return lista;
        }
        catch (Exception)
        {
            // "throw;" (not "throw ex;") so the original stack trace is preserved.
            throw;
        }
    }

    /// <summary>
    /// Fetches a single MaoDeObra record matching the given filter.
    /// </summary>
    public MaoDeObra ConsultarUmaMaoDeObra(MaoDeObra maoDeObra)
    {
        try
        {
            maoDeObra = DAFactory.createMaoDeObraDA().ConsultarUmaMaoDeObra(maoDeObra);
            return maoDeObra;
        }
        catch (Exception)
        {
            // Rethrow without resetting the stack trace.
            throw;
        }
    }
}
}
| apache-2.0 |
SoftwareKing/ACMeOJ | JKoj06/src/com/xujin/oj/utils/JsonUtil.java | 1572 | package com.xujin.oj.utils;
import java.io.IOException;
import java.io.StringWriter;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
public class JsonUtil {
	// Eagerly created singletons: JVM class initialization guarantees
	// thread-safe, exactly-once construction.  The previous lazy
	// "if (x == null) x = new ..." checks were racy under concurrent access.
	private static final JsonUtil INSTANCE = new JsonUtil();
	private static final JsonFactory FACTORY = new JsonFactory();
	private static final ObjectMapper MAPPER = new ObjectMapper();

	private JsonUtil(){}

	/** Returns the shared JsonUtil instance. */
	public static JsonUtil getInstance() {
		return INSTANCE;
	}

	/** Returns the shared (thread-safe) Jackson ObjectMapper. */
	public static ObjectMapper getMapper() {
		return MAPPER;
	}

	/** Returns the shared Jackson JsonFactory. */
	public static JsonFactory getFactory() {
		return FACTORY;
	}

	/**
	 * Serializes the given object to a JSON string.
	 *
	 * @param obj the object to serialize
	 * @return the JSON text, or null if serialization fails (error is printed)
	 */
	public String obj2json(Object obj) {
		JsonGenerator jg = null;
		try {
			StringWriter out = new StringWriter();
			// createJsonGenerator is deprecated in newer Jackson 2.x releases;
			// kept for compatibility with the Jackson version this file targets.
			jg = getFactory().createJsonGenerator(out);
			getMapper().writeValue(jg, obj);
			return out.toString();
		} catch (IOException e) {
			e.printStackTrace();
		} finally {
			try {
				if(jg!=null) jg.close();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
		return null;
	}

	/**
	 * Deserializes JSON text into an instance of the given class.
	 *
	 * @param json the JSON text
	 * @param clz the target class
	 * @return the deserialized object, or null if parsing/mapping fails
	 */
	public Object json2obj(String json,Class<?> clz) {
		try {
			// JsonParseException and JsonMappingException are IOException
			// subtypes, so one catch covers all three original cases.
			return getMapper().readValue(json,clz);
		} catch (IOException e) {
			e.printStackTrace();
		}
		return null;
	}
}
| apache-2.0 |
NationalSecurityAgency/ghidra | Ghidra/Features/VersionTracking/src/test.slow/java/ghidra/feature/vt/gui/provider/VTDuplicateSymbolMatchTest.java | 4526 | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.feature.vt.gui.provider;
import static org.junit.Assert.*;
import ghidra.feature.vt.api.correlator.program.DuplicateSymbolNameProgramCorrelatorFactory;
import ghidra.feature.vt.api.main.VTMatchSet;
import ghidra.feature.vt.api.main.VTSession;
import ghidra.feature.vt.gui.VTTestEnv;
import ghidra.framework.plugintool.PluginTool;
import ghidra.program.model.address.Address;
import ghidra.program.model.address.AddressFactory;
import ghidra.program.model.listing.Program;
import ghidra.test.AbstractGhidraHeadedIntegrationTest;
import java.util.List;
import javax.swing.JFrame;
import org.junit.*;
public class VTDuplicateSymbolMatchTest extends AbstractGhidraHeadedIntegrationTest {

	private VTTestEnv env;
	private VTSession session;
	private Program srcProg;
	private Program destProg;

	public VTDuplicateSymbolMatchTest() {
		super();
	}

	/**
	 * Creates a version-tracking session over the two Wallace test programs,
	 * correlated with the duplicate-symbol-name correlator.
	 */
	@Before
	public void setUp() throws Exception {
		env = new VTTestEnv();
		PluginTool tool = env.showTool();
		session =
			env.createSession("VersionTracking/WallaceSrc.dupeStringTest.gzf",
				"VersionTracking/WallaceVersion2",
				new DuplicateSymbolNameProgramCorrelatorFactory());
		assertNotNull(session);
		srcProg = env.getSourceProgram();
		destProg = env.getDestinationProgram();
		JFrame toolFrame = tool.getToolFrame();
		toolFrame.setSize(800, 800);
	}

	@After
	public void tearDown() throws Exception {
		env.dispose();
	}

	/**
	 * Verifies that the duplicate-symbol correlator pairs every combination of
	 * same-named symbols, and does not pair symbols whose names differ only by
	 * a suffix (those belong to the unique-symbol matcher).
	 */
	@Test
	public void testDuplicateSymbolCorrelator() {
		List<VTMatchSet> matchSets = session.getMatchSets();
		assertEquals(3, matchSets.size());

		//make sure these are found by the duplicate symbol matcher:
		//s_Stack_area_around__alloca_memory_00417060 00417060 s_Stack_area_around__alloca_memory_00417060 00417060
		assertTrue(isMatch(addr(srcProg, "00417060"), addr(destProg, "00417060")));
		//s_Stack_area_around__alloca_memory_00417060 00417060 s_Stack_area_around__alloca_memory_00416fd8 00416fd8
		assertTrue(isMatch(addr(srcProg, "00417060"), addr(destProg, "00416fd8")));
		//s_Stack_area_around__alloca_memory_00416fd8 00416fd8 s_Stack_area_around__alloca_memory_00417060 00417060
		assertTrue(isMatch(addr(srcProg, "00416fd8"), addr(destProg, "00417060")));
		//s_Stack_area_around__alloca_memory_00416fd8 00416fd8 s_Stack_area_around__alloca_memory_00416fd8 00416fd8
		assertTrue(isMatch(addr(srcProg, "00416fd8"), addr(destProg, "00416fd8")));
		//s_%s_%s_deployed_on_%s__004166a0 004166a0 s_%s_%s_deployed_on_%s__00416830 00416830
		assertTrue(isMatch(addr(srcProg, "004166a0"), addr(destProg, "00416830")));
		//s_%s_%s_deployed_on_%s__00416830 00416830 s_%s_%s_deployed_on_%s__00416830 00416830
		assertTrue(isMatch(addr(srcProg, "00416830"), addr(destProg, "00416830")));

		//Make sure these are not found by the duplicate matcher (should be found by unique matcher)
		//they were erroneously being found because the old way of removing the address tail removed
		//the _e from these so they all appeared to match each other
		//_initterm_e 00419284 _initterm_e 00419280
		assertFalse(isMatch(addr(srcProg, "00419284"), addr(destProg, "00419280")));
		//_initterm 00419288 _initterm_e 00419280
		assertFalse(isMatch(addr(srcProg, "00419288"), addr(destProg, "00419280")));
		//_initterm_e 00419284 _initterm 00419284
		assertFalse(isMatch(addr(srcProg, "00419284"), addr(destProg, "00419284")));
		//_initterm 00419288 _initterm 00419284
		assertFalse(isMatch(addr(srcProg, "00419288"), addr(destProg, "00419284")));
	}

	/**
	 * Returns true if any match set in the session links the two addresses.
	 */
	public boolean isMatch(Address srcAddr, Address destAddr) {
		// Enhanced-for + isEmpty() replaces the index loop / size() > 0 idiom.
		for (VTMatchSet matchSet : session.getMatchSets()) {
			if (!matchSet.getMatches(srcAddr, destAddr).isEmpty()) {
				return true;
			}
		}
		return false;
	}

	/** Parses an address string in the given program's address space. */
	private Address addr(Program program, String address) {
		AddressFactory addrFactory = program.getAddressFactory();
		return addrFactory.getAddress(address);
	}
}
| apache-2.0 |
ejona86/grpc-java | xds/src/test/java/io/grpc/xds/ClientXdsClientDataTest.java | 47206 | /*
* Copyright 2021 The gRPC Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.xds;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableMap;
import com.google.protobuf.Any;
import com.google.protobuf.BoolValue;
import com.google.protobuf.StringValue;
import com.google.protobuf.UInt32Value;
import com.google.protobuf.UInt64Value;
import com.google.protobuf.util.Durations;
import com.google.re2j.Pattern;
import io.envoyproxy.envoy.config.cluster.v3.Cluster;
import io.envoyproxy.envoy.config.cluster.v3.Cluster.DiscoveryType;
import io.envoyproxy.envoy.config.cluster.v3.Cluster.EdsClusterConfig;
import io.envoyproxy.envoy.config.cluster.v3.Cluster.LbPolicy;
import io.envoyproxy.envoy.config.cluster.v3.Cluster.RingHashLbConfig;
import io.envoyproxy.envoy.config.cluster.v3.Cluster.RingHashLbConfig.HashFunction;
import io.envoyproxy.envoy.config.core.v3.Address;
import io.envoyproxy.envoy.config.core.v3.AggregatedConfigSource;
import io.envoyproxy.envoy.config.core.v3.ConfigSource;
import io.envoyproxy.envoy.config.core.v3.ExtensionConfigSource;
import io.envoyproxy.envoy.config.core.v3.Locality;
import io.envoyproxy.envoy.config.core.v3.RuntimeFractionalPercent;
import io.envoyproxy.envoy.config.core.v3.SocketAddress;
import io.envoyproxy.envoy.config.core.v3.TrafficDirection;
import io.envoyproxy.envoy.config.core.v3.TransportSocket;
import io.envoyproxy.envoy.config.endpoint.v3.Endpoint;
import io.envoyproxy.envoy.config.listener.v3.Filter;
import io.envoyproxy.envoy.config.listener.v3.FilterChain;
import io.envoyproxy.envoy.config.listener.v3.FilterChainMatch;
import io.envoyproxy.envoy.config.listener.v3.Listener;
import io.envoyproxy.envoy.config.listener.v3.ListenerFilter;
import io.envoyproxy.envoy.config.route.v3.DirectResponseAction;
import io.envoyproxy.envoy.config.route.v3.FilterAction;
import io.envoyproxy.envoy.config.route.v3.RedirectAction;
import io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.ConnectionProperties;
import io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.FilterState;
import io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.Header;
import io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.QueryParameter;
import io.envoyproxy.envoy.config.route.v3.RouteAction.MaxStreamDuration;
import io.envoyproxy.envoy.config.route.v3.WeightedCluster;
import io.envoyproxy.envoy.extensions.filters.common.fault.v3.FaultDelay;
import io.envoyproxy.envoy.extensions.filters.http.fault.v3.HTTPFault;
import io.envoyproxy.envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager;
import io.envoyproxy.envoy.extensions.filters.network.http_connection_manager.v3.HttpFilter;
import io.envoyproxy.envoy.type.matcher.v3.RegexMatchAndSubstitute;
import io.envoyproxy.envoy.type.matcher.v3.RegexMatcher;
import io.envoyproxy.envoy.type.matcher.v3.RegexMatcher.GoogleRE2;
import io.envoyproxy.envoy.type.v3.FractionalPercent;
import io.envoyproxy.envoy.type.v3.FractionalPercent.DenominatorType;
import io.envoyproxy.envoy.type.v3.Int64Range;
import io.grpc.Status.Code;
import io.grpc.xds.ClientXdsClient.ResourceInvalidException;
import io.grpc.xds.ClientXdsClient.StructOrError;
import io.grpc.xds.Endpoints.LbEndpoint;
import io.grpc.xds.Endpoints.LocalityLbEndpoints;
import io.grpc.xds.FaultConfig.FaultAbort;
import io.grpc.xds.Filter.FilterConfig;
import io.grpc.xds.VirtualHost.Route;
import io.grpc.xds.VirtualHost.Route.RouteAction;
import io.grpc.xds.VirtualHost.Route.RouteAction.ClusterWeight;
import io.grpc.xds.VirtualHost.Route.RouteAction.HashPolicy;
import io.grpc.xds.VirtualHost.Route.RouteMatch;
import io.grpc.xds.VirtualHost.Route.RouteMatch.PathMatcher;
import io.grpc.xds.XdsClient.CdsUpdate;
import io.grpc.xds.internal.Matchers.FractionMatcher;
import io.grpc.xds.internal.Matchers.HeaderMatcher;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class ClientXdsClientDataTest {
@SuppressWarnings("deprecation") // https://github.com/grpc/grpc-java/issues/7467
@Rule
public final ExpectedException thrown = ExpectedException.none();
  // A Route proto with a path match and a cluster RouteAction parses into the
  // equivalent internal Route, with no error detail.
  @Test
  public void parseRoute_withRouteAction() {
    io.envoyproxy.envoy.config.route.v3.Route proto =
        io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
            .setName("route-blade")
            .setMatch(
                io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
                    .setPath("/service/method"))
            .setRoute(
                io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
                    .setCluster("cluster-foo"))
            .build();
    StructOrError<Route> struct = ClientXdsClient.parseRoute(proto, false);
    assertThat(struct.getErrorDetail()).isNull();
    assertThat(struct.getStruct())
        .isEqualTo(
            Route.create(
                RouteMatch.create(PathMatcher.fromPath("/service/method", false),
                    Collections.<HeaderMatcher>emptyList(), null),
                RouteAction.forCluster("cluster-foo", Collections.<HashPolicy>emptyList(), null),
                ImmutableMap.<String, FilterConfig>of()));
  }

  // redirect, direct_response and filter_action route actions are unsupported:
  // parsing must yield an error detail (and no struct) naming the action type.
  @Test
  public void parseRoute_withUnsupportedActionTypes() {
    StructOrError<Route> res;
    io.envoyproxy.envoy.config.route.v3.Route redirectRoute =
        io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
            .setName("route-blade")
            .setMatch(io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPath(""))
            .setRedirect(RedirectAction.getDefaultInstance())
            .build();
    res = ClientXdsClient.parseRoute(redirectRoute, false);
    assertThat(res.getStruct()).isNull();
    assertThat(res.getErrorDetail()).isEqualTo("Unsupported action type: redirect");
    io.envoyproxy.envoy.config.route.v3.Route directResponseRoute =
        io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
            .setName("route-blade")
            .setMatch(io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPath(""))
            .setDirectResponse(DirectResponseAction.getDefaultInstance())
            .build();
    res = ClientXdsClient.parseRoute(directResponseRoute, false);
    assertThat(res.getStruct()).isNull();
    assertThat(res.getErrorDetail()).isEqualTo("Unsupported action type: direct_response");
    io.envoyproxy.envoy.config.route.v3.Route filterRoute =
        io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
            .setName("route-blade")
            .setMatch(io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPath(""))
            .setFilterAction(FilterAction.getDefaultInstance())
            .build();
    res = ClientXdsClient.parseRoute(filterRoute, false);
    assertThat(res.getStruct()).isNull();
    assertThat(res.getErrorDetail()).isEqualTo("Unsupported action type: filter_action");
  }

  // A route whose match uses an unsupported matcher (query parameters) is
  // silently skipped: parseRoute returns null, not an error.
  @Test
  public void parseRoute_skipRouteWithUnsupportedMatcher() {
    io.envoyproxy.envoy.config.route.v3.Route proto =
        io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
            .setName("ignore me")
            .setMatch(
                io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
                    .setPath("/service/method")
                    .addQueryParameters(
                        io.envoyproxy.envoy.config.route.v3.QueryParameterMatcher
                            .getDefaultInstance())) // query parameter not supported
            .setRoute(
                io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
                    .setCluster("cluster-foo"))
            .build();
    assertThat(ClientXdsClient.parseRoute(proto, false)).isNull();
  }

  // A route with an unsupported action variant (cluster_header) is likewise
  // silently skipped rather than reported as an error.
  @Test
  public void parseRoute_skipRouteWithUnsupportedAction() {
    io.envoyproxy.envoy.config.route.v3.Route proto =
        io.envoyproxy.envoy.config.route.v3.Route.newBuilder()
            .setName("ignore me")
            .setMatch(
                io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
                    .setPath("/service/method"))
            .setRoute(
                io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
                    .setClusterHeader("cluster header")) // cluster_header action not supported
            .build();
    assertThat(ClientXdsClient.parseRoute(proto, false)).isNull();
  }
/** RouteMatch carrying header matchers parses prefix and exact header rules alongside the path. */
@Test
public void parseRouteMatch_withHeaderMatcher() {
  io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
      io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
          .setPrefix("")
          .addHeaders(
              io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
                  .setName(":scheme")
                  .setPrefixMatch("http"))
          .addHeaders(
              io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
                  .setName(":method")
                  .setExactMatch("PUT"))
          .build();
  StructOrError<RouteMatch> struct = ClientXdsClient.parseRouteMatch(proto);
  assertThat(struct.getErrorDetail()).isNull();
  // Header matcher order must be preserved by the parser.
  assertThat(struct.getStruct())
      .isEqualTo(
          RouteMatch.create(
              PathMatcher.fromPrefix("", false),
              Arrays.asList(
                  HeaderMatcher.forPrefix(":scheme", "http", false),
                  HeaderMatcher.forExactValue(":method", "PUT", false)),
              null));
}
/**
 * RouteMatch carrying a runtime_fraction parses into a FractionMatcher (30/100 here) with an
 * empty header-matcher list.
 */
@Test
public void parseRouteMatch_withRuntimeFractionMatcher() {
  io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
      io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
          .setPrefix("")
          .setRuntimeFraction(
              RuntimeFractionalPercent.newBuilder()
                  .setDefaultValue(
                      FractionalPercent.newBuilder()
                          .setNumerator(30)
                          .setDenominator(FractionalPercent.DenominatorType.HUNDRED)))
          .build();
  StructOrError<RouteMatch> struct = ClientXdsClient.parseRouteMatch(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct())
      .isEqualTo(
          RouteMatch.create(
              // Formatting fix: removed the stray space in "fromPrefix( ...)".
              PathMatcher.fromPrefix("", false), Collections.<HeaderMatcher>emptyList(),
              FractionMatcher.create(30, 100)));
}
/** A path-only RouteMatch parses into a full-path PathMatcher (case-sensitive). */
@Test
public void parsePathMatcher_withFullPath() {
  io.envoyproxy.envoy.config.route.v3.RouteMatch pathMatch =
      io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
          .setPath("/service/method")
          .build();
  StructOrError<PathMatcher> result = ClientXdsClient.parsePathMatcher(pathMatch);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct()).isEqualTo(PathMatcher.fromPath("/service/method", false));
}
/** A prefix-only RouteMatch parses into a prefix PathMatcher (case-sensitive). */
@Test
public void parsePathMatcher_withPrefix() {
  io.envoyproxy.envoy.config.route.v3.RouteMatch prefixMatch =
      io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder().setPrefix("/").build();
  StructOrError<PathMatcher> result = ClientXdsClient.parsePathMatcher(prefixMatch);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct()).isEqualTo(PathMatcher.fromPrefix("/", false));
}
/** A safe_regex RouteMatch parses into a compiled-regex PathMatcher. */
@Test
public void parsePathMatcher_withSafeRegEx() {
  io.envoyproxy.envoy.config.route.v3.RouteMatch proto =
      io.envoyproxy.envoy.config.route.v3.RouteMatch.newBuilder()
          .setSafeRegex(RegexMatcher.newBuilder().setRegex("."))
          .build();
  StructOrError<PathMatcher> struct = ClientXdsClient.parsePathMatcher(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct()).isEqualTo(PathMatcher.fromRegEx(Pattern.compile(".")));
}
/** HeaderMatcher with exact_match parses into an exact-value matcher. */
@Test
public void parseHeaderMatcher_withExactMatch() {
  io.envoyproxy.envoy.config.route.v3.HeaderMatcher matcherProto =
      io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
          .setExactMatch("PUT")
          .setName(":method")
          .build();
  StructOrError<HeaderMatcher> result = ClientXdsClient.parseHeaderMatcher(matcherProto);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct()).isEqualTo(HeaderMatcher.forExactValue(":method", "PUT", false));
}
/** HeaderMatcher with safe_regex_match parses into a compiled-regex matcher. */
@Test
public void parseHeaderMatcher_withSafeRegExMatch() {
  io.envoyproxy.envoy.config.route.v3.HeaderMatcher matcherProto =
      io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
          .setSafeRegexMatch(RegexMatcher.newBuilder().setRegex("P*"))
          .setName(":method")
          .build();
  StructOrError<HeaderMatcher> result = ClientXdsClient.parseHeaderMatcher(matcherProto);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct())
      .isEqualTo(HeaderMatcher.forSafeRegEx(":method", Pattern.compile("P*"), false));
}
/** HeaderMatcher with range_match parses into a [start, end) numeric range matcher. */
@Test
public void parseHeaderMatcher_withRangeMatch() {
  io.envoyproxy.envoy.config.route.v3.HeaderMatcher matcherProto =
      io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
          .setRangeMatch(Int64Range.newBuilder().setStart(10L).setEnd(20L))
          .setName("timeout")
          .build();
  StructOrError<HeaderMatcher> result = ClientXdsClient.parseHeaderMatcher(matcherProto);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct())
      .isEqualTo(HeaderMatcher.forRange("timeout", HeaderMatcher.Range.create(10L, 20L), false));
}
/** HeaderMatcher with present_match parses into a header-presence matcher. */
@Test
public void parseHeaderMatcher_withPresentMatch() {
  io.envoyproxy.envoy.config.route.v3.HeaderMatcher matcherProto =
      io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
          .setPresentMatch(true)
          .setName("user-agent")
          .build();
  StructOrError<HeaderMatcher> result = ClientXdsClient.parseHeaderMatcher(matcherProto);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct()).isEqualTo(HeaderMatcher.forPresent("user-agent", true, false));
}
/** HeaderMatcher with prefix_match parses into a value-prefix matcher. */
@Test
public void parseHeaderMatcher_withPrefixMatch() {
  io.envoyproxy.envoy.config.route.v3.HeaderMatcher matcherProto =
      io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
          .setPrefixMatch("service-foo")
          .setName("authority")
          .build();
  StructOrError<HeaderMatcher> result = ClientXdsClient.parseHeaderMatcher(matcherProto);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct())
      .isEqualTo(HeaderMatcher.forPrefix("authority", "service-foo", false));
}
/** HeaderMatcher with suffix_match parses into a value-suffix matcher. */
@Test
public void parseHeaderMatcher_withSuffixMatch() {
  io.envoyproxy.envoy.config.route.v3.HeaderMatcher matcherProto =
      io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
          .setSuffixMatch("googleapis.com")
          .setName("authority")
          .build();
  StructOrError<HeaderMatcher> result = ClientXdsClient.parseHeaderMatcher(matcherProto);
  assertThat(result.getErrorDetail()).isNull();
  assertThat(result.getStruct())
      .isEqualTo(HeaderMatcher.forSuffix("authority", "googleapis.com", false));
}
/** A syntactically invalid regex ("[") yields an error detail rather than a struct. */
@Test
public void parseHeaderMatcher_malformedRegExPattern() {
  io.envoyproxy.envoy.config.route.v3.HeaderMatcher proto =
      io.envoyproxy.envoy.config.route.v3.HeaderMatcher.newBuilder()
          .setName(":method")
          .setSafeRegexMatch(RegexMatcher.newBuilder().setRegex("["))
          .build();
  StructOrError<HeaderMatcher> struct = ClientXdsClient.parseHeaderMatcher(proto);
  assertThat(struct.getErrorDetail()).isNotNull();
  assertThat(struct.getStruct()).isNull();
}
/** RouteAction with a single cluster populates cluster() and leaves weightedClusters() null. */
@Test
public void parseRouteAction_withCluster() {
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setCluster("cluster-foo")
          .build();
  StructOrError<RouteAction> struct = ClientXdsClient.parseRouteAction(proto, false);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct().cluster()).isEqualTo("cluster-foo");
  assertThat(struct.getStruct().weightedClusters()).isNull();
}
/**
 * RouteAction with weighted_clusters populates weightedClusters() (with empty per-cluster filter
 * config maps) and leaves cluster() null.
 */
@Test
public void parseRouteAction_withWeightedCluster() {
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setWeightedClusters(
              WeightedCluster.newBuilder()
                  .addClusters(
                      WeightedCluster.ClusterWeight
                          .newBuilder()
                          .setName("cluster-foo")
                          .setWeight(UInt32Value.newBuilder().setValue(30)))
                  .addClusters(WeightedCluster.ClusterWeight
                      .newBuilder()
                      .setName("cluster-bar")
                      .setWeight(UInt32Value.newBuilder().setValue(70))))
          .build();
  StructOrError<RouteAction> struct = ClientXdsClient.parseRouteAction(proto, false);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct().cluster()).isNull();
  assertThat(struct.getStruct().weightedClusters()).containsExactly(
      ClusterWeight.create("cluster-foo", 30, ImmutableMap.<String, FilterConfig>of()),
      ClusterWeight.create("cluster-bar", 70, ImmutableMap.<String, FilterConfig>of()));
}
/**
 * When both grpc_timeout_header_max and max_stream_duration are set, grpc_timeout_header_max
 * takes precedence as the route timeout (5s wins over 20ms here).
 */
@Test
public void parseRouteAction_withTimeoutByGrpcTimeoutHeaderMax() {
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setCluster("cluster-foo")
          .setMaxStreamDuration(
              MaxStreamDuration.newBuilder()
                  .setGrpcTimeoutHeaderMax(Durations.fromSeconds(5L))
                  .setMaxStreamDuration(Durations.fromMillis(20L)))
          .build();
  StructOrError<RouteAction> struct = ClientXdsClient.parseRouteAction(proto, false);
  assertThat(struct.getStruct().timeoutNano()).isEqualTo(TimeUnit.SECONDS.toNanos(5L));
}
/** Without grpc_timeout_header_max, max_stream_duration supplies the route timeout. */
@Test
public void parseRouteAction_withTimeoutByMaxStreamDuration() {
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setCluster("cluster-foo")
          .setMaxStreamDuration(
              MaxStreamDuration.newBuilder()
                  .setMaxStreamDuration(Durations.fromSeconds(5L)))
          .build();
  StructOrError<RouteAction> struct = ClientXdsClient.parseRouteAction(proto, false);
  assertThat(struct.getStruct().timeoutNano()).isEqualTo(TimeUnit.SECONDS.toNanos(5L));
}
/** With no max_stream_duration at all, the parsed route timeout is null (unset). */
@Test
public void parseRouteAction_withTimeoutUnset() {
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setCluster("cluster-foo")
          .build();
  StructOrError<RouteAction> struct = ClientXdsClient.parseRouteAction(proto, false);
  assertThat(struct.getStruct().timeoutNano()).isNull();
}
/**
 * Of the four hash policies supplied, only the header policy (with regex rewrite) and the
 * filter-state channel-id policy survive parsing; connection-properties and query-parameter
 * policies are unsupported and dropped.
 */
@Test
public void parseRouteAction_withHashPolicies() {
  io.envoyproxy.envoy.config.route.v3.RouteAction proto =
      io.envoyproxy.envoy.config.route.v3.RouteAction.newBuilder()
          .setCluster("cluster-foo")
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setHeader(
                      Header.newBuilder()
                          .setHeaderName("user-agent")
                          .setRegexRewrite(
                              RegexMatchAndSubstitute.newBuilder()
                                  .setPattern(
                                      RegexMatcher.newBuilder()
                                          .setGoogleRe2(GoogleRE2.getDefaultInstance())
                                          .setRegex("grpc.*"))
                                  .setSubstitution("gRPC"))))
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setConnectionProperties(ConnectionProperties.newBuilder().setSourceIp(true))
                  .setTerminal(true)) // unsupported
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setFilterState(
                      FilterState.newBuilder()
                          .setKey(ClientXdsClient.HASH_POLICY_FILTER_STATE_KEY)))
          .addHashPolicy(
              io.envoyproxy.envoy.config.route.v3.RouteAction.HashPolicy.newBuilder()
                  .setQueryParameter(
                      QueryParameter.newBuilder().setName("param"))) // unsupported
          .build();
  StructOrError<RouteAction> struct = ClientXdsClient.parseRouteAction(proto, false);
  List<HashPolicy> policies = struct.getStruct().hashPolicies();
  assertThat(policies).hasSize(2);
  assertThat(policies.get(0).type()).isEqualTo(HashPolicy.Type.HEADER);
  assertThat(policies.get(0).headerName()).isEqualTo("user-agent");
  assertThat(policies.get(0).isTerminal()).isFalse();
  assertThat(policies.get(0).regEx().pattern()).isEqualTo("grpc.*");
  assertThat(policies.get(0).regExSubstitution()).isEqualTo("gRPC");
  assertThat(policies.get(1).type()).isEqualTo(HashPolicy.Type.CHANNEL_ID);
  assertThat(policies.get(1).isTerminal()).isFalse();
}
/** A WeightedCluster.ClusterWeight proto maps onto name/weight of the internal ClusterWeight. */
@Test
public void parseClusterWeight() {
  io.envoyproxy.envoy.config.route.v3.WeightedCluster.ClusterWeight weightProto =
      io.envoyproxy.envoy.config.route.v3.WeightedCluster.ClusterWeight.newBuilder()
          .setWeight(UInt32Value.newBuilder().setValue(30))
          .setName("cluster-foo")
          .build();
  ClusterWeight parsed = ClientXdsClient.parseClusterWeight(weightProto, false).getStruct();
  assertThat(parsed.name()).isEqualTo("cluster-foo");
  assertThat(parsed.weight()).isEqualTo(30);
}
/**
 * FaultAbort with an HTTP status preserves the fractional percent; the asserted mapping here is
 * HTTP 400 -> gRPC INTERNAL.
 */
@Test
public void parseFaultAbort_withHttpStatus() {
  io.envoyproxy.envoy.extensions.filters.http.fault.v3.FaultAbort proto =
      io.envoyproxy.envoy.extensions.filters.http.fault.v3.FaultAbort.newBuilder()
          .setPercentage(FractionalPercent.newBuilder()
              .setNumerator(100).setDenominator(DenominatorType.TEN_THOUSAND))
          .setHttpStatus(400).build();
  FaultAbort res = FaultFilter.parseFaultAbort(proto).config;
  assertThat(res.percent().numerator()).isEqualTo(100);
  assertThat(res.percent().denominatorType())
      .isEqualTo(FaultConfig.FractionalPercent.DenominatorType.TEN_THOUSAND);
  assertThat(res.status().getCode()).isEqualTo(Code.INTERNAL);
}
/** FaultAbort with a gRPC status passes the status code (DEADLINE_EXCEEDED) through unchanged. */
@Test
public void parseFaultAbort_withGrpcStatus() {
  io.envoyproxy.envoy.extensions.filters.http.fault.v3.FaultAbort proto =
      io.envoyproxy.envoy.extensions.filters.http.fault.v3.FaultAbort.newBuilder()
          .setPercentage(FractionalPercent.newBuilder()
              .setNumerator(600).setDenominator(DenominatorType.MILLION))
          .setGrpcStatus(Code.DEADLINE_EXCEEDED.value()).build();
  FaultAbort faultAbort = FaultFilter.parseFaultAbort(proto).config;
  assertThat(faultAbort.percent().numerator()).isEqualTo(600);
  assertThat(faultAbort.percent().denominatorType())
      .isEqualTo(FaultConfig.FractionalPercent.DenominatorType.MILLION);
  assertThat(faultAbort.status().getCode()).isEqualTo(Code.DEADLINE_EXCEEDED);
}
/** A HEALTHY endpoint is parsed with isHealthy=true; locality weight and priority carry over. */
@Test
public void parseLocalityLbEndpoints_withHealthyEndpoints() {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.HEALTHY)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> struct = ClientXdsClient.parseLocalityLbEndpoints(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct()).isEqualTo(
      LocalityLbEndpoints.create(
          Collections.singletonList(LbEndpoint.create("172.14.14.5", 8888, 20, true)), 100, 1));
}
/** UNKNOWN health status is treated the same as HEALTHY (isHealthy=true). */
@Test
public void parseLocalityLbEndpoints_treatUnknownHealthAsHealthy() {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNKNOWN)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> struct = ClientXdsClient.parseLocalityLbEndpoints(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct()).isEqualTo(
      LocalityLbEndpoints.create(
          Collections.singletonList(LbEndpoint.create("172.14.14.5", 8888, 20, true)), 100, 1));
}
/** An UNHEALTHY endpoint is kept in the locality but parsed with isHealthy=false. */
@Test
public void parseLocalityLbEndpoints_withUnHealthyEndpoints() {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNHEALTHY)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> struct = ClientXdsClient.parseLocalityLbEndpoints(proto);
  assertThat(struct.getErrorDetail()).isNull();
  assertThat(struct.getStruct()).isEqualTo(
      LocalityLbEndpoints.create(
          Collections.singletonList(LbEndpoint.create("172.14.14.5", 8888, 20, false)), 100, 1));
}
/**
 * A locality whose load_balancing_weight is zero is dropped entirely: the parser returns null
 * rather than a struct or an error.
 *
 * <p>Renamed from {@code parseLocalityLbEndpoints_ignorZeroWeightLocality} to fix the "ignor"
 * typo; JUnit discovers test methods via the annotation, so no caller is affected.
 */
@Test
public void parseLocalityLbEndpoints_ignoreZeroWeightLocality() {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(0)) // locality weight
          .setPriority(1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNKNOWN)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  assertThat(ClientXdsClient.parseLocalityLbEndpoints(proto)).isNull();
}
/** A negative priority is rejected with the error detail "negative priority". */
@Test
public void parseLocalityLbEndpoints_invalidPriority() {
  io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints proto =
      io.envoyproxy.envoy.config.endpoint.v3.LocalityLbEndpoints.newBuilder()
          .setLocality(Locality.newBuilder()
              .setRegion("region-foo").setZone("zone-foo").setSubZone("subZone-foo"))
          .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(100)) // locality weight
          .setPriority(-1)
          .addLbEndpoints(io.envoyproxy.envoy.config.endpoint.v3.LbEndpoint.newBuilder()
              .setEndpoint(Endpoint.newBuilder()
                  .setAddress(Address.newBuilder()
                      .setSocketAddress(
                          SocketAddress.newBuilder()
                              .setAddress("172.14.14.5").setPortValue(8888))))
              .setHealthStatus(io.envoyproxy.envoy.config.core.v3.HealthStatus.UNKNOWN)
              .setLoadBalancingWeight(UInt32Value.newBuilder().setValue(20))) // endpoint weight
          .build();
  StructOrError<LocalityLbEndpoints> struct = ClientXdsClient.parseLocalityLbEndpoints(proto);
  assertThat(struct.getErrorDetail()).isEqualTo("negative priority");
}
@Test
public void parseHttpFilter_unsupportedButOptional() {
HttpFilter httpFilter = HttpFilter.newBuilder()
.setIsOptional(true)
.setTypedConfig(Any.pack(StringValue.of("unsupported")))
.build();
assertThat(ClientXdsClient.parseHttpFilter(httpFilter)).isNull();
}
/** A required (non-optional) HttpFilter with an unrecognized config type yields an error. */
@Test
public void parseHttpFilter_unsupportedAndRequired() {
  HttpFilter httpFilter = HttpFilter.newBuilder()
      .setIsOptional(false)
      .setName("unsupported.filter")
      .setTypedConfig(Any.pack(StringValue.of("string value")))
      .build();
  assertThat(ClientXdsClient.parseHttpFilter(httpFilter).getErrorDetail()).isEqualTo(
      "HttpFilter [unsupported.filter] is not optional and has an unsupported config type: "
          + "type.googleapis.com/google.protobuf.StringValue");
}
/**
 * Per-route filter config overrides: an unsupported override wrapped in FilterConfig with
 * is_optional=true is dropped, while the supported fault override is kept.
 */
@Test
public void parseOverrideFilterConfigs_unsupportedButOptional() {
  HTTPFault httpFault = HTTPFault.newBuilder()
      .setDelay(FaultDelay.newBuilder().setFixedDelay(Durations.fromNanos(3000)))
      .build();
  Map<String, Any> configOverrides = ImmutableMap.of(
      "envoy.fault",
      Any.pack(httpFault),
      "unsupported.filter",
      Any.pack(io.envoyproxy.envoy.config.route.v3.FilterConfig.newBuilder()
          .setIsOptional(true).setConfig(Any.pack(StringValue.of("string value")))
          .build()));
  Map<String, FilterConfig> parsedConfigs =
      ClientXdsClient.parseOverrideFilterConfigs(configOverrides).getStruct();
  assertThat(parsedConfigs).hasSize(1);
  assertThat(parsedConfigs).containsKey("envoy.fault");
}
/**
 * Per-route filter config overrides: an unsupported required override produces an error, whether
 * it is wrapped in FilterConfig (is_optional=false) or supplied bare.
 */
@Test
public void parseOverrideFilterConfigs_unsupportedAndRequired() {
  HTTPFault httpFault = HTTPFault.newBuilder()
      .setDelay(FaultDelay.newBuilder().setFixedDelay(Durations.fromNanos(3000)))
      .build();
  Map<String, Any> configOverrides = ImmutableMap.of(
      "envoy.fault",
      Any.pack(httpFault),
      "unsupported.filter",
      Any.pack(io.envoyproxy.envoy.config.route.v3.FilterConfig.newBuilder()
          .setIsOptional(false).setConfig(Any.pack(StringValue.of("string value")))
          .build()));
  assertThat(ClientXdsClient.parseOverrideFilterConfigs(configOverrides).getErrorDetail())
      .isEqualTo(
          "HttpFilter [unsupported.filter] is not optional and has an unsupported config type: "
              + "type.googleapis.com/google.protobuf.StringValue");
  // Same expectation when the unsupported config is not wrapped in FilterConfig at all.
  configOverrides = ImmutableMap.of(
      "envoy.fault",
      Any.pack(httpFault),
      "unsupported.filter",
      Any.pack(StringValue.of("string value")));
  assertThat(ClientXdsClient.parseOverrideFilterConfigs(configOverrides).getErrorDetail())
      .isEqualTo(
          "HttpFilter [unsupported.filter] is not optional and has an unsupported config type: "
              + "type.googleapis.com/google.protobuf.StringValue");
}
/** RING_HASH without an explicit ring_hash_lb_config falls back to the default ring sizes. */
@Test
public void parseCluster_ringHashLbPolicy_defaultLbConfig() throws ResourceInvalidException {
  Cluster cluster = Cluster.newBuilder()
      .setName("cluster-foo.googleapis.com")
      .setType(DiscoveryType.EDS)
      .setEdsClusterConfig(
          EdsClusterConfig.newBuilder()
              .setEdsConfig(
                  ConfigSource.newBuilder()
                      .setAds(AggregatedConfigSource.getDefaultInstance()))
              .setServiceName("service-foo.googleapis.com"))
      .setLbPolicy(LbPolicy.RING_HASH)
      .build();
  CdsUpdate update = ClientXdsClient.parseCluster(cluster, new HashSet<String>());
  assertThat(update.lbPolicy()).isEqualTo(CdsUpdate.LbPolicy.RING_HASH);
  assertThat(update.minRingSize())
      .isEqualTo(ClientXdsClient.DEFAULT_RING_HASH_LB_POLICY_MIN_RING_SIZE);
  assertThat(update.maxRingSize())
      .isEqualTo(ClientXdsClient.DEFAULT_RING_HASH_LB_POLICY_MAX_RING_SIZE);
}
/** ring_hash_lb_config with minimum_ring_size > maximum_ring_size must be rejected. */
@Test
public void parseCluster_ringHashLbPolicy_invalidRingSizeConfig_minGreaterThanMax()
    throws ResourceInvalidException {
  Cluster cluster = Cluster.newBuilder()
      .setName("cluster-foo.googleapis.com")
      .setType(DiscoveryType.EDS)
      .setEdsClusterConfig(
          EdsClusterConfig.newBuilder()
              .setEdsConfig(
                  ConfigSource.newBuilder()
                      .setAds(AggregatedConfigSource.getDefaultInstance()))
              .setServiceName("service-foo.googleapis.com"))
      .setLbPolicy(LbPolicy.RING_HASH)
      .setRingHashLbConfig(
          RingHashLbConfig.newBuilder()
              .setHashFunction(HashFunction.XX_HASH)
              .setMinimumRingSize(UInt64Value.newBuilder().setValue(1000L))
              .setMaximumRingSize(UInt64Value.newBuilder().setValue(100L)))
      .build();
  thrown.expect(ResourceInvalidException.class);
  thrown.expectMessage("Cluster cluster-foo.googleapis.com: invalid ring_hash_lb_config");
  ClientXdsClient.parseCluster(cluster, new HashSet<String>());
}
/** ring_hash_lb_config whose maximum_ring_size exceeds the hard cap must be rejected. */
@Test
public void parseCluster_ringHashLbPolicy_invalidRingSizeConfig_tooLargeRingSize()
    throws ResourceInvalidException {
  Cluster cluster = Cluster.newBuilder()
      .setName("cluster-foo.googleapis.com")
      .setType(DiscoveryType.EDS)
      .setEdsClusterConfig(
          EdsClusterConfig.newBuilder()
              .setEdsConfig(
                  ConfigSource.newBuilder()
                      .setAds(AggregatedConfigSource.getDefaultInstance()))
              .setServiceName("service-foo.googleapis.com"))
      .setLbPolicy(LbPolicy.RING_HASH)
      .setRingHashLbConfig(
          RingHashLbConfig.newBuilder()
              .setHashFunction(HashFunction.XX_HASH)
              .setMinimumRingSize(UInt64Value.newBuilder().setValue(1000L))
              .setMaximumRingSize(
                  UInt64Value.newBuilder()
                      .setValue(ClientXdsClient.MAX_RING_HASH_LB_POLICY_RING_SIZE + 1)))
      .build();
  thrown.expect(ResourceInvalidException.class);
  thrown.expectMessage("Cluster cluster-foo.googleapis.com: invalid ring_hash_lb_config");
  ClientXdsClient.parseCluster(cluster, new HashSet<String>());
}
/** Server-side listeners must be INBOUND; OUTBOUND is rejected with an error detail. */
@Test
public void parseServerSideListener_invalidTrafficDirection() {
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.OUTBOUND)
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail()).isEqualTo("Listener listener1 is not INBOUND");
}
/** Server-side listeners may not carry listener_filters; their presence is an error. */
@Test
public void parseServerSideListener_listenerFiltersPresent() {
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .addListenerFilters(ListenerFilter.newBuilder().build())
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail())
      .isEqualTo("Listener listener1 cannot have listener_filters");
}
/** Server-side listeners may not set use_original_dst=true; it is rejected with an error. */
@Test
public void parseServerSideListener_useOriginalDst() {
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .setUseOriginalDst(BoolValue.of(true))
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail())
      .isEqualTo("Listener listener1 cannot have use_original_dst set to true");
}
/** A filter chain without an HttpConnectionManager network filter is rejected. */
@Test
public void parseServerSideListener_noHcm() {
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .addFilterChains(FilterChain.newBuilder().build())
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  // "filerChain" typo below is in the production error message; the test must match it exactly.
  assertThat(struct.getErrorDetail())
      .isEqualTo("filerChain has to have envoy.http_connection_manager");
}
@Test
public void parseServerSideListener_duplicateFilterName() {
FilterChain filterChain =
buildFilterChain(
Filter.newBuilder()
.setName("envoy.http_connection_manager")
.setTypedConfig(Any.pack(HttpConnectionManager.getDefaultInstance()))
.build(),
Filter.newBuilder()
.setName("envoy.http_connection_manager")
.setTypedConfig(Any.pack(HttpConnectionManager.getDefaultInstance()))
.build());
Listener listener =
Listener.newBuilder()
.setName("listener1")
.setTrafficDirection(TrafficDirection.INBOUND)
.addFilterChains(filterChain)
.build();
StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
ClientXdsClient.parseServerSideListener(listener, null);
assertThat(struct.getErrorDetail())
.isEqualTo("filerChain has non-unique filter name:envoy.http_connection_manager");
}
/** A network filter delivered via config_discovery (ECDS) is unsupported and rejected. */
@Test
public void parseServerSideListener_configDiscoveryFilter() {
  Filter filter =
      Filter.newBuilder()
          .setName("envoy.http_connection_manager")
          .setConfigDiscovery(ExtensionConfigSource.newBuilder().build())
          .build();
  FilterChain filterChain = buildFilterChain(filter);
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .addFilterChains(filterChain)
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail())
      .isEqualTo("filter envoy.http_connection_manager with config_discovery not supported");
}
/** A network filter with neither typed_config nor config_discovery is rejected. */
@Test
public void parseServerSideListener_expectTypedConfigFilter() {
  Filter filter = Filter.newBuilder().setName("envoy.http_connection_manager").build();
  FilterChain filterChain = buildFilterChain(filter);
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .addFilterChains(filterChain)
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail())
      .isEqualTo("filter envoy.http_connection_manager expected to have typed_config");
}
/** A network filter whose typed_config has an unrecognized type URL is rejected. */
@Test
public void parseServerSideListener_wrongTypeUrl() {
  Filter filter =
      Filter.newBuilder()
          .setName("envoy.http_connection_manager")
          .setTypedConfig(Any.newBuilder().setTypeUrl("badTypeUrl"))
          .build();
  FilterChain filterChain = buildFilterChain(filter);
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .addFilterChains(filterChain)
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail())
      .isEqualTo(
          "filter envoy.http_connection_manager with unsupported typed_config type:badTypeUrl");
}
/** Two HTTP filters with the same name inside the HttpConnectionManager are rejected. */
@Test
public void parseServerSideListener_duplicateHttpFilter() {
  Filter filter =
      buildHttpConnectionManager(
          "envoy.http_connection_manager",
          HttpFilter.newBuilder().setName("hf").setIsOptional(true).build(),
          HttpFilter.newBuilder().setName("hf").setIsOptional(true).build());
  FilterChain filterChain = buildFilterChain(filter);
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .addFilterChains(filterChain)
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail())
      .isEqualTo("http-connection-manager has non-unique http-filter name:hf");
}
/** An HTTP filter delivered via config_discovery (ECDS) inside the HCM is rejected. */
@Test
public void parseServerSideListener_configDiscoveryHttpFilter() {
  Filter filter =
      buildHttpConnectionManager(
          "envoy.http_connection_manager",
          HttpFilter.newBuilder()
              .setName("envoy.router")
              .setConfigDiscovery(ExtensionConfigSource.newBuilder().build())
              .build());
  FilterChain filterChain = buildFilterChain(filter);
  Listener listener =
      Listener.newBuilder()
          .setName("listener1")
          .setTrafficDirection(TrafficDirection.INBOUND)
          .addFilterChains(filterChain)
          .build();
  StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
      ClientXdsClient.parseServerSideListener(listener, null);
  assertThat(struct.getErrorDetail())
      .isEqualTo(
          "http-connection-manager http-filter envoy.router uses "
              + "config-discovery which is unsupported");
}
@Test
public void parseServerSideListener_badTypeUrlHttpFilter() {
HTTPFault fault = HTTPFault.newBuilder().build();
Filter filter =
buildHttpConnectionManager(
"envoy.http_connection_manager",
HttpFilter.newBuilder()
.setName("envoy.router")
.setTypedConfig(Any.pack(fault, "type.googleapis.com"))
.build());
FilterChain filterChain = buildFilterChain(filter);
Listener listener =
Listener.newBuilder()
.setName("listener1")
.setTrafficDirection(TrafficDirection.INBOUND)
.addFilterChains(filterChain)
.build();
StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
ClientXdsClient.parseServerSideListener(listener, null);
assertThat(struct.getErrorDetail())
.isEqualTo(
"http-connection-manager http-filter envoy.router has unsupported typed-config type:"
+ "type.googleapis.com/envoy.extensions.filters.http.fault.v3.HTTPFault");
}
@Test
public void parseServerSideListener_missingTypeUrlHttpFilter() {
Filter filter =
buildHttpConnectionManager(
"envoy.http_connection_manager",
HttpFilter.newBuilder().setName("envoy.filters.http.router").build());
FilterChain filterChain = buildFilterChain(filter);
Listener listener =
Listener.newBuilder()
.setName("listener1")
.setTrafficDirection(TrafficDirection.INBOUND)
.addFilterChains(filterChain)
.build();
StructOrError<io.grpc.xds.EnvoyServerProtoData.Listener> struct =
ClientXdsClient.parseServerSideListener(listener, null);
assertThat(struct.getErrorDetail())
.isEqualTo(
"http-connection-manager http-filter envoy.filters.http.router should have "
+ "typed-config type "
+ "type.googleapis.com/envoy.extensions.filters.http.router.v3.Router");
}
/** Builds a network Filter wrapping the given HTTP filters in an HCM packed as typed_config. */
static Filter buildHttpConnectionManager(String name, HttpFilter... httpFilters) {
  HttpConnectionManager hcm =
      HttpConnectionManager.newBuilder().addAllHttpFilters(Arrays.asList(httpFilters)).build();
  return Filter.newBuilder()
      .setName(name)
      .setTypedConfig(Any.pack(hcm, "type.googleapis.com"))
      .build();
}
/** Wraps the given network filters in a FilterChain with an empty match and default transport socket. */
static FilterChain buildFilterChain(Filter... filters) {
  FilterChain.Builder chainBuilder = FilterChain.newBuilder();
  chainBuilder.setFilterChainMatch(FilterChainMatch.newBuilder().build());
  chainBuilder.setTransportSocket(TransportSocket.getDefaultInstance());
  for (Filter filter : filters) {
    chainBuilder.addFilters(filter);
  }
  return chainBuilder.build();
}
}
| apache-2.0 |
srinivasans/knowledgecoefficient | model/evaluate_class.php | 3445 | <?php
require_once('model_class.php');
class Evaluate extends Model
{
// Columns of the `evaluation` table; the DB-column <-> property mapping is
// declared in defineRelationMap() below.
// NOTE(review): per-field semantics below are inferred from the column names
// and the queries in this class — confirm against the schema.
protected $TestId;      // FK of the test this evaluation belongs to
protected $UserId;      // FK of the evaluated user
protected $Total;       // marks scored by the user in this test
protected $Rank;        // user's rank within the test
protected $Percentile;  // user's percentile within the test
protected $ExamTotal;   // maximum obtainable marks for the test
/**
 * Backing table for this model.
 *
 * @return string always the `evaluation` table
 */
protected function defineTableName()
{
    return 'evaluation';
}
/**
 * Column-to-property mapping consumed by the base Model class.
 *
 * @return array map of DB column name => model property name
 */
protected function defineRelationMap()
{
    $relationMap = array(
        "id"=>"ID",
        "test_id"=>"TestId",
        "user_id"=>"UserId",
        "total"=>"Total",
        "rank"=>"Rank",
        "percentile"=>"Percentile",
        "exam_total"=>"ExamTotal",
    );
    return $relationMap;
}
/**
 * Loads the evaluation row for the given (user, test) pair into this object.
 *
 * @param int $user_id user whose evaluation to load
 * @param int $test_id test the evaluation belongs to
 * @return Evaluate|false $this fully loaded on success, false when no row matches
 */
public function getByUserIdTestId($user_id,$test_id)
{
    $strQuery='SELECT * FROM `'.$this->defineTableName().'` WHERE `user_id`=:user_id AND `test_id`=:test_id';
    unset($objStatement);
    $objStatement=$this->objPDO->prepare($strQuery);
    $objStatement->bindParam(':user_id',$user_id,PDO::PARAM_INT);
    $objStatement->bindParam(':test_id',$test_id,PDO::PARAM_INT);
    $objStatement->execute();
    if($arRow=$objStatement->fetch(PDO::FETCH_ASSOC))
    {
        // Re-load through the base Model so all mapped properties are hydrated.
        $this->setID($arRow['id']);
        $this->load();
        return $this;
    }
    else
    {
        return false;
    }
}
public function getByTestId($test_id)
{
$strQuery='SELECT * FROM `'.$this->defineTableName().'` WHERE `test_id`=:test_id ORDER BY `total` DESC';
unset($objStatement);
$objStatement=$this->objPDO->prepare($strQuery);
$objStatement->bindParam(':test_id',$test_id,PDO::PARAM_INT);
$objStatement->execute();
$evals=array();
if($arRow=$objStatement->fetch(PDO::FETCH_ASSOC))
{
$e=new Evaluate($this->objPDO);
$e->setID($arRow['id']);
$e->load();
$evals[]=$e;
}
return $evals;
}
public function getAverageMarks()
{
$strQuery='SELECT avg(`total`) as `avg` FROM `'.$this->defineTableName().'` WHERE `test_id`=:test_id';
unset($objStatement);
$objStatement=$this->objPDO->prepare($strQuery);
$test_id=$this->TestId;
$objStatement->bindParam(':test_id',$test_id,PDO::PARAM_INT);
$objStatement->execute();
if($arRow=$objStatement->fetch(PDO::FETCH_ASSOC))
{
return round($arRow['avg'],2);
}
return 0;
}
public function getTotalMarks()
{
$strQuery='SELECT avg(`exam_total`) as `avg` FROM `'.$this->defineTableName().'` WHERE `test_id`=:test_id';
unset($objStatement);
$objStatement=$this->objPDO->prepare($strQuery);
$test_id=$this->TestId;
$objStatement->bindParam(':test_id',$test_id,PDO::PARAM_INT);
$objStatement->execute();
if($arRow=$objStatement->fetch(PDO::FETCH_ASSOC))
{
return round($arRow['avg'],2);
}
return 0;
}
public function getMaxMarks()
{
$strQuery='SELECT max(`total`) as `max` FROM `'.$this->defineTableName().'` WHERE `test_id`=:test_id';
unset($objStatement);
$objStatement=$this->objPDO->prepare($strQuery);
$test_id=$this->TestId;
$objStatement->bindParam(':test_id',$test_id,PDO::PARAM_INT);
$objStatement->execute();
if($arRow=$objStatement->fetch(PDO::FETCH_ASSOC))
{
return $arRow['max'];
}
return 0;
}
public function deleteByUserIdTestId($user_id,$test_id)
{
$strQuery='SELECT * FROM `'.$this->defineTableName().'` WHERE `user_id`=:user_id AND `test_id`=:test_id';
unset($objStatement);
$objStatement=$this->objPDO->prepare($strQuery);
$objStatement->bindParam(':user_id',$user_id,PDO::PARAM_INT);
$objStatement->bindParam(':test_id',$test_id,PDO::PARAM_INT);
$objStatement->execute();
if($arRow=$objStatement->fetch(PDO::FETCH_ASSOC))
{
$obj=new Evaluate($this->objPDO,$arRow['id']);
$obj->markForDeletion();
return true;
}
else
{
return false;
}
}
};
?> | apache-2.0 |
yuluows/zkclient | src/main/java/com/api6/zkclient/watcher/ZKWatcherProcess.java | 15988 | /**
*Copyright 2016 zhaojie
*
*Licensed under the Apache License, Version 2.0 (the "License");
*you may not use this file except in compliance with the License.
*You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing, software
*distributed under the License is distributed on an "AS IS" BASIS,
*WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*See the License for the specific language governing permissions and
*limitations under the License.
*/
package com.api6.zkclient.watcher;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher.Event.EventType;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.api6.zkclient.ZKClient;
import com.api6.zkclient.event.ZKEvent;
import com.api6.zkclient.event.ZKEventThreadPool;
import com.api6.zkclient.exception.ZKNoNodeException;
import com.api6.zkclient.listener.ZKChildDataListener;
import com.api6.zkclient.listener.ZKListener;
import com.api6.zkclient.listener.ZKNodeListener;
import com.api6.zkclient.listener.ZKStateListener;
/**
 * Dispatches ZooKeeper watch events to the listeners registered on a
 * {@link ZKClient}. Every listener callback is executed asynchronously on a
 * dedicated {@link ZKEventThreadPool} so the ZooKeeper event thread is never
 * blocked by user code.
 *
 * @author: zhaojie/zh_jie@163.com
 */
public class ZKWatcherProcess {
    // 'final' added: the logger is assigned once and never reassigned.
    private static final Logger LOG = LoggerFactory.getLogger(ZKWatcherProcess.class);
    private final ZKEventThreadPool eventThreadPool;
    private final ZKClient client;

    public ZKWatcherProcess(ZKClient zkClient) {
        this.client = zkClient;
        // Create the thread pool that runs all listener callbacks.
        eventThreadPool = new ZKEventThreadPool(zkClient.getEventThreadPoolSize());
    }

    /**
     * Stops event processing by destroying the event thread pool.
     */
    public void stop(){
        eventThreadPool.destory();
    }

    /**
     * Handles a connection-state change: updates the client's cached state,
     * notifies all {@link ZKStateListener}s and, when the session expired,
     * reconnects and fires the new-session (or session-error) callbacks.
     */
    public void processStateChanged(WatchedEvent event) {
        final KeeperState keeperState = event.getState();
        LOG.info("ZooKeeper state is changed [" + keeperState + "] .");
        // Keep the client's cached connection state in sync with the server.
        client.setCurrentState(keeperState);
        if (client.getShutdownTrigger()) {
            return;
        }
        // All registered state listeners.
        Set<ZKStateListener> listeners = client.getStateListenerSet();
        // Dispatch the state-changed callback to every listener.
        for (final ZKStateListener stateListener : listeners) {
            eventThreadPool.submit(new ZKEvent("State changed to " + keeperState + " sent to " + stateListener) {
                @Override
                public void run() throws Exception {
                    stateListener.handleStateChanged(keeperState);
                }
            });
        }
        // An expired session requires establishing a fresh connection.
        if (event.getState() == KeeperState.Expired) {
            try {
                // Session expired: reconnect to the server.
                client.reconnect();
                // Notify listeners that a new session has been established.
                for (final ZKStateListener stateListener : listeners) {
                    ZKEvent zkEvent = new ZKEvent("New session event sent to " + stateListener) {
                        @Override
                        public void run() throws Exception {
                            stateListener.handleNewSession();
                        }
                    };
                    eventThreadPool.submit(zkEvent);
                }
            } catch (final Exception e) {
                LOG.info("Unable to re-establish connection. Notifying consumer of the following exception: ", e);
                // Reconnect after session expiry failed: report the error to listeners.
                for (final ZKStateListener stateListener : listeners) {
                    eventThreadPool.submit(new ZKEvent("Session establishment error[" + e + "] sent to " + stateListener) {
                        @Override
                        public void run() throws Exception {
                            stateListener.handleSessionError(e);
                        }
                    });
                }
            }
        }
    }

    /**
     * Handles a child-list change event by dispatching it to the child
     * listeners registered for the event's path.
     * @param event the raw watch event
     */
    public void processChildChanged(final WatchedEvent event){
        final String path = event.getPath();
        final Set<ZKListener> listeners = client.getChildListenerMap().get(path);
        // Submit the event for asynchronous listener processing.
        submitChildEvent(listeners, path, event.getType());
    }

    /**
     * Handles a node change (create/delete/data-change) event by dispatching
     * it to the node listeners registered for the event's path.
     * @param event the raw watch event
     */
    public void processNodeChanged(final WatchedEvent event){
        final String path = event.getPath();
        final EventType eventType = event.getType();
        final Set<ZKListener> listeners = client.getNodeListenerMap().get(path);
        if (listeners == null || listeners.isEmpty()) {
            return;
        }
        // If the listener set contains ZKChildDataListener instances, this node
        // is a child of some watched parent and its data changes are being
        // tracked. Those listeners are split out so they can be fired (and
        // cleaned up) separately from the plain node listeners.
        final Set<ZKListener> childDataChangeListeners = new CopyOnWriteArraySet<>();
        final Set<ZKListener> nodeListeners = new CopyOnWriteArraySet<>();
        classifyListeners(listeners, nodeListeners, childDataChangeListeners);
        // Submit the event for asynchronous listener processing.
        submitNodeEvent(nodeListeners, childDataChangeListeners, path, eventType);
        // This node's data changed while acting as a watched child.
        if(eventType == EventType.NodeDataChanged){
            // Submit the child-data event for asynchronous listener processing.
            submitChildDataEvent(childDataChangeListeners, path, eventType);
        }
    }

    /**
     * Fires every registered node and child listener; called after a session
     * has been re-established. When a session expires the server discards all
     * watches, so any change that happened between expiry and reconnect would
     * otherwise go unnoticed. Triggering all listeners tells them the session
     * expired and that data MAY have changed (not necessarily).
     *
     * @param event the watch event that signalled the session change
     */
    public void processAllNodeAndChildListeners(final WatchedEvent event){
        LOG.debug("processAllNodeAndChildListeners....");
        // Re-fire all node listeners, split by listener type.
        for (Entry<String, CopyOnWriteArraySet<ZKListener>> entry : client.getNodeListenerMap().entrySet()) {
            Set<ZKListener> nodeListeners = new CopyOnWriteArraySet<ZKListener>();
            Set<ZKListener> childDataChangeListeners = new CopyOnWriteArraySet<ZKListener>();
            Set<ZKListener> listeners = entry.getValue();
            classifyListeners(listeners, nodeListeners, childDataChangeListeners);
            // Submit the event for asynchronous listener processing.
            submitNodeEvent(nodeListeners, childDataChangeListeners, entry.getKey(), event.getType());
        }
        // Re-fire all child listeners.
        for (Entry<String, CopyOnWriteArraySet<ZKListener>> entry : client.getChildListenerMap().entrySet()) {
            // Submit the event for asynchronous listener processing.
            submitChildEvent(entry.getValue(), entry.getKey(), event.getType());
        }
    }

    /**
     * Partitions {@code listeners} by type: {@link ZKChildDataListener}
     * instances go into {@code childDataChangeListeners}, everything else
     * (plain {@link ZKNodeListener}-style listeners) into {@code nodeListeners}.
     *
     * @param listeners the mixed listener set to classify
     * @param nodeListeners receives the non-child-data listeners
     * @param childDataChangeListeners receives the child-data listeners
     */
    private void classifyListeners(Set<ZKListener> listeners, Set<ZKListener> nodeListeners, Set<ZKListener> childDataChangeListeners){
        for(ZKListener listener : listeners){
            if(listener instanceof ZKChildDataListener){
                if(!childDataChangeListeners.contains(listener)){
                    childDataChangeListeners.add(listener);
                }
            }else{
                if(!nodeListeners.contains(listener)){
                    nodeListeners.add(listener);
                }
            }
        }
    }

    /**
     * Submits node-change events to the pool for each node listener.
     * Re-registers the (one-shot) watch, reads the latest data and fires the
     * listener; synthesizes create/delete events for changes that raced with
     * watch re-registration.
     *
     * @param listeners node listeners to fire
     * @param childDataChangeListeners child-data listeners to unregister when the node is gone
     * @param path the changed path
     * @param eventType the observed event type
     */
    private void submitNodeEvent(final Set<ZKListener> listeners, final Set<ZKListener> childDataChangeListeners, final String path, final EventType eventType ){
        if (listeners != null && !listeners.isEmpty()) {
            for (final ZKListener listener : listeners) {
                ZKEvent zkEvent = new ZKEvent("Node of " + path + " changed sent to " + listener) {
                    @Override
                    public void run() throws Exception {
                        // Native ZooKeeper watches fire only once: re-register the watch.
                        LOG.debug("Rewatch the path ["+path+"] by exists method");
                        boolean flag = client.exists(path, true);
                        LOG.debug("Rewatched the path ["+path+"] by exists method");
                        try {
                            LOG.debug("Rewatch and get changed data [path:"+path+" | EventType:"+eventType+"] by getData method");
                            Object data = client.getData(path, null);
                            LOG.debug("Rewatched and return data ["+path+" | "+data+" | EventType:"+eventType+"] by getData method");
                            listener.handle(path, eventType, data);
                            // We handled a delete event, but the node was re-created before the
                            // watch could be re-registered; that creation cannot be observed,
                            // so fire a synthetic NodeCreated event here.
                            if (eventType == EventType.NodeDeleted && flag) {
                                listener.handle(path, EventType.NodeCreated, data);
                            }
                        } catch (ZKNoNodeException e) {
                            // The node is gone: remove only the ZKChildDataListener-type listeners.
                            client.unlistenNodeChanges(path, childDataChangeListeners);
                            // client.getData(path, null) throws when the path does not exist.
                            listener.handle(path, eventType, null);
                            // A created node may already have been deleted again before the watch
                            // was re-registered; that deletion cannot be observed, so fire a
                            // synthetic NodeDeleted event here.
                            if (eventType == EventType.NodeCreated && !flag) {
                                listener.handle(path, EventType.NodeDeleted, null);
                            }
                        }
                    }
                };
                eventThreadPool.submit(zkEvent);
            }
        }
    }

    /**
     * Submits child-change events to the pool for each child listener.
     * Re-registers the parent watch, re-reads the child list, re-watches new
     * children for data changes and fires the listener.
     *
     * @param listeners child listeners to fire
     * @param path the parent path whose children changed
     * @param eventType the observed event type
     */
    private void submitChildEvent(final Set<ZKListener> listeners, final String path, final EventType eventType){
        if (listeners != null && !listeners.isEmpty()) {
            try {
                for (final ZKListener listener : listeners) {
                    // Build the event and run it on the dedicated event thread pool.
                    ZKEvent zkEvent = new ZKEvent("Children of " + path + " changed sent to " + listener) {
                        @Override
                        public void run() throws Exception {
                            // Native ZooKeeper watches fire only once: re-register the parent watch.
                            LOG.debug("Rewatch the path ["+path+"] by exists method");
                            client.exists(path);
                            LOG.debug("Rewatched the path ["+path+"] by exists method");
                            try {
                                LOG.debug("Rewatch and get chilldren [path:"+path+" | EventType:"+eventType+"] by getChildren method");
                                // Fetch the children and re-watch for further child changes.
                                List<String> children = client.getChildren(path);
                                LOG.debug("Rewatched and return children [children:"+children+" | EventType:"+eventType+"] by getChildren method");
                                // The child set changed: if a ZKChildDataListener is registered on
                                // this path, re-register data watches on any new children.
                                client.listenNewChildPathWithData(path, children);
                                listener.handle(path, eventType, children);
                            } catch (ZKNoNodeException e) {
                                // client.getChildren(path) throws when the path does not exist.
                                listener.handle(path, eventType, null);
                            }
                        }
                    };
                    eventThreadPool.submit(zkEvent);
                }
            } catch (Exception e) {
                LOG.error("Failed to fire child changed event. Unable to getChildren. ", e);
            }
        }
    }

    /**
     * Submits child-data-change events to the pool for each child-data listener.
     * Re-registers the watch, reads the latest value and fires the listener.
     *
     * @param listeners child-data listeners to fire
     * @param path the child path whose data changed
     * @param eventType the observed event type
     */
    private void submitChildDataEvent(final Set<ZKListener> listeners, final String path, final EventType eventType){
        if (listeners != null && !listeners.isEmpty()) {
            for (final ZKListener listener : listeners) {
                ZKEvent zkEvent = new ZKEvent("Children of " + path + " changed sent to "+ listener) {
                    @Override
                    public void run() throws Exception {
                        // Native ZooKeeper watches fire only once: re-register the watch.
                        LOG.debug("rewatch the path ["+path+"]");
                        client.exists(path, true);
                        LOG.debug("rewatched the path ["+path+"]");
                        try {
                            LOG.debug("Try to get child changed data [path:"+path+" | EventType:"+eventType+"]");
                            // Between the event firing and this read the value may change again:
                            // we only observe the latest value, not necessarily the value at the
                            // moment the event was triggered. This call also re-watches the node.
                            Object data = client.getData(path, null);
                            LOG.debug("Child changed data is [path:"+path+" | data:"+data+" | EventType:"+eventType+"]");
                            listener.handle(path, eventType, data);
                        } catch (ZKNoNodeException e) {
                            // Node vanished before we could read it: nothing to report.
                        }
                    }
                };
                eventThreadPool.submit(zkEvent);
            }
        }
    }
}
| apache-2.0 |
Unicon/cas | support/cas-server-support-consent-core/src/main/java/org/apereo/cas/consent/BaseConsentRepository.java | 2866 | package org.apereo.cas.consent;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.util.RandomUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * This is {@link BaseConsentRepository}.
 *
 * <p>Base in-memory implementation backed by an insertion-ordered set of
 * {@link ConsentDecision} records. Subclasses supply persistence by swapping
 * the backing set. Not thread-safe.</p>
 *
 * @author Misagh Moayyed
 * @since 5.2.0
 */
public abstract class BaseConsentRepository implements ConsentRepository {
    private static final long serialVersionUID = 1736846688546785564L;

    /** Backing store of decisions, kept in insertion order. */
    private Set<ConsentDecision> consentDecisions;

    public BaseConsentRepository() {
        this.consentDecisions = new LinkedHashSet<>();
    }

    /**
     * Finds the decision matching both the authenticated principal's id and
     * the service id, or {@code null} when none exists.
     */
    @Override
    public ConsentDecision findConsentDecision(final Service service, final RegisteredService registeredService,
                                               final Authentication authentication) {
        return this.consentDecisions
                .stream()
                .filter(d -> d.getPrincipal().equals(authentication.getPrincipal().getId())
                        && d.getService().equals(service.getId()))
                .findFirst()
                .orElse(null);
    }

    /**
     * Returns all decisions recorded for the given principal (possibly empty).
     */
    @Override
    public Collection<ConsentDecision> findConsentDecisions(final String principal) {
        return this.consentDecisions
                .stream()
                .filter(d -> d.getPrincipal().equals(principal))
                .collect(Collectors.toSet());
    }

    /**
     * Returns a snapshot of every stored decision.
     */
    @Override
    public Collection<ConsentDecision> findConsentDecisions() {
        return new ArrayList<>(this.consentDecisions);
    }

    /**
     * Stores or updates a decision. When a decision with the same id already
     * exists it is replaced; otherwise a fresh non-negative id is assigned.
     *
     * @return always {@code true}
     */
    @Override
    public boolean storeConsentDecision(final ConsentDecision decision) {
        final ConsentDecision consent = getConsentDecisions()
                .stream()
                .filter(d -> d.getId() == decision.getId())
                .findFirst()
                .orElse(null);
        if (consent != null) {
            // Fix: remove the STORED instance, not the incoming one. Removing
            // "decision" only works if ConsentDecision overrides equals/hashCode
            // to match on id; removing "consent" works unconditionally.
            getConsentDecisions().remove(consent);
        } else {
            // Mask the sign bit instead of Math.abs: abs(Integer.MIN_VALUE) is
            // still negative, which would produce an invalid id.
            decision.setId(RandomUtils.getInstanceNative().nextInt() & Integer.MAX_VALUE);
        }
        getConsentDecisions().add(decision);
        return true;
    }

    /**
     * Deletes the decision with the given id belonging to the given principal.
     *
     * @return {@code true} if a matching decision was found and removed,
     *         {@code false} otherwise (the original threw
     *         {@code NoSuchElementException} when nothing matched).
     */
    @Override
    public boolean deleteConsentDecision(final long decisionId, final String principal) {
        return findConsentDecisions(principal)
                .stream()
                .filter(d -> d.getId() == decisionId)
                .findFirst()
                .map(this.consentDecisions::remove)
                .orElse(false);
    }

    protected Set<ConsentDecision> getConsentDecisions() {
        return this.consentDecisions;
    }

    protected void setConsentDecisions(final Set<ConsentDecision> consentDecisions) {
        this.consentDecisions = consentDecisions;
    }
}
| apache-2.0 |
cmoulliard/apiman | manager/api/beans/src/main/java/io/apiman/manager/api/beans/plans/PlanBean.java | 4141 | /*
* Copyright 2014 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.manager.api.beans.plans;
import io.apiman.manager.api.beans.orgs.OrganizationBasedCompositeId;
import io.apiman.manager.api.beans.orgs.OrganizationBean;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.IdClass;
import javax.persistence.JoinColumn;
import javax.persistence.JoinColumns;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
/**
 * Models a plan.
 *
 * <p>JPA entity mapped to the {@code plans} table with a composite key of
 * organization + id. {@link Date} values are defensively copied on the way in
 * and out so callers cannot mutate the entity's internal state (JPA accesses
 * the fields directly, so the copies do not affect persistence).</p>
 *
 * @author eric.wittmann@redhat.com
 */
@Entity
@Table(name = "plans")
@IdClass(OrganizationBasedCompositeId.class)
@JsonInclude(Include.NON_NULL)
public class PlanBean implements Serializable, Cloneable {

    private static final long serialVersionUID = -7961331943587584049L;

    @Id
    @ManyToOne
    @JoinColumns({
        @JoinColumn(name="organization_id", referencedColumnName="id")
    })
    private OrganizationBean organization;
    @Id
    @Column(nullable=false)
    private String id;
    @Column(nullable=false)
    private String name;
    @Column(updatable=true, nullable=true, length=512)
    private String description;
    @Column(name = "created_by", updatable=false, nullable=false)
    private String createdBy;
    @Column(name = "created_on", updatable=false, nullable=false)
    private Date createdOn;

    /**
     * @return the id
     */
    public String getId() {
        return id;
    }

    /**
     * @param id the id to set
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return a defensive copy of the creation timestamp (may be null)
     */
    public Date getCreatedOn() {
        // Date is mutable; hand out a copy so callers cannot alter entity state.
        return createdOn == null ? null : new Date(createdOn.getTime());
    }

    /**
     * @param createdOn the createdOn to set (stored as a defensive copy)
     */
    public void setCreatedOn(Date createdOn) {
        this.createdOn = createdOn == null ? null : new Date(createdOn.getTime());
    }

    /**
     * @return the description
     */
    public String getDescription() {
        return description;
    }

    /**
     * @param description the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return the createdBy
     */
    public String getCreatedBy() {
        return createdBy;
    }

    /**
     * @param createdBy the createdBy to set
     */
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy;
    }

    /**
     * @return the organization
     */
    public OrganizationBean getOrganization() {
        return organization;
    }

    /**
     * @param organization the organization to set
     */
    public void setOrganization(OrganizationBean organization) {
        this.organization = organization;
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    @SuppressWarnings("nls")
    public String toString() {
        return "PlanBean [organization=" + organization + ", id=" + id + ", name=" + name + ", description="
                + description + ", createdBy=" + createdBy + ", createdOn=" + createdOn + "]";
    }

    /**
     * Creates a shallow copy, except that the mutable {@code createdOn} date is
     * deep-copied so the clone and the original do not share state.
     *
     * @see java.lang.Object#clone()
     */
    @Override
    public Object clone() throws CloneNotSupportedException {
        final PlanBean copy = (PlanBean) super.clone();
        if (copy.createdOn != null) {
            copy.createdOn = new Date(copy.createdOn.getTime());
        }
        return copy;
    }
}
| apache-2.0 |