repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
puppetlabs/pxp-agent | lib/tests/unit/modules/command_test.cc | 7211 | #include <catch.hpp>
#include "root_path.hpp"
#include "../../common/content_format.hpp"
#include <boost/algorithm/string.hpp>
#include <leatherman/execution/execution.hpp>
#include <leatherman/util/scope_exit.hpp>
#include <leatherman/file_util/file.hpp>
#include <pxp-agent/configuration.hpp>
#include <pxp-agent/modules/command.hpp>
using namespace PXPAgent;
namespace fs = boost::filesystem;
namespace lth_exe = leatherman::execution;
namespace lth_file = leatherman::file_util;
namespace lth_jc = leatherman::json_container;
static const std::string SPOOL_DIR { std::string { PXP_AGENT_ROOT_PATH }
+ "/lib/tests/resources/test_spool" };
static const auto STORAGE = std::make_shared<ResultsStorage>(SPOOL_DIR, "0d");
// Creates the test spool directory (if missing) and initializes the agent
// Configuration singleton with a no-op shutdown callback.
// Fails the current test case if the directory cannot be created.
static void configureTest() {
    if (!fs::exists(SPOOL_DIR) && !fs::create_directories(SPOOL_DIR)) {
        FAIL("Failed to create the results directory");
    }
    Configuration::Instance().initialize(
        [](std::vector<std::string>) {
            return EXIT_SUCCESS;
        });
}
static void resetTest() {
if (fs::exists(SPOOL_DIR)) {
fs::remove_all(SPOOL_DIR);
}
}
// Builds a non-blocking "command run" ActionRequest from the given JSON
// parameters text, creates a per-transaction results directory under the
// test spool dir, and attaches it to the request.
//   params_txt     - JSON object text for the action parameters,
//                    e.g. { "command": "echo hello" }
//   transaction_id - transaction id placed in the request (default "0987")
static ActionRequest command_request(const std::string& params_txt, std::string transaction_id = "0987") {
    // Fill the NON_BLOCKING_DATA_FORMAT template: transaction id,
    // module ("command"), action ("run"), parameters, notify flag.
    std::string command_txt {
        (NON_BLOCKING_DATA_FORMAT % boost::io::quoted(transaction_id)
            % "\"command\""
            % "\"run\""
            % params_txt
            % "false").str()
    };
    // envelope, data, debug chunks, and a trailing count
    // (presumably the number of invalid debug chunks -- TODO confirm).
    PCPClient::ParsedChunks command_content {
        lth_jc::JsonContainer(ENVELOPE_TXT),
        lth_jc::JsonContainer(command_txt),
        {},
        0
    };
    ActionRequest request { RequestType::NonBlocking, command_content };
    // The command module writes its stdout/stderr/exitcode/pid files into
    // this per-transaction directory.
    fs::path spool_path { SPOOL_DIR };
    auto results_dir = (spool_path / request.transactionId()).string();
    fs::create_directories(results_dir);
    request.setResultsDir(results_dir);
    return request;
}
// Construction smoke test for the command module.
TEST_CASE("Modules::Command", "[modules]") {
    SECTION("can successfully instantiate") {
        REQUIRE_NOTHROW(Modules::Command(PXP_AGENT_BIN_PATH, STORAGE));
    }
}
// Verifies action lookup: only "run" is a real action of the module.
TEST_CASE("Modules::Command::hasAction", "[modules]") {
    Modules::Command mod { PXP_AGENT_BIN_PATH, STORAGE };
    SECTION("correctly reports false for a nonexistent action") {
        REQUIRE_FALSE(mod.hasAction("foo"));
    }
    SECTION("correctly reports true for a real action") {
        REQUIRE(mod.hasAction("run"));
    }
}
// Happy path: run a simple echo command and verify all execution
// artifacts (stdout, stderr, exit code, PID file) are produced.
TEST_CASE("Modules::Command::callAction successfully", "[modules]") {
    configureTest();
    // resetTest runs at scope exit even if an assertion fails.
    lth_util::scope_exit config_cleaner { resetTest };
    Modules::Command mod { PXP_AGENT_BIN_PATH, STORAGE };
#ifdef _WIN32
    static const std::string echo_params { "{ \"command\": \"write-host hello\" }" };
#else
    static const std::string echo_params { "{ \"command\": \"echo hello\" }" };
#endif
    auto request = command_request(echo_params);
    auto response = mod.executeAction(request);
    SECTION("stdout is written to a file") {
        // Trim the trailing newline that echo/write-host appends.
        boost::trim(response.output.std_out);
        REQUIRE(response.output.std_out == "hello");
    }
    SECTION("stderr is written to a file") {
        REQUIRE(response.output.std_err == "");
    }
    SECTION("exit code is written to a file") {
        REQUIRE(response.output.exitcode == 0);
    }
    SECTION("PID is written to a file") {
        fs::path spool_path { SPOOL_DIR };
        auto pid_path = spool_path / request.transactionId() / "pid";
        REQUIRE(fs::exists(pid_path));
        try {
            // Only verifies the pid file parses as an integer; the
            // value itself is not asserted against anything.
            auto pid_txt = lth_file::read(pid_path.string());
            auto pid = std::stoi(pid_txt);
        } catch (std::exception&) {
            FAIL("Failed to read PID");
        }
    }
}
// The command does not exist: the result is still valid, stdout is
// empty, stderr mentions the missing command, and the exit code is the
// platform's "not found" code.
TEST_CASE("Modules::Command::callAction where the command is not found", "[modules]") {
    configureTest();
    // resetTest runs at scope exit even if an assertion fails.
    lth_util::scope_exit config_cleaner { resetTest };
    Modules::Command mod { PXP_AGENT_BIN_PATH, STORAGE };
    static const std::string missing_command { "{ \"command\": \"not-a-real-command\" }" };
    auto request = command_request(missing_command);
    auto response = mod.executeAction(request);
    SECTION("the execution result itself is still valid") {
        REQUIRE(response.action_metadata.get<bool>("results_are_valid"));
    }
    SECTION("stdout is written to a file and is empty") {
        REQUIRE(response.output.std_out == "");
    }
    // The error message doesn't really matter, really we are just testing that
    // stderr isn't empty. For posterity: all shells, regardless of platform,
    // will likely output something about not-a-real-command not existing. Thus
    // we will just check that stderr contains the command name for the thing
    // that didn't exist
    static const std::string expected_stderr { "not-a-real-command" };
    // Fixed section title: it previously claimed stderr "is empty", but the
    // assertion checks that stderr mentions the missing command.
    SECTION("stderr is written to a file and mentions the missing command") {
        REQUIRE(response.output.std_err.find(expected_stderr) != std::string::npos);
    }
#ifdef _WIN32
    // Powershell always exits 1 on an uncaught error.
    int expected_exit_code = 1;
#else
    // POSIX shells report "command not found" with exit code 127.
    int expected_exit_code = 127;
#endif
    SECTION("exit code is written to a file and read into the result object") {
        REQUIRE(response.output.exitcode == expected_exit_code);
    }
    SECTION("PID is written to a file") {
        fs::path spool_path { SPOOL_DIR };
        auto pid_path = spool_path / request.transactionId() / "pid";
        REQUIRE(fs::exists(pid_path));
        try {
            // Only verifies the pid file parses as an integer; discard
            // the value instead of binding an unused local.
            auto pid_txt = lth_file::read(pid_path.string());
            static_cast<void>(std::stoi(pid_txt));
        } catch (std::exception&) {
            FAIL("Failed to read PID");
        }
    }
}
// The command exists but fails: stderr has content and the exit code is
// non-zero but not 127 ("command not found").
TEST_CASE("Modules::Command::callAction where the command fails", "[modules]") {
    configureTest();
    // resetTest runs at scope exit even if an assertion fails.
    lth_util::scope_exit config_cleaner { resetTest };
    Modules::Command mod { PXP_AGENT_BIN_PATH, STORAGE };
#ifdef _WIN32
    static const std::string bad_command { "{ \"command\": \"powershell.exe -not-a-real-option\" }" };
#else
    static const std::string bad_command { "{ \"command\": \"ls -not-a-real-option\" }" };
#endif
    auto request = command_request(bad_command);
    auto response = mod.executeAction(request);
    SECTION("the execution result itself is still valid") {
        REQUIRE(response.action_metadata.get<bool>("results_are_valid"));
    }
    SECTION("stdout is written to a file and is empty") {
        REQUIRE(response.output.std_out == "");
    }
    SECTION("stderr is written to a file and contains a message") {
        REQUIRE_FALSE(response.output.std_err == "");
    }
    SECTION("exit code is written to a file and indicates failure") {
        REQUIRE(response.output.exitcode > 0);
        REQUIRE(response.output.exitcode != 127); // This would be the "not found" exit code
    }
    SECTION("PID is written to a file") {
        fs::path spool_path { SPOOL_DIR };
        auto pid_path = spool_path / request.transactionId() / "pid";
        REQUIRE(fs::exists(pid_path));
        try {
            // Only verifies the pid file parses as an integer.
            auto pid_txt = lth_file::read(pid_path.string());
            auto pid = std::stoi(pid_txt);
        } catch (std::exception&) {
            FAIL("Failed to read PID");
        }
    }
}
| apache-2.0 |
tngraf/Tethys.Silverlight | Tethys.Silverlight.WPF/Support/DialogHelper.cs | 12465 | #region Header
// --------------------------------------------------------------------------
// Tethys.Silverlight
// ==========================================================================
//
// This library contains common code for WPF, Silverlight, Windows Phone and
// Windows 8 projects.
//
// ===========================================================================
//
// <copyright file="DialogHelper.cs" company="Tethys">
// Copyright 2010-2015 by Thomas Graf
// All rights reserved.
// Licensed under the Apache License, Version 2.0.
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied.
// </copyright>
//
// System ... Microsoft .Net Framework 4.5
// Tools .... Microsoft Visual Studio 2013
//
// ---------------------------------------------------------------------------
#endregion
namespace Tethys.Silverlight.Support
{
using System;
using System.Runtime.InteropServices;
using System.Windows;
using System.Windows.Interop;
using Tethys.Silverlight.Helper;
/// <summary>
/// Helper class to show/hide dialog title bar buttons.
/// Based on code of <c>Jörg Neumann</c> published in DotNetPro 06/2012.
/// </summary>
public static class DialogHelper
{
#region PROPERTIES
#region ShowMinimizeButton
/// <summary>
/// ShowMinimizeButtonProperty DependencyProperty.
/// Defaults to <c>true</c>; a change triggers <see cref="OnButtonChanged"/>,
/// which re-applies the native window style.
/// </summary>
public static readonly DependencyProperty ShowMinimizeButtonProperty =
    DependencyProperty.RegisterAttached("ShowMinimizeButton", typeof(bool),
    typeof(DialogHelper),
    new UIPropertyMetadata(true, OnButtonChanged));
/// <summary>
/// Gets the show minimize button.
/// </summary>
/// <param name="element">The element.</param>
/// <returns>The show minimize button value.</returns>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static bool GetShowMinimizeButton(DependencyObject element)
{
    return (bool)element.GetValue(ShowMinimizeButtonProperty);
}
/// <summary>
/// Sets the show minimize button.
/// </summary>
/// <param name="element">The element.</param>
/// <param name="value">if set to <c>true</c> the minimize button is shown.</param>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static void SetShowMinimizeButton(DependencyObject element, bool value)
{
    element.SetValue(ShowMinimizeButtonProperty, value);
}
#endregion
#region ShowMaximizeButton
/// <summary>
/// ShowMaximizeButtonProperty DependencyProperty.
/// Defaults to <c>true</c>; a change triggers <see cref="OnButtonChanged"/>,
/// which re-applies the native window style.
/// </summary>
public static readonly DependencyProperty ShowMaximizeButtonProperty =
    DependencyProperty.RegisterAttached("ShowMaximizeButton", typeof(bool), typeof(DialogHelper),
    new UIPropertyMetadata(true, OnButtonChanged));
/// <summary>
/// Gets the show maximize button.
/// </summary>
/// <param name="element">The element.</param>
/// <returns>The show maximize button value.</returns>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static bool GetShowMaximizeButton(DependencyObject element)
{
    return (bool)element.GetValue(ShowMaximizeButtonProperty);
}
/// <summary>
/// Sets the show maximize button.
/// </summary>
/// <param name="element">The element.</param>
/// <param name="value">if set to <c>true</c> the maximize button is shown.</param>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static void SetShowMaximizeButton(DependencyObject element, bool value)
{
    element.SetValue(ShowMaximizeButtonProperty, value);
}
#endregion
#region ShowHelpButton
/// <summary>
/// ShowHelpButtonProperty DependencyProperty.
/// Defaults to <c>false</c>; a change triggers <see cref="OnButtonChanged"/>,
/// which re-applies the native window style.
/// </summary>
public static readonly DependencyProperty ShowHelpButtonProperty =
    DependencyProperty.RegisterAttached("ShowHelpButton", typeof(bool), typeof(DialogHelper),
    new UIPropertyMetadata(false, OnButtonChanged));
/// <summary>
/// Gets the show help button.
/// </summary>
/// <param name="element">The element.</param>
/// <returns>The show help button value.</returns>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static bool GetShowHelpButton(DependencyObject element)
{
    return (bool)element.GetValue(ShowHelpButtonProperty);
}
/// <summary>
/// Sets the show help button.
/// </summary>
/// <param name="element">The element.</param>
/// <param name="value">if set to <c>true</c> the title bar help button is shown.</param>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static void SetShowHelpButton(DependencyObject element, bool value)
{
    element.SetValue(ShowHelpButtonProperty, value);
}
#endregion
#region WindowStartupLocation
/// <summary>
/// WindowStartupLocationProperty DependencyProperty.
/// Unlike the button properties, this one simply forwards the value to
/// <see cref="Window.WindowStartupLocation"/>; no native style change.
/// </summary>
public static readonly DependencyProperty WindowStartupLocationProperty =
    DependencyProperty.RegisterAttached("WindowStartupLocation",
    typeof(WindowStartupLocation), typeof(DialogHelper),
    new PropertyMetadata(WindowStartupLocation.Manual,
    OnWindowStartupLocationPropertyChanged));
/// <summary>
/// Called when the window startup location property has changed.
/// Copies the new value onto the window itself.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The <see cref="DependencyPropertyChangedEventArgs"/>
/// instance containing the event data.</param>
private static void OnWindowStartupLocationPropertyChanged(
    DependencyObject sender, DependencyPropertyChangedEventArgs e)
{
    var window = sender as Window;
    if (window != null)
    {
        window.WindowStartupLocation = (WindowStartupLocation)e.NewValue;
    }
}
/// <summary>
/// Gets the window startup location.
/// </summary>
/// <param name="element">The element.</param>
/// <returns>The window startup location value.</returns>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static WindowStartupLocation GetWindowStartupLocation(DependencyObject element)
{
    return (WindowStartupLocation)element.GetValue(WindowStartupLocationProperty);
}
/// <summary>
/// Sets the window startup location.
/// </summary>
/// <param name="element">The element.</param>
/// <param name="value">The value.</param>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static void SetWindowStartupLocation(DependencyObject element, WindowStartupLocation value)
{
    element.SetValue(WindowStartupLocationProperty, value);
}
#endregion
#region ShowIcon
/// <summary>
/// HideIconProperty DependencyProperty.
/// Defaults to <c>false</c>; a change triggers <see cref="OnButtonChanged"/>,
/// which re-applies the native window style.
/// </summary>
public static readonly DependencyProperty HideIconProperty =
    DependencyProperty.RegisterAttached("HideIcon", typeof(bool),
    typeof(DialogHelper),
    new UIPropertyMetadata(false, OnButtonChanged));
/// <summary>
/// Gets the hide icon.
/// </summary>
/// <param name="element">The element.</param>
/// <returns>the hide icon value.</returns>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static bool GetHideIcon(DependencyObject element)
{
    return (bool)element.GetValue(HideIconProperty);
}
/// <summary>
/// Sets the hide icon.
/// </summary>
/// <param name="element">The element.</param>
/// <param name="value">if set to <c>true</c> the title bar icon is hidden.</param>
[AttachedPropertyBrowsableForType(typeof(Window))]
public static void SetHideIcon(DependencyObject element, bool value)
{
    element.SetValue(HideIconProperty, value);
}
#endregion
#endregion
//// ----------------------------------------------------------------------
#region Event Handling
/// <summary>
/// Called when a button has changed.
/// If the window's native handle does not exist yet, the style update is
/// deferred until SourceInitialized; otherwise it is applied right away.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The
/// <see cref="System.Windows.DependencyPropertyChangedEventArgs"/>
/// instance containing the event data.</param>
private static void OnButtonChanged(DependencyObject sender,
    DependencyPropertyChangedEventArgs e)
{
    var window = sender as Window;
    if (window != null)
    {
        var handle = new WindowInteropHelper(window).Handle;
        if (handle == IntPtr.Zero)
        {
            // No HWND yet: apply the style once the window source exists.
            window.SourceInitialized += OnSourceInitialized;
        }
        else
        {
            UpdateStyle(window);
        } // if
    } // if
} // OnButtonChanged()
/// <summary>
/// Called when the source has been initialized.
/// Unsubscribes itself and applies the deferred style update.
/// </summary>
/// <param name="sender">The sender.</param>
/// <param name="e">The <see cref="System.EventArgs"/> instance
/// containing the event data.</param>
private static void OnSourceInitialized(object sender, EventArgs e)
{
    var window = sender as Window;
    if (window != null)
    {
        window.SourceInitialized -= OnSourceInitialized;
        UpdateStyle(window);
    } // if
} // OnSourceInitialized()
#endregion
//// ----------------------------------------------------------------------
#region PRIVATE METHODS
/// <summary>
/// Updates the native window style flags of the given window so that
/// they match the attached properties (min/max/help buttons, icon).
/// </summary>
/// <param name="window">The window.</param>
private static void UpdateStyle(Window window)
{
    var handle = new WindowInteropHelper(window).Handle;
    var style = NativeMethods.GetWindowLong(handle, NativeMethods.Style);
    if (GetShowMaximizeButton(window))
    {
        style |= NativeMethods.MaximizeBox;
    }
    else
    {
        style &= ~NativeMethods.MaximizeBox;
    } // if
    if (GetShowMinimizeButton(window))
    {
        style |= NativeMethods.MinimizeBox;
    }
    else
    {
        style &= ~NativeMethods.MinimizeBox;
    } // if
    NativeMethods.SetWindowLong(handle, NativeMethods.Style, style);
    var extendedStyle = NativeMethods.GetWindowLong(handle, NativeMethods.ExtStyle);
    if (GetShowHelpButton(window))
    {
        extendedStyle |= NativeMethods.ContextHelp;
    }
    else
    {
        // Fixed: was "extendedStyle &= -(~NativeMethods.ContextHelp);",
        // which ANDs with (ContextHelp + 1) and does NOT clear the flag.
        extendedStyle &= ~NativeMethods.ContextHelp;
    } // if
    if (GetHideIcon(window))
    {
        extendedStyle |= NativeMethods.DialogModalFrame;
    }
    else
    {
        // Fixed: same mask bug as above for DialogModalFrame.
        extendedStyle &= ~NativeMethods.DialogModalFrame;
    } // if
    NativeMethods.SetWindowLong(handle, NativeMethods.ExtStyle, extendedStyle);
    // Update the window's non-client area to reflect the changes
    NativeMethods.SetWindowPos(handle, IntPtr.Zero, 0, 0, 0, 0,
        NativeMethods.NoMove | NativeMethods.NoSize | NativeMethods.NoZOrder | NativeMethods.SwpFramechanged);
    if (GetHideIcon(window))
    {
        // Sending WM_SETICON with a null icon removes the title bar icon.
        NativeMethods.SendMessage(handle, NativeMethods.Seticon, IntPtr.Zero, IntPtr.Zero);
    } // if
} // UpdateStyle()
#endregion // PRIVATE METHODS
} // DialogHelper
} // Tethys.Silverlight.Support
| apache-2.0 |
serban-petrescu/sbwo | src/main/java/spet/sbwo/integration/web/rojustportal/Query.java | 3356 | package spet.sbwo.integration.web.rojustportal;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.xml.namespace.QName;
import javax.xml.ws.Service;
import javax.xml.ws.WebEndpoint;
import javax.xml.ws.WebServiceClient;
import javax.xml.ws.WebServiceFeature;
import java.net.MalformedURLException;
import java.net.URL;
/**
 * Generated JAX-WS service client for the portalquery.just.ro "Query"
 * SOAP service.
 *
 * This class was generated by Apache CXF 3.1.7 2017-02-11T20:07:35.861+02:00
 * Generated source version: 3.1.7
 */
@WebServiceClient(name = "Query", wsdlLocation = "http://portalquery.just.ro/query.asmx?WSDL", targetNamespace =
        "portalquery.just.ro")
public class Query extends Service {
    public final static URL WSDL_LOCATION;
    public final static QName SERVICE = new QName("portalquery.just.ro", "Query");
    public final static QName QuerySoap12 = new QName("portalquery.just.ro", "QuerySoap12");
    public final static QName QuerySoap = new QName("portalquery.just.ro", "QuerySoap");
    // Fixed: the logger was created for Service.class, which filed the log
    // entries under the wrong logger category; use Query.class instead.
    private static final Logger LOG = LoggerFactory.getLogger(Query.class);

    static {
        URL url = null;
        try {
            url = new URL("http://portalquery.just.ro/query.asmx?WSDL");
        } catch (MalformedURLException e) {
            // Pass the exception to the logger so the cause is not swallowed.
            LOG.info("Can not initialize the default wsdl from {}", "http://portalquery.just.ro/query.asmx?WSDL", e);
        }
        WSDL_LOCATION = url;
    }

    public Query(URL wsdlLocation) {
        super(wsdlLocation, SERVICE);
    }

    public Query(URL wsdlLocation, QName serviceName) {
        super(wsdlLocation, serviceName);
    }

    public Query() {
        super(WSDL_LOCATION, SERVICE);
    }

    public Query(WebServiceFeature... features) {
        super(WSDL_LOCATION, SERVICE, features);
    }

    public Query(URL wsdlLocation, WebServiceFeature... features) {
        super(wsdlLocation, SERVICE, features);
    }

    public Query(URL wsdlLocation, QName serviceName, WebServiceFeature... features) {
        super(wsdlLocation, serviceName, features);
    }

    /**
     * @return returns QuerySoap
     */
    @WebEndpoint(name = "QuerySoap12")
    public QuerySoap getQuerySoap12() {
        return super.getPort(QuerySoap12, QuerySoap.class);
    }

    /**
     * @param features A list of {@link javax.xml.ws.WebServiceFeature} to configure
     *                 on the proxy. Supported features not in the
     *                 <code>features</code> parameter will have their default
     *                 values.
     * @return returns QuerySoap
     */
    @WebEndpoint(name = "QuerySoap12")
    public QuerySoap getQuerySoap12(WebServiceFeature... features) {
        return super.getPort(QuerySoap12, QuerySoap.class, features);
    }

    /**
     * @return returns QuerySoap
     */
    @WebEndpoint(name = "QuerySoap")
    public QuerySoap getQuerySoap() {
        return super.getPort(QuerySoap, QuerySoap.class);
    }

    /**
     * @param features A list of {@link javax.xml.ws.WebServiceFeature} to configure
     *                 on the proxy. Supported features not in the
     *                 <code>features</code> parameter will have their default
     *                 values.
     * @return returns QuerySoap
     */
    @WebEndpoint(name = "QuerySoap")
    public QuerySoap getQuerySoap(WebServiceFeature... features) {
        return super.getPort(QuerySoap, QuerySoap.class, features);
    }
}
| apache-2.0 |
apereo/cas | support/cas-server-support-google-analytics/src/main/java/org/apereo/cas/web/flow/CreateGoogleAnalyticsCookieAction.java | 2785 | package org.apereo.cas.web.flow;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.RegexUtils;
import org.apereo.cas.web.cookie.CasCookieBuilder;
import org.apereo.cas.web.flow.actions.BaseCasWebflowAction;
import org.apereo.cas.web.support.WebUtils;
import org.apereo.cas.web.support.gen.CookieRetrievingCookieGenerator;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.apache.commons.lang3.StringUtils;
import org.springframework.webflow.execution.Event;
import org.springframework.webflow.execution.RequestContext;
import java.util.LinkedHashMap;
import java.util.stream.Collectors;
/**
* This is {@link CreateGoogleAnalyticsCookieAction}.
*
* @author Misagh Moayyed
* @since 6.1.0
*/
@Slf4j
@RequiredArgsConstructor
public class CreateGoogleAnalyticsCookieAction extends BaseCasWebflowAction {
    // Provides the google-analytics cookie settings (attribute name and
    // value pattern).
    private final CasConfigurationProperties casProperties;

    // Builds and attaches the analytics cookie to the HTTP response.
    private final CasCookieBuilder googleAnalyticsCookieBuilder;

    /**
     * Merges authentication and principal attributes and, when the
     * configured attribute is present, joins its pattern-matching values
     * into a comma-separated cookie value and adds the analytics cookie
     * to the response.
     * Always returns {@code null} so the webflow continues with its
     * default transition.
     */
    @Override
    public Event doExecute(final RequestContext requestContext) {
        val authn = WebUtils.getAuthentication(requestContext);
        // Principal attributes are put last, so they win on key collisions.
        val attributes = new LinkedHashMap<>(authn.getAttributes());
        attributes.putAll(authn.getPrincipal().getAttributes());
        val cookie = casProperties.getGoogleAnalytics().getCookie();
        val attributeName = cookie.getAttributeName();
        val attributeValuePattern = RegexUtils.createPattern(cookie.getAttributeValuePattern());
        // Fixed: the first placeholder is the available attributes; the
        // original passed attributeName twice.
        LOGGER.trace("Available attributes are [{}] examined against cookie attribute name [{}] with value pattern [{}]",
            attributes, attributeName, attributeValuePattern.pattern());
        if (StringUtils.isNotBlank(attributeName) && attributes.containsKey(attributeName)) {
            val values = CollectionUtils.toCollection(attributes.get(attributeName));
            LOGGER.trace("Attribute values found for [{}] are [{}]", attributeName, values);
            // Keep only the values that match the configured pattern.
            val cookieValue = values
                .stream()
                .map(Object::toString)
                .filter(string -> RegexUtils.find(attributeValuePattern, string))
                .collect(Collectors.joining(","));
            LOGGER.trace("Google analytics final cookie value is [{}]", cookieValue);
            val request = WebUtils.getHttpServletRequestFromExternalWebflowContext(requestContext);
            val response = WebUtils.getHttpServletResponseFromExternalWebflowContext(requestContext);
            googleAnalyticsCookieBuilder.addCookie(request, response,
                CookieRetrievingCookieGenerator.isRememberMeAuthentication(requestContext), cookieValue);
        }
        return null;
    }
}
| apache-2.0 |
komamj/KomaMusic | app/src/main/java/com/koma/music/detail/artistdetail/ArtistDetailContract.java | 1361 | /*
* Copyright (C) 2017 Koma MJ
*
* Licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.koma.music.detail.artistdetail;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import com.koma.music.base.BasePresenter;
import com.koma.music.base.BaseView;
import com.koma.music.data.model.Album;
import java.util.List;
/**
* Created by koma on 5/5/17.
*/
/**
 * MVP contract between the artist-detail screen and its presenter.
 */
public interface ArtistDetailContract {
    /** View side: renders the artist's albums and artwork. */
    interface View extends BaseView<Presenter> {
        // Android context used by the presenter for resource access.
        Context getContext();
        // Id of the artist whose detail is being displayed.
        long getArtistId();
        void showArtistAlbums(List<Album> albumList);
        // Artwork may be delivered either as a drawable or a bitmap.
        void showArtwork(Drawable albumArt);
        void showArtwork(Bitmap bitmap);
    }
    /** Presenter side: loads the albums and the artwork for an artist. */
    interface Presenter extends BasePresenter {
        void loadArtistAlbums(long artistId);
        void loadArtWork(long artistId);
    }
}
| apache-2.0 |
OpenVnmrJ/OpenVnmrJ | src/vnmrj/src/vnmr/ui/shuf/DatePanel.java | 7897 | /*
* Copyright (C) 2015 University of Oregon
*
* You may distribute under the terms of either the GNU General Public
* License or the Apache License, as specified in the LICENSE file.
*
* For more information, see the LICENSE file.
*/
package vnmr.ui.shuf;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import java.io.*;
import javax.swing.*;
import javax.swing.event.*;
import vnmr.bo.*;
import vnmr.ui.*;
import vnmr.util.*;
/**
* A DatePanel consists of a calendar date, plus controls to modify the
* date.
*/
class DatePanel extends JComponent {
// ==== instance variables
/** calendar date */
private GregorianCalendar cal;
/** date label */
private UnderlineButton dateButton;
/** date listeners */
private Vector listeners;
/**
 * constructor
 * @param cal2 initial date to show; may be null, in which case the date
 *             label stays empty until {@link #setDate} is called
 */
public DatePanel(GregorianCalendar cal2) {
    listeners = new Vector();
    setLayout(new FlowLayout());
    dateButton = new UnderlineButton();
    dateButton.setBorder(BorderFactory.createEmptyBorder());
    // dateButton.setForeground(Color.blue);
    // JButton buttonLeft = new JButton(Util.getImageIcon("left.gif"));
    JButton buttonLeft = new JButton(Util.getImageIcon("open_arrow_left.png"));
    buttonLeft.setOpaque(false);
    buttonLeft.setBorder(BorderFactory.createEmptyBorder());
    add(buttonLeft);
    add(dateButton);
    // JButton buttonRight = new JButton(Util.getImageIcon("right.gif"));
    JButton buttonRight = new JButton(Util.getImageIcon("open_arrow_right.png"));
    buttonRight.setOpaque(false);
    buttonRight.setBorder(BorderFactory.createEmptyBorder());
    add(buttonRight);
    if (cal2 != null)
        setDate(cal2);
    // The arrow buttons do not mutate the panel's own date directly; they
    // notify the listeners with a cloned calendar shifted by one day, and
    // the listeners decide what to update.
    buttonLeft.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent evt) {
            //DatePanel.this.cal.add(Calendar.DATE, -1);
            //updateDateLabel();
            GregorianCalendar newCal =
                (GregorianCalendar)DatePanel.this.cal.clone();
            newCal.add(Calendar.DATE, -1);
            notifyDateListeners(newCal);
        }
    });
    buttonRight.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent evt) {
            //DatePanel.this.cal.add(Calendar.DATE, 1);
            //updateDateLabel();
            GregorianCalendar newCal =
                (GregorianCalendar)DatePanel.this.cal.clone();
            newCal.add(Calendar.DATE, 1);
            notifyDateListeners(newCal);
        }
    });
    // Click on the date button/string and it brings up a tcl calendar
    // which returns the date selected.
    dateButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent evt) {
            // create a Runnable so that I can start the calendar in a
            // thread and wait for the response in the thread without
            // hanging up vnmrj.
            Runnable runCal = new Runnable() {
                public void run() {
                    try {
                        Runtime rt = Runtime.getRuntime();
                        // Get the system directory where the
                        // executable is
                        String dir = FileUtil.sysdir();
                        Process prcs = null;
                        try {
                            // Start up the calendar
                            prcs = rt.exec(dir + "/tcl/bin/xcal2");
                            // attach to its stdout
                            InputStream istrm = prcs.getInputStream();
                            BufferedReader bfr;
                            bfr = new BufferedReader(new InputStreamReader(istrm));
                            // wait until the calendar exits; on selection it
                            // prints one line: "year month day"
                            String dateStr = bfr.readLine();
                            // Was there any output? If quit is
                            // clicked, there will be none
                            if(dateStr != null && dateStr.length() > 0) {
                                int date[] = {-1,-1,-1};
                                StringTokenizer st =
                                    new StringTokenizer(dateStr);
                                for(int i=0; i < 3; i++) {
                                    if(st.hasMoreTokens()) {
                                        String digit = st.nextToken();
                                        date[i] =
                                            Integer.parseInt(digit);
                                    }
                                }
                                if(date[0] > -1 && date[1] > -1 &&
                                   date[2] > -1) {
                                    GregorianCalendar newCal;
                                    // GregorianCalendar months are 0-based,
                                    // hence the -1 on the month value.
                                    newCal = new GregorianCalendar(
                                        date[0], date[1] -1,
                                        date[2]);
                                    // Tell the locator to update the
                                    // item in the sentence.
                                    notifyDateListeners(newCal);
                                }
                            }
                        }
                        finally {
                            // It is my understanding that these streams are left
                            // open sometimes depending on the garbage collector.
                            // So, close them.
                            if(prcs != null) {
                                OutputStream os = prcs.getOutputStream();
                                if(os != null)
                                    os.close();
                                InputStream is = prcs.getInputStream();
                                if(is != null)
                                    is.close();
                                is = prcs.getErrorStream();
                                if(is != null)
                                    is.close();
                            }
                        }
                    }
                    catch (Exception e) {
                        Messages.postError("Problem getting date "
                                           + " from calendar");
                        Messages.writeStackTrace(e);
                        return;
                    }
                }
            };
            // Start the thread, then just continue and let java run.
            Thread th = new Thread(runCal);
            th.start();
        }
    });
} // DatePanel()
/**
 * Registers a listener that is told whenever a new date is selected
 * via this panel.
 * @param listener the date listener to register
 */
public void addDateListener(DateListener listener) {
    listeners.add(listener);
} // addDateListener()
/**
 * Broadcasts a newly selected date to every registered listener.
 * @param newCal the new date
 */
public void notifyDateListeners(GregorianCalendar newCal) {
    for (int i = 0; i < listeners.size(); i++) {
        ((DateListener) listeners.get(i)).dateChanged(newCal);
    }
} // notifyDateListeners()
/**
 * set date
 * @param cal calendar date; also refreshes the visible date label
 */
public void setDate(GregorianCalendar cal) {
    this.cal = cal;
    updateDateLabel();
} // setDate()
/**
 * update dateButton according to calendar
 * The explicit repaint makes the label redraw immediately.
 */
private void updateDateLabel() {
    dateButton.setText(getDateStr());
    dateButton.repaint();
} // updateDateLabel()
/**
 * Formats the current calendar as "year-month-day" (month is 1-based,
 * fields are not zero-padded).
 * @return date string
 */
private String getDateStr() {
    StringBuilder sb = new StringBuilder();
    sb.append(cal.get(Calendar.YEAR)).append('-');
    sb.append(cal.get(Calendar.MONTH) + 1).append('-');
    sb.append(cal.get(Calendar.DAY_OF_MONTH));
    return sb.toString();
} // getDateStr()
} // class DatePanel
| apache-2.0 |
agwlvssainokuni/sqlapp | src/main/java/cherry/sqlapp/controller/sqltool/load/SqltoolLoadControllerImpl.java | 4908 | /*
* Copyright 2014,2015 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cherry.sqlapp.controller.sqltool.load;
import static org.springframework.web.servlet.mvc.method.annotation.MvcUriComponentsBuilder.fromMethodCall;
import static org.springframework.web.servlet.mvc.method.annotation.MvcUriComponentsBuilder.on;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mobile.device.site.SitePreference;
import org.springframework.security.core.Authentication;
import org.springframework.stereotype.Component;
import org.springframework.validation.BindingResult;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import org.springframework.web.servlet.view.RedirectView;
import org.springframework.web.util.UriComponents;
import cherry.foundation.async.AsyncProcessFacade;
import cherry.sqlapp.controller.PathDef;
import cherry.sqlapp.controller.sqltool.MdFormUtil;
import cherry.sqlapp.db.dto.SqltoolLoad;
import cherry.sqlapp.db.dto.SqltoolMetadata;
import cherry.sqlapp.service.sqltool.DataSourceDef;
import cherry.sqlapp.service.sqltool.metadata.MetadataService;
import cherry.sqlapp.service.sqltool.query.LoadService;
@Component
public class SqltoolLoadControllerImpl implements SqltoolLoadController {
public static final String ASYNC_PARAM = "asyncParam";
@Autowired
private DataSourceDef dataSourceDef;
@Autowired
private AsyncProcessFacade asyncProcessFacade;
@Autowired
private MetadataService metadataService;
@Autowired
private LoadService loadService;
@Autowired
private FormUtil formUtil;
@Autowired
private MdFormUtil mdFormUtil;
/**
 * Builds the form backing bean. When {@code ref} points to an existing
 * load definition whose metadata is owned by the current user, the
 * stored record pre-populates the form; otherwise a fresh form with the
 * default database name is returned.
 */
@Override
public SqltoolLoadForm getForm(Integer ref, Authentication auth) {
    if (ref != null) {
        // Only expose the record if the metadata belongs to this user.
        SqltoolMetadata md = metadataService.findById(ref, auth.getName());
        if (md != null) {
            SqltoolLoad record = loadService.findById(ref);
            if (record != null) {
                return formUtil.getForm(record);
            }
        }
    }
    SqltoolLoadForm form = new SqltoolLoadForm();
    form.setDatabaseName(dataSourceDef.getDefaultName());
    return form;
}
/**
 * Renders the initial load page; no model data is required.
 */
@Override
public ModelAndView init(Authentication auth, Locale locale, SitePreference sitePref, HttpServletRequest request) {
    return new ModelAndView(PathDef.VIEW_SQLTOOL_LOAD_INIT);
}
/**
 * Launches an asynchronous file-load process for the uploaded file and
 * redirects to the finish page (POST-redirect-GET), handing the async
 * process id over as a flash attribute.
 */
@Override
public ModelAndView execute(SqltoolLoadForm form, BindingResult binding, Authentication auth, Locale locale,
        SitePreference sitePref, HttpServletRequest request, RedirectAttributes redirAttr) {
    if (binding.hasErrors()) {
        // Validation failed: redisplay the entry form.
        ModelAndView mav = new ModelAndView(PathDef.VIEW_SQLTOOL_LOAD_INIT);
        return mav;
    }
    long asyncId = asyncProcessFacade.launchFileProcess(auth.getName(), "SqltoolLoadController", form.getFile(),
            "execLoadFileProcessHandler", form.getDatabaseName(), form.getSql());
    redirAttr.addFlashAttribute(ASYNC_PARAM, asyncId);
    UriComponents uc = fromMethodCall(on(SqltoolLoadController.class).finish(auth, locale, sitePref, request))
            .build();
    ModelAndView mav = new ModelAndView();
    mav.setView(new RedirectView(uc.toUriString(), true));
    return mav;
}
@Override
public ModelAndView finish(Authentication auth, Locale locale, SitePreference sitePref, HttpServletRequest request) {
ModelAndView mav = new ModelAndView(PathDef.VIEW_SQLTOOL_LOAD_FINISH);
return mav;
}
@Override
public ModelAndView create(SqltoolLoadForm form, BindingResult binding, Authentication auth, Locale locale,
SitePreference sitePref, HttpServletRequest request) {
if (binding.hasErrors()) {
ModelAndView mav = new ModelAndView(PathDef.VIEW_SQLTOOL_LOAD_INIT);
return mav;
}
SqltoolLoad record = new SqltoolLoad();
record.setDatabaseName(form.getDatabaseName());
record.setQuery(form.getSql());
record.setLockVersion(form.getLockVersion());
int id = loadService.create(record, auth.getName());
UriComponents uc = fromMethodCall(on(SqltoolLoadIdController.class).init(id, auth, locale, sitePref, request))
.build();
ModelAndView mav = new ModelAndView();
mav.setView(new RedirectView(uc.toUriString(), true));
return mav;
}
}
| apache-2.0 |
Axway/Grapes | server/src/test/java/org/axway/grapes/server/core/options/filters/SearchFilterTest.java | 1686 | package org.axway.grapes.server.core.options.filters;
import org.axway.grapes.server.db.datamodel.DbSearch;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.*;
/** Unit tests for {@code SearchFilter}: null handling plus module/artifact selection. */
public class SearchFilterTest {

    @Test
    public void filter() throws Exception {
        // A null search never passes the filter, regardless of the flag combination.
        SearchFilter filter = new SearchFilter(true, true);
        assertFalse(filter.filter(null));
        filter = new SearchFilter(true, false);
        assertFalse(filter.filter(null));
        filter = new SearchFilter(false, true);
        assertFalse(filter.filter(null));
        filter = new SearchFilter(null, null);
        assertFalse(filter.filter(null));
    }

    @Test
    public void testModuleSearch() {
        // Two searches: one carrying only module ids, one carrying only artifact ids.
        List<String> moduleIds = new ArrayList<>();
        moduleIds.add("test_id_1");
        moduleIds.add("test_id_2");
        List<String> artifactIds = new ArrayList<>();
        artifactIds.add("test_artifact_id_1");
        artifactIds.add("test_artifact_id_2");
        DbSearch filterModules = new DbSearch();
        filterModules.setModules(moduleIds);
        DbSearch filterArtifacts = new DbSearch();
        filterArtifacts.setArtifacts(artifactIds);
        // (modules=false, artifacts=true) accepts module-bearing searches only.
        // NOTE(review): flag order/meaning inferred from these expectations — confirm
        // against the SearchFilter constructor.
        SearchFilter filter = new SearchFilter(false, true);
        assertTrue(filter.filter(filterModules));
        assertFalse(filter.filter(filterArtifacts));
        // (true, false) accepts artifact-bearing searches only.
        filter = new SearchFilter(true, false);
        assertTrue(filter.filter(filterArtifacts));
        assertFalse(filter.filter(filterModules));
        // (false, false) accepts nothing.
        filter = new SearchFilter(false, false);
        assertFalse(filter.filter(filterArtifacts));
        assertFalse(filter.filter(filterModules));
    }
} | apache-2.0 |
qinannmj/FireFly | src/main/java/cn/com/sparkle/firefly/net/netlayer/raptor/RaptorBuf.java | 904 | package cn.com.sparkle.firefly.net.netlayer.raptor;
import java.nio.ByteBuffer;
import cn.com.sparkle.firefly.net.netlayer.buf.Buf;
import cn.com.sparkle.raptor.core.buff.CycleBuff;
import cn.com.sparkle.raptor.core.buff.IoBuffer;
/**
 * {@link Buf} adapter backed by a raptor {@link IoBuffer}.
 */
public class RaptorBuf implements Buf {
    // Underlying raptor buffer; shared (not copied) between this instance and its duplicates.
    private IoBuffer buffer;
    // ByteBuffer view handed out to callers.
    private ByteBuffer byteBuffer;

    public RaptorBuf(IoBuffer buffer) {
        this.buffer = buffer;
        this.byteBuffer = buffer.getByteBuffer();
    }

    // Private: pairs the shared IoBuffer with an independent ByteBuffer view
    // (used by duplicateBuf()).
    private RaptorBuf(IoBuffer buffer, ByteBuffer byteBuffer) {
        this.buffer = buffer;
        this.byteBuffer = byteBuffer;
    }

    @Override
    public ByteBuffer getByteBuffer() {
        return byteBuffer;
    }

    @Override
    public Buf duplicateBuf() {
        // Duplicate shares the backing IoBuffer but gets its own position/limit
        // via ByteBuffer.duplicate().
        RaptorBuf buf = new RaptorBuf(buffer, buffer.getByteBuffer().duplicate());
        // Reference-counted buffers are retained so that each duplicate's close()
        // is balanced — presumably CycleBuff's close() decrements the count; verify.
        if (buffer instanceof CycleBuff) {
            ((CycleBuff) buffer).incRef();
        }
        return buf;
    }

    @Override
    public void close() {
        buffer.close();
    }
}
| apache-2.0 |
var showPageNum = 15;    // rows shown per page
var startLineNum = 0;    // zero-based index of the first row of the current page
var currentPageNum = 0;  // zero-based current page number
var gloabPageDiv = "";   // id of the element the pager is rendered into
var gloabPageUrl = "";   // URL queried for page data
var gloabDataSearchResult = function(){};  // consumer callback receiving each page's rows
/**
 * Renders the pagination component for the given page result into the element
 * with id pageDiv (cleared when pageResult is null).
 *
 * Fixes two off-by-one defects in the original: the "previous" link was shown
 * even on the first page (page>=0 is always true, navigating to page -1), and
 * the "next" link was shown on the last page (navigating past pageCount-1).
 * Page numbers are zero-based; pageCount is the total number of pages.
 */
function createPageComponent(pageResult, pageDiv) {
    if (pageResult != null) {
        var html = '<div class="pagination pagination-large pagination-centered">';
        html += "<ul>";
        /* "previous" link: only when we are not already on the first page */
        if ((Number(pageResult.page)) > 0) {
            html += '<li><a href="javascript:void(0);" onclick="processPageInfo(' + (Number(pageResult.page) - 1) + ',' + pageResult.pageRowNum + ')">上一页</a></li>';
        }
        /* numbered page links, capped at the first 8 pages */
        var pageCount = pageResult.pageCount;
        for (var index = 0; index < pageCount; index++) {
            if (index < 8) {
                html += '<li><a href="javascript:void(0);" onclick="processPageInfo(' + (index) + ',' + pageResult.pageRowNum + ')">' + (index + 1) + '</a></li>';
            } else {
                break;
            }
        }
        /* "next" link: only when a page after the current one exists */
        if ((Number(pageResult.page) + 1) < pageResult.pageCount) {
            html += '<li><a href="javascript:void(0);" onclick="processPageInfo(' + (Number(pageResult.page) + 1) + ',' + pageResult.pageRowNum + ')">下一页</a></li>';
        }
        /* trailing "total/current" indicator */
        html += '<li><a href="javascript:void(0);">' + (pageResult.pageCount) + '/' + (Number(pageResult.page) + 1) + '</a></li>';
        html += "</ul>";
        html += "</div>";
        $("#" + pageDiv).html(html);
    } else {
        $("#" + pageDiv).html("");
    }
}
/** Jumps to the requested page: records it, recomputes the start-row offset, reloads. */
function processPageInfo(targetPage, rowsPerPage) {
    currentPageNum = targetPage;
    startLineNum = Number(rowsPerPage) * currentPageNum;
    // re-issue the paged request with the updated offsets
    pageRequestDatas();
}
/** Wires up the pager: remembers target div, URL and result callback, then loads the data. */
function processRequestInfo(divId, url, onData) {
    gloabPageDiv = divId;
    gloabPageUrl = url;
    gloabDataSearchResult = onData;
    // kick off the initial paged request
    pageRequestDatas();
}
/** Issues the paged AJAX request and renders both the pager and the data rows. */
function pageRequestDatas() {
    $.ajax({
        url: gloabPageUrl,
        type: "get",
        data: "start=" + startLineNum + "&limit=" + showPageNum + "&page=" + currentPageNum,
        dataType: "json",
        success: function (result) {
            // Rebuild the pagination component from the returned page metadata.
            createPageComponent(result.pageResult, gloabPageDiv);
            // Hand the row data to the consumer-supplied callback.
            gloabDataSearchResult(result.dataResult);
        }
    });
} | apache-2.0 |
Enteee/sinep | ch.bfh.sinep.service/src/main/java/ch/bfh/sinep/service/impl/DefaultUserService.java | 5348 | package ch.bfh.sinep.service.impl;
import ch.bfh.sinep.model.Group;
import ch.bfh.sinep.model.User;
import ch.bfh.sinep.repository.GroupRepository;
import ch.bfh.sinep.repository.UserRepository;
import ch.bfh.sinep.service.UserService;
import ch.bfh.sinep.service.dto.GroupDTO;
import ch.bfh.sinep.service.dto.UserDTO;
import ch.bfh.sinep.service.dto.UserWithGroupsDTO;
import org.modelmapper.ModelMapper;
import org.modelmapper.TypeToken;
import javax.inject.Inject;
import javax.inject.Named;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
/**
 * Service class (Implementation) for the user crud (create, read, update, delete) operations.
 *
 * @author holzer
 * @since 17.11.2015
 */
@Named
public class DefaultUserService implements UserService {

    @Inject
    private UserRepository userRepository;

    @Inject
    private GroupRepository groupRepository;

    // Maps between entities (User/Group) and their DTO representations.
    private final ModelMapper mapper = new ModelMapper();

    /**
     * Creates the given user in the database. Adds also references from groups to the created user.
     *
     * @param userWithGroupsDTO User to create/save in the database (must not yet have an id
     *                          and must carry at least one group).
     * @return The persisted user mapped to a {@link UserWithGroupsDTO}.
     */
    public UserWithGroupsDTO create(UserWithGroupsDTO userWithGroupsDTO) {
        if (userWithGroupsDTO.getId() != null) {
            throw new RuntimeException("Can not create element with id");
        }
        if(userWithGroupsDTO.getGroups() == null || userWithGroupsDTO.getGroups().size() == 0){
            throw new RuntimeException("User must have at least one group assigned");
        }
        User user = mapper.map(userWithGroupsDTO, User.class);
        User updatedUser = userRepository.save(user);
        //Add user to groups (bidirectional association)
        Set<Group> addedGroups = new HashSet<Group>();
        for (Group group : updatedUser.getGroups()) {
            // Re-load the managed group entity before attaching the user to it.
            Group groupToSave = groupRepository.findOne(group.getId());
            addedGroups.add(groupToSave);
            groupToSave.addUser(user);
            groupRepository.save(groupToSave);
        }
        updatedUser.setGroups(addedGroups);
        return mapper.map(updatedUser, UserWithGroupsDTO.class);
    }

    /**
     * Reads the user with the given id from the database. Returns a {@link UserWithGroupsDTO}-object of the
     * read user. This object contains also a set of the groups which the user belongs to.
     *
     * @param id Id of the user to read from the database.
     * @return The read user, or {@code null} when no user with that id exists.
     */
    public UserWithGroupsDTO read(Long id) {
        User user = userRepository.findOne(id);
        if (user == null) return null;
        return mapper.map(user, UserWithGroupsDTO.class);
    }

    /**
     * Reads all users from the database. Returns a collection of {@link UserWithGroupsDTO}-objects.
     * Every object in the collection contains also a set of the groups which the user belongs to.
     *
     * @return A collection of all read users.
     */
    public Collection<UserWithGroupsDTO> list() {
        Iterable<User> users = userRepository.findAll();
        // TypeToken captures the generic collection type for ModelMapper.
        Type listType = new TypeToken<Collection<UserWithGroupsDTO>>() {
        }.getType();
        Collection<UserWithGroupsDTO> userList = mapper.map(users, listType);
        return userList;
    }

    /**
     * Updates the given user in the database.
     * Also updates the references from the group to the user (if the updated user belongs to new groups).
     *
     * @param userWithGroupsDTO User to update in the database (must have an id and a non-null group set).
     * @return Updated user.
     */
    public UserWithGroupsDTO update(UserWithGroupsDTO userWithGroupsDTO) {
        if (userWithGroupsDTO.getId() == null) {
            throw new RuntimeException("Can not update new element");
        }
        // NOTE(review): unlike create(), an empty (non-null) group list passes this
        // check — confirm whether that asymmetry is intended.
        if(userWithGroupsDTO.getGroups() == null){
            throw new RuntimeException("User must have at least one group assigned");
        }
        User userBeforeUpdate = userRepository.findOne(userWithGroupsDTO.getId());
        User user = mapper.map(userWithGroupsDTO, User.class);
        //remove old references from groups to user
        for (Group group : userBeforeUpdate.getGroups()) {
            group.removeUser(user);
            groupRepository.save(group);
        }
        User updatedUser = userRepository.save(user);
        //adds the references from groups to user (bidirectional association)
        for (Group group : updatedUser.getGroups()) {
            Group groupToSave = groupRepository.findOne(group.getId());
            groupToSave.addUser(user);
            groupRepository.save(groupToSave);
        }
        return mapper.map(updatedUser, UserWithGroupsDTO.class);
    }

    /**
     * Deletes the given user from the database.
     * Also removes the user from his groups.
     *
     * @param userDTO User to delete from the database.
     */
    public void delete(UserDTO userDTO) {
        User user = userRepository.findOne(userDTO.getId());
        //remove user from his groups
        for (Group group : user.getGroups()) {
            group.removeUser(user);
            groupRepository.save(group);
        }
        userRepository.delete(user);
    }
}
| apache-2.0 |
Resgrid/BigBoard | src/components/app-popover/app-popover.ts | 680 | import { Component } from '@angular/core';
import { NavParams } from 'ionic-angular';
@Component({
  selector: 'app-popover',
  templateUrl: 'app-popover.html'
})
export class AppPopover {
  // Layout callbacks supplied by the page that opened the popover (via NavParams).
  private saveLayoutCallback;
  private loadLayoutCallback;
  private clearLayoutCallback;

  constructor(private navParams: NavParams) {
    // Pull the host page's handlers out of the navigation parameters.
    this.saveLayoutCallback = this.navParams.get('saveLayout')
    this.loadLayoutCallback = this.navParams.get('loadLayout')
    this.clearLayoutCallback = this.navParams.get('clearLayout')
  }

  /** Delegates to the host page's save-layout handler. */
  saveLayout() {
    this.saveLayoutCallback();
  }

  /** Delegates to the host page's load-layout handler. */
  loadLayout() {
    this.loadLayoutCallback();
  }

  /** Delegates to the host page's clear-layout handler. */
  clear() {
    this.clearLayoutCallback();
  }
} | apache-2.0 |
const CTR = require('ctr').js;
const ctr = new CTR();

// Base rule set reused by every create() call below.
const base = {
    width: '200px',
    color: 'red',
    height: '400px'
};

// init — produced before the callback is installed
ctr.create('.test-1', base);

// some outside custom function applied by the callback
const someCustomThing = function (val) {
    return val.toUpperCase();
};

// Install a one-shot result callback that post-processes the generated output.
ctr.setCallback(function (err, res) {
    if (err) {
        throw err;
    }
    res = !this.transform.length ? res : this._transformData(res);
    // our custom thing
    res = someCustomThing(res);
    this.res = res;
    this._resSetAdd(res);
}, {
    // only do it once — the callback applies to the next create() only
    once: true
});

// should be uppercase (the one-shot callback fires here)
ctr.create('.test-2', base);
// should not be uppercase === same treatment as .test-1
ctr.create('.test-3', base);

const res = ctr.getRes();

module.exports = {
    res: res
};
| apache-2.0 |
import os
import onedrivesdk

# OAuth application settings.
# NOTE(review): the client secret is hard-coded and committed to source
# control; it should come from configuration or an environment variable.
redirect_uri = 'http://localhost:8080/'
client_secret = 'J1toxbxTRhNLKDAposAZUEr'
client_id='00000000401CDF7B'
api_base_url='https://api.onedrive.com/v2.0/'
scopes=['wl.signin', 'wl.offline_access', 'onedrive.readwrite']

# Build the OneDrive client and start the interactive code-based auth flow.
# Note this runs at import time, before the __main__ guard below.
http_provider = onedrivesdk.HttpProvider()
auth_provider = onedrivesdk.AuthProvider(
    http_provider=http_provider,
    client_id=client_id,
    scopes=scopes)
client = onedrivesdk.OneDriveClient(api_base_url, auth_provider, http_provider)
auth_url = client.auth_provider.get_auth_url(redirect_uri)

# Ask for the code
print('Paste this URL into your browser, approve the app\'s access.')
print('Copy everything in the address bar after "code=", and paste it below.')
print(auth_url)

# The generated code differs on every run; find it in the browser address bar.
# The above code requires copy-pasting into your browser and back into your console.
code = input('Paste code here: ')
# code = 'M388fd440-b4b3-ef05-d1e8-6db506860a89'
client.auth_provider.authenticate(code, redirect_uri, client_secret)

# Upload an Item
# returned_item = client.item(drive='me', id='root').children['newfile.txt'].upload('path_to_file.txt')

if __name__=='__main__':
    print(__file__)
    dir = os.getcwd()  # shadows the builtin `dir`; left unchanged here
    print(dir)
    filelist = os.listdir(dir)
    print(filelist)
    exit(0)
martincostello/alexa-london-travel | test/LondonTravel.Skill.Tests/HttpRequestInterceptionFilter.cs | 1248 | // Copyright (c) Martin Costello, 2017. All rights reserved.
// Licensed under the Apache 2.0 license. See the LICENSE file in the project root for full license information.
using JustEat.HttpClientInterception;
using Microsoft.Extensions.Http;
namespace MartinCostello.LondonTravel.Skill;
/// <summary>
/// An <see cref="IHttpMessageHandlerBuilderFilter"/> that wires HTTP request
/// interception into handlers built by HttpClientFactory. This class cannot be inherited.
/// </summary>
/// <remarks>
/// See https://github.com/justeat/httpclient-interception/blob/4e52f0e269654bbcf4745aa307624d807e4f19e2/samples/SampleApp.Tests/HttpServerFixture.cs#L27-L30.
/// </remarks>
internal sealed class HttpRequestInterceptionFilter : IHttpMessageHandlerBuilderFilter
{
    private readonly HttpClientInterceptorOptions _options;

    internal HttpRequestInterceptionFilter(HttpClientInterceptorOptions options)
        => _options = options;

    /// <inheritdoc />
    public Action<HttpMessageHandlerBuilder> Configure(Action<HttpMessageHandlerBuilder> next)
        => (builder) =>
        {
            // Run the rest of the pipeline first, then append the interception handler.
            next(builder);
            builder.AdditionalHandlers.Add(_options.CreateHttpMessageHandler());
        };
}
| apache-2.0 |
leafclick/intellij-community | platform/editor-ui-api/src/com/intellij/ide/ui/LCDRenderingScope.java | 310 | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.ui;
/**
 * Scope in which subpixel (LCD) antialiased text rendering is applied.
 *
 * @deprecated use {@link AntialiasingType} instead
 */
@Deprecated
public enum LCDRenderingScope {
  /** Apply throughout the IDE. */
  IDE,
  /** Apply everywhere except the editor. */
  EXCLUDING_EDITOR,
  /** Disabled. */
  OFF
}
| apache-2.0 |
lvillani/droidkit | src/main/java/com/google/android/vending/billing/Security.java | 4983 | /* Copyright (c) 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.vending.billing;
import android.text.TextUtils;
import android.util.Log;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.Signature;
import java.security.SignatureException;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.X509EncodedKeySpec;
/**
 * Security-related methods. For a secure implementation, all of this code
 * should be implemented on a server that communicates with the
 * application on the device. For the sake of simplicity and clarity of this
 * example, this code is included here and is executed on the device. If you
 * must verify the purchases on the phone, you should obfuscate this code to
 * make it harder for an attacker to replace the code with stubs that treat all
 * purchases as verified.
 */
public class Security {
    private static final String TAG = "IABUtil/Security";

    // Algorithms used for Play-billing signature verification.
    private static final String KEY_FACTORY_ALGORITHM = "RSA";
    private static final String SIGNATURE_ALGORITHM = "SHA1withRSA";

    /**
     * Verifies that the data was signed with the given signature, and returns
     * the verified purchase. The data is in JSON format and signed
     * with a private key. The data also contains the {@link PurchaseState}
     * and product ID of the purchase.
     *
     * @param base64PublicKey the base64-encoded public key to use for verifying.
     * @param signedData the signed JSON string (signed, not encrypted)
     * @param signature the signature for the data, signed with the private key
     * @return {@code true} if all inputs are present and the signature verifies.
     */
    public static boolean verifyPurchase(String base64PublicKey, String signedData, String signature) {
        if (TextUtils.isEmpty(signedData) || TextUtils.isEmpty(base64PublicKey) ||
                TextUtils.isEmpty(signature)) {
            Log.e(TAG, "Purchase verification failed: missing data.");
            return false;
        }
        PublicKey key = Security.generatePublicKey(base64PublicKey);
        return Security.verify(key, signedData, signature);
    }

    /**
     * Generates a PublicKey instance from a string containing the
     * Base64-encoded public key.
     *
     * @param encodedPublicKey Base64-encoded public key
     * @throws IllegalArgumentException if encodedPublicKey is invalid
     */
    public static PublicKey generatePublicKey(String encodedPublicKey) {
        try {
            byte[] decodedKey = Base64.decode(encodedPublicKey);
            KeyFactory keyFactory = KeyFactory.getInstance(KEY_FACTORY_ALGORITHM);
            // Keys are expected in X.509 (SubjectPublicKeyInfo) encoding.
            return keyFactory.generatePublic(new X509EncodedKeySpec(decodedKey));
        } catch (NoSuchAlgorithmException e) {
            // RSA is mandated on all platforms, so this is effectively unreachable.
            throw new RuntimeException(e);
        } catch (InvalidKeySpecException e) {
            Log.e(TAG, "Invalid key specification.");
            throw new IllegalArgumentException(e);
        } catch (Base64DecoderException e) {
            Log.e(TAG, "Base64 decoding failed.");
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Verifies that the signature from the server matches the computed
     * signature on the data. Returns true if the data is correctly signed.
     *
     * @param publicKey public key associated with the developer account
     * @param signedData signed data from server
     * @param signature server signature
     * @return true if the data and signature match
     */
    public static boolean verify(PublicKey publicKey, String signedData, String signature) {
        Signature sig;
        try {
            sig = Signature.getInstance(SIGNATURE_ALGORITHM);
            sig.initVerify(publicKey);
            // NOTE(review): getBytes() uses the platform default charset — confirm
            // the server signs the same byte encoding.
            sig.update(signedData.getBytes());
            if (!sig.verify(Base64.decode(signature))) {
                Log.e(TAG, "Signature verification failed.");
                return false;
            }
            return true;
        } catch (NoSuchAlgorithmException e) {
            Log.e(TAG, "NoSuchAlgorithmException.");
        } catch (InvalidKeyException e) {
            Log.e(TAG, "Invalid key specification.");
        } catch (SignatureException e) {
            Log.e(TAG, "Signature exception.");
        } catch (Base64DecoderException e) {
            Log.e(TAG, "Base64 decoding failed.");
        }
        // Any failure along the way is treated as "not verified".
        return false;
    }
}
| apache-2.0 |
XamlAnimatedGif/WpfAnimatedGif | WpfAnimatedGif.Demo/InputBox.xaml.cs | 1124 | using System;
using System.Collections.Generic;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
namespace WpfAnimatedGif.Demo
{
/// <summary>
/// Interaction logic for InputBox.xaml
/// </summary>
public partial class InputBox : Window
{
    public InputBox()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Shows the dialog with the given prompt and title and returns the text
    /// the user entered, or <c>null</c> when the dialog was cancelled.
    /// </summary>
    public static string Show(string prompt, string title)
    {
        var dialog = new InputBox { Title = title };
        dialog.prompt.Text = prompt;
        return dialog.ShowDialog() == true ? dialog.input.Text : null;
    }

    // Closing the dialog with OK/Cancel sets the dialog result accordingly.
    private void OKButton_Click(object sender, RoutedEventArgs e) => DialogResult = true;

    private void CancelButton_Click(object sender, RoutedEventArgs e) => DialogResult = false;
}
}
| apache-2.0 |
<?php /* Access-denied partial view.
   Renders a translated "access denied" message chosen by $location:
   'category' and 'userprofile' get specific messages (the latter differing
   for guests vs. logged-in users); anything else falls back to a generic one. */ ?>
<div class="accessdenied-reason">
    <?php if ('category' == $location) { ?>
        <?= \mpf\modules\forum\components\Translator::get()->translate("Can't access this category!"); ?>
    <?php } elseif ('userprofile' == $location) { ?>
        <?php if (\mpf\WebApp::get()->user()->isGuest()) { ?>
            <?= \mpf\modules\forum\components\Translator::get()->translate("Must login to see user profile!"); ?>
        <?php } else { ?>
            <?= \mpf\modules\forum\components\Translator::get()->translate("Can't access user profile!"); ?>
        <?php } ?>
    <?php } else { ?>
        <?= \mpf\modules\forum\components\Translator::get()->translate("Access denied"); ?>
    <?php } ?>
</div>
google/digitalbuildings | tools/validators/instance_validator/tests/entity_instance_test.py | 27133 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for entity_instance.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from os import path
from typing import Dict, List
from unittest import mock
from absl.testing import absltest
from tests import test_constants
from validate import connection
from validate import entity_instance
from validate import field_translation
from validate import generate_universe
from validate import instance_parser
from validate import link
# Directory containing the YAML building-config fixtures used by these tests.
_TESTCASE_PATH = test_constants.TEST_INSTANCES

# Short aliases for the parser's config modes and entity operations.
_INIT_CFG = instance_parser.ConfigMode.INITIALIZE
_UPDATE_CFG = instance_parser.ConfigMode.UPDATE

_ADD = instance_parser.EntityOperation.ADD
_UPDATE = instance_parser.EntityOperation.UPDATE
_DELETE = instance_parser.EntityOperation.DELETE
def _ParserHelper(testpaths: List[str]) -> instance_parser.InstanceParser:
  """Feeds every config file in testpaths to a fresh, finalized parser."""
  instance = instance_parser.InstanceParser()
  for config_file in testpaths:
    instance.AddFile(config_file)
  instance.Finalize()
  return instance
def _Helper(testpaths: List[str]) -> Dict[str, entity_instance.EntityInstance]:
  """Parses the given config files and returns the entities they define."""
  parsed = _ParserHelper(testpaths)
  return parsed.GetEntities()
class EntityInstanceTest(absltest.TestCase):
@classmethod
def setUpClass(cls):
super(cls, EntityInstanceTest).setUpClass()
cls.config_universe = generate_universe.BuildUniverse(
use_simplified_universe=True)
cls.init_validator = entity_instance.InstanceValidator(
cls.config_universe, _INIT_CFG)
cls.update_validator = entity_instance.InstanceValidator(
cls.config_universe, _UPDATE_CFG)
@mock.patch.object(
entity_instance.InstanceValidator, 'Validate', return_value=True)
@mock.patch.object(
entity_instance.GraphValidator, 'Validate', return_value=True)
@mock.patch.object(entity_instance, 'EntityInstance')
def testCombinedChecksInstanceAndGraph(self, mock_entity, mock_gv, mock_iv):
validator = entity_instance.CombinationValidator(self.config_universe,
_UPDATE_CFG, {})
self.assertTrue(validator.Validate(mock_entity))
mock_iv.assert_called_once_with(mock_entity)
mock_gv.assert_called_once_with(mock_entity)
def testValidate_requiresEtagOnUpdate(self):
valid_instance = entity_instance.EntityInstance(
_UPDATE,
'FACILITIES/123456',
etag='a12345',
update_mask=['connections'])
invalid_instance = entity_instance.EntityInstance(
_UPDATE, 'FACILITIES/123456', update_mask=['connections'])
self.assertTrue(self.update_validator.Validate(valid_instance))
self.assertFalse(self.update_validator.Validate(invalid_instance))
def testValidate_verifiesTypeAgainstNamespace(self):
instance = entity_instance.EntityInstance(
_UPDATE,
'FACILITIES/123456',
namespace='FACILITIES',
type_name='BUILDING',
etag='a12345',
update_mask=['connections'])
self.assertTrue(self.update_validator.Validate(instance))
def testValidate_verifiesTypeAgainstNamespace_failsIfNotDefinedInUniverse(
self):
instance = entity_instance.EntityInstance(
_UPDATE,
'NOT_A_NAMESPACE/123456',
namespace='NOT_A_NAMESPACE',
type_name='BUILDING',
etag='a12345',
update_mask=['connections'])
is_valid = self.update_validator.Validate(instance)
self.assertFalse(is_valid)
def testValidate_verifiesTypeAgainstNamespace_badlyConfiguredUniverseFails(
self):
instance = entity_instance.EntityInstance(
_UPDATE,
'FACILITIES/123456',
namespace='FOO',
type_name='BUILDING',
etag='a12345',
update_mask=['connections'])
is_valid = self.update_validator.Validate(instance)
self.assertFalse(is_valid)
def testValidateBadEntityTypeFormat(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_building_type.yaml')])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
try:
entity_instance.EntityInstance.FromYaml(entity)
except TypeError as e:
self.assertEqual(type(e), TypeError)
else:
self.fail(f'{TypeError} was not raised')
def testInstanceRequiresEntityTypeToExist(self):
instance = entity_instance.EntityInstance(
_UPDATE,
'FACILITIES/123456',
namespace='FACILITIES',
type_name='LIGHTING/NOT_A_LAMP',
etag='a12345',
update_mask=['connections'])
self.assertFalse(self.update_validator.Validate(instance))
def testValidateBadEntityNamespace(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_building_type_namespace.yaml')])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidateRejectsUseOfAbstractType(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_abstract_type.yaml')])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidateBadEntityType(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_building_type_entity.yaml')])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidateMultipleTranslationWithFields(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'GOOD',
'good_building_translation_fields.yaml')
])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertTrue(self.init_validator.Validate(instance))
def testValidateTranslationWithRequiredFieldMissing(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'BAD',
'bad_translation_with_required_field_missing.yaml')
])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidatePassthroughTranslationWithRequiredFieldMissing(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'BAD',
'bad_passthrough_translation_with_required_field_missing.yaml'
)
])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidateTranslationWithRequiredFieldCloudDeviceIdMissing(self):
try:
_Helper([
path.join(_TESTCASE_PATH, 'BAD',
'bad_translation_missing_cloud_device_id.yaml')
])
except KeyError as e:
self.assertEqual(type(e), KeyError)
else:
self.fail(f'{KeyError} was not raised')
def testValidateTranslation(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'GOOD', 'good_translation.yaml')])
parsed = dict(parsed)
entity_hvac = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity_hvac)
self.assertTrue(self.init_validator.Validate(instance))
self.assertEqual(instance.cloud_device_id, 'foobar')
def testValidateTranslationWithExplicitlyMissingField(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'GOOD',
'good_translation_field_marked_missing.yaml')
])
parsed = dict(parsed)
entity_hvac = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity_hvac)
self.assertTrue(self.init_validator.Validate(instance))
def testValidateMultipleTranslationsWithIdenticalTypes(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'GOOD', 'good_translation_identical.yaml')])
parsed = dict(parsed)
for entity_name in list(parsed):
entity = dict(parsed[entity_name])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertTrue(self.init_validator.Validate(instance))
def testValidateBadTranslationWithExtraField(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'BAD',
'bad_translation_with_extra_field.yaml')
])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidateTranslationUnits(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'GOOD', 'good_translation_units.yaml')])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertTrue(self.init_validator.Validate(instance))
def testValidateTranslationUnitsAndStates(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'GOOD',
'good_translation_units_and_states.yaml')
])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertTrue(self.init_validator.Validate(instance))
def testValidateBadTranslationStates(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_translation_states.yaml')])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidateGoodTranslationStatesList(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'GOOD', 'good_translation_states_list.yaml')
])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertTrue(self.init_validator.Validate(instance))
def testValidateBadTranslationStatesListWithDuplicate(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'BAD',
'bad_translation_states_list_with_duplicate.yaml')
])
parsed = dict(parsed)
entity = dict(parsed[list(parsed)[0]])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testValidateBadLinkFields(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_building_links_fields.yaml')])
entity_instances = {}
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
entity_instances[raw_entity] = entity
combination_validator = entity_instance.CombinationValidator(
self.config_universe, _INIT_CFG, entity_instances)
self.assertFalse(
combination_validator.Validate(entity_instances.get('ENTITY-NAME')))
def testValidateBadLinkEntityName(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'BAD', 'bad_building_links_entity_name.yaml')
])
entity_instances = {}
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
entity_instances[raw_entity] = entity
combination_validator = entity_instance.CombinationValidator(
self.config_universe, _INIT_CFG, entity_instances)
self.assertFalse(
combination_validator.Validate(entity_instances.get('ENTITY-NAME')))
def testValidateBadLinkWrongField(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_links_wrong_link.yaml')])
entity_instances = {}
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
entity_instances[raw_entity] = entity
combination_validator = entity_instance.CombinationValidator(
self.config_universe, _UPDATE, entity_instances)
self.assertFalse(
combination_validator.Validate(entity_instances.get('ENTITY-NAME')))
def testValidateBadLinkMissingField(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_links_missing_field.yaml')])
entity_instances = {}
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
entity_instances[raw_entity] = entity
combination_validator = entity_instance.CombinationValidator(
self.config_universe, _INIT_CFG, entity_instances)
self.assertFalse(
combination_validator.Validate(entity_instances.get('ENTITY-NAME')))
def testValidateGoodLinkEntityName(self):
parsed = _Helper([path.join(_TESTCASE_PATH, 'GOOD', 'good_links.yaml')])
entity_instances = {}
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
entity_instances[raw_entity] = entity
combination_validator = entity_instance.CombinationValidator(
self.config_universe, _INIT_CFG, entity_instances)
for _, instance in entity_instances.items():
self.assertTrue(combination_validator.Validate(instance))
def testValidateGoodLinkWithIncrementEntityName(self):
parsed = _Helper(
# KW: this one is a entity_franken-type it definitely won't make sense
[path.join(_TESTCASE_PATH, 'GOOD', 'good_links_increment.yaml')])
entity_instances = {}
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
entity_instances[raw_entity] = entity
combination_validator = entity_instance.CombinationValidator(
self.config_universe, _INIT_CFG, entity_instances)
for _, instance in entity_instances.items():
self.assertTrue(combination_validator.Validate(instance))
def testValidateGoodLinkToPassthroughEntity(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'GOOD', 'good_links_passthrough.yaml')])
entity_instances = {}
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
entity_instances[raw_entity] = entity
combination_validator = entity_instance.CombinationValidator(
self.config_universe, _INIT_CFG, entity_instances)
for _, instance in entity_instances.items():
self.assertTrue(combination_validator.Validate(instance))
def testValidateStates(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'GOOD', 'good_translation_states.yaml')])
parsed = dict(parsed)
for raw_entity in list(parsed):
entity_parsed = dict(parsed[raw_entity])
entity = entity_instance.EntityInstance.FromYaml(entity_parsed)
self.assertTrue(self.init_validator.Validate(entity))
def testGoodConnectionType(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'GOOD', 'good_building_connections.yaml')])
parsed = dict(parsed)
entity_name = list(parsed)[0]
entity = dict(parsed[entity_name])
expected_connections = [
connection.Connection('FEEDS', 'ANOTHER-ENTITY'),
connection.Connection('CONTAINS', 'A-THIRD-ENTITY')
]
self.assertIn('connections', entity,
'entity does not have connections when expected')
self.assertIsNotNone(self.config_universe.connection_universe,
'universe does not have a valid connections universe')
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertTrue(self.init_validator.Validate(instance))
self.assertCountEqual(expected_connections, instance.connections)
def testGoodConnectionList(self):
parsed = _Helper([
path.join(_TESTCASE_PATH, 'GOOD', 'good_building_connection_list.yaml')
])
parsed = dict(parsed)
entity_name = list(parsed)[0]
entity = dict(parsed[entity_name])
expected_connections = [
connection.Connection('FEEDS', 'ANOTHER-ENTITY'),
connection.Connection('CONTAINS', 'ANOTHER-ENTITY')
]
self.assertIn('connections', entity,
'entity does not have connections when expected')
self.assertIsNotNone(self.config_universe.connection_universe,
'universe does not have a valid connections universe')
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertTrue(self.init_validator.Validate(instance))
self.assertCountEqual(expected_connections, instance.connections)
def testBadConnectionType(self):
parsed = _Helper(
[path.join(_TESTCASE_PATH, 'BAD', 'bad_building_connections.yaml')])
parsed = dict(parsed)
entity_name = list(parsed)[0]
entity = dict(parsed[entity_name])
instance = entity_instance.EntityInstance.FromYaml(entity)
self.assertFalse(self.init_validator.Validate(instance))
def testInstanceLinkSourceFieldMustExist(self):
src_ok = entity_instance.EntityInstance(
_UPDATE,
'AHU-1',
links=[link.Link('CTRL-1', {'run_status': 'run_status'})],
etag='123')
bad_src_field = entity_instance.EntityInstance(
_UPDATE,
'AHU-1',
links=[link.Link('CTRL-1', {'nonexistent_status': 'run_status'})],
etag='123')
self.assertFalse(self.update_validator.Validate(bad_src_field))
self.assertTrue(self.update_validator.Validate(src_ok))
def testGraphOrphanLinkOkOnUpdate(self):
target = entity_instance.EntityInstance(
_UPDATE,
'AHU-1',
links=[link.Link('CTRL-1', {'run_status_1': 'run_status'})],
etag='123')
validator = entity_instance.GraphValidator(self.config_universe,
_UPDATE_CFG, {'CTRL-1': target})
self.assertTrue(validator.Validate(target))
def testGraphGoodConnection(self):
target = entity_instance.EntityInstance(
_ADD, 'VAV-123', connections=[connection.Connection('FEEDS', 'AHU-1')])
source = entity_instance.EntityInstance(
_ADD, 'AHU-1', connections=[connection.Connection('FEEDS', 'AHU-1')])
instances = {'VAV-123': target, 'AHU-1': source}
validator = entity_instance.GraphValidator(self.config_universe, _INIT_CFG,
instances)
self.assertTrue(validator.Validate(target))
def testGraphRejectsOrphanConnectionOnInit(self):
target = entity_instance.EntityInstance(
_ADD, 'VAV-123', connections=[connection.Connection('FEEDS', 'AHU-1')])
validator = entity_instance.GraphValidator(self.config_universe, _INIT_CFG,
{'VAV-123': target})
self.assertFalse(validator.Validate(target))
def testGraphAllowsOrphanConnectionOnInit(self):
target = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
connections=[connection.Connection('FEEDS', 'AHU-1')],
etag='123')
validator = entity_instance.GraphValidator(self.config_universe,
_UPDATE_CFG, {'VAV-123': target})
self.assertTrue(validator.Validate(target))
def testInstanceEtagNotRequiredForDelete(self):
no_tag = entity_instance.EntityInstance(_UPDATE, 'VAV-123')
no_tag_delete = entity_instance.EntityInstance(_DELETE, 'VAV-123')
self.assertFalse(self.update_validator.Validate(no_tag))
self.assertTrue(self.update_validator.Validate(no_tag_delete))
def testInstanceOperationRequiredOnUpdate(self):
entity = entity_instance.EntityInstance(_UPDATE, 'VAV-123', etag='1234')
self.assertFalse(self.init_validator.Validate(entity))
def testInstanceMultipleUnitsNotAllowed(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'foo_bar':
field_translation.DimensionalValue(
std_field_name='foo/bar',
unit_field_name='foo/unit',
raw_field_name='foo/raw',
unit_mappings={'standard_unit_1': 'raw_unit_1'}),
'foo_baz':
field_translation.DimensionalValue(
std_field_name='foo/baz',
unit_field_name='bar/unit',
raw_field_name='bar/raw',
unit_mappings={'standard_unit_1': 'raw_unit_2'}),
})
self.assertFalse(self.update_validator.Validate(entity))
def testInstance_DimensionalTranslation_MissingUnitMapping(self):
try:
entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'foo_bar':
field_translation.DimensionalValue(
std_field_name='foo/bar',
unit_field_name='foo/unit',
raw_field_name='foo/raw',
unit_mappings={}),
})
except ValueError as e:
self.assertEqual(type(e), ValueError)
else:
self.fail(f'{ValueError} was not raised')
def testInstance_DimensionalTranslation_UndefinedField(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'UNDEFINED_UNIT':
field_translation.DimensionalValue(
std_field_name='foo/bar',
unit_field_name='foo/unit',
raw_field_name='foo/raw',
unit_mappings={'foo': 'bar'})
})
self.assertFalse(self.update_validator.Validate(entity))
def testInstance_DimensionalTranslation_FieldHasInvalidUnit(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'return_water_temperature_sensor':
field_translation.DimensionalValue(
std_field_name='foo/bar',
unit_field_name='foo/unit',
raw_field_name='foo/raw',
unit_mappings={'INVALID_SENSOR_UNIT': 'degF'})
})
self.assertFalse(self.update_validator.Validate(entity))
def testInstance_DimensionalTranslation_FieldIsValid(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'return_water_temperature_sensor':
field_translation.DimensionalValue(
std_field_name='foo/bar',
unit_field_name='foo/unit',
raw_field_name='foo/raw',
unit_mappings={'degrees_fahrenheit': 'degF'})
})
self.assertTrue(self.update_validator.Validate(entity))
def testInstance_MultiStateTranslation_MissingStates(self):
try:
entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'foo_bar':
field_translation.MultiStateValue(
std_field_name='foo/bar',
raw_field_name='foo/raw',
states={})
})
except ValueError as e:
self.assertEqual(type(e), ValueError)
else:
self.fail('{ValueError} was not raised')
def testInstance_MultiStateTranslation_UndefinedField(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'UNDEFINED_STATE':
field_translation.MultiStateValue(
std_field_name='foo/bar',
raw_field_name='foo/raw',
states={'foo': 'bar'})
})
self.assertFalse(self.update_validator.Validate(entity))
def testInstance_MultiStateTranslation_FieldHasInvalidState(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'exhaust_air_damper_command':
field_translation.MultiStateValue(
std_field_name='exhaust_air_damper_command',
raw_field_name='exhaust_air_damper_command',
states={'INVALID_STATE': '1'})
})
self.assertFalse(self.update_validator.Validate(entity))
def testInstance_MultiStateTranslation_FieldIsValid(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'exhaust_air_damper_command':
field_translation.MultiStateValue(
std_field_name='exhaust_air_damper_command',
raw_field_name='exhaust_air_damper_command',
states={
'OPEN': '1',
'CLOSED': '0'
})
})
self.assertTrue(self.update_validator.Validate(entity))
def testInstance_DimensionalValue_noUnitsExpected_noUnitsPasses(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'line_powerfactor_sensor':
field_translation.DimensionalValue(
std_field_name='foo/bar',
unit_field_name='foo/unit',
raw_field_name='foo/raw',
unit_mappings={'no_units': 'no_units'}),
})
self.assertTrue(self.update_validator.Validate(entity))
def testInstance_DimensionalValue_unitsExpected_noUnitsFails(self):
entity = entity_instance.EntityInstance(
_UPDATE,
'VAV-123',
etag='1234',
translation={
'zone_air_cooling_temperature_setpoint':
field_translation.DimensionalValue(
std_field_name='foo/bar',
unit_field_name='foo/unit',
raw_field_name='foo/raw',
unit_mappings={'no_units': 'no_units'}),
})
self.assertFalse(self.update_validator.Validate(entity))
# Run all tests through the Abseil test runner when executed as a script.
if __name__ == '__main__':
  absltest.main()
| apache-2.0 |
neerajkhosla/dynamicmvccrud | jTable/Properties/AssemblyInfo.cs | 1348 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("jTable")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("jTable")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
// An empty culture marks this as the culture-neutral (main) assembly,
// as opposed to a satellite resource assembly.
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("4af30a2b-e9ca-49f6-9ba7-d16c47892eaf")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Revision and Build Numbers
// by using the '*' as shown below:
// e.g. [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| apache-2.0 |
yashb/generator | vendor/composer/autoload_psr4.php | 1216 | <?php
// autoload_psr4.php @generated by Composer
// NOTE: this map is rebuilt by `composer dump-autoload`; manual edits will be
// lost. Keys are PSR-4 namespace prefixes, values are their base directories.
$vendorDir = dirname(dirname(__FILE__));
$baseDir = dirname($vendorDir);
return array(
    'WebSocket\\' => array($vendorDir . '/textalk/websocket/lib'),
    'Symfony\\Polyfill\\Mbstring\\' => array($vendorDir . '/symfony/polyfill-mbstring'),
    'Symfony\\Component\\Yaml\\' => array($vendorDir . '/symfony/yaml'),
    'Symfony\\Component\\Process\\' => array($vendorDir . '/symfony/process'),
    'Symfony\\Component\\Finder\\' => array($vendorDir . '/symfony/finder'),
    'Symfony\\Component\\Filesystem\\' => array($vendorDir . '/symfony/filesystem'),
    'Symfony\\Component\\Console\\' => array($vendorDir . '/symfony/console'),
    'Socket\\Raw\\' => array($vendorDir . '/clue/socket-raw/src'),
    'Psr\\Http\\Message\\' => array($vendorDir . '/psr/http-message/src'),
    'PhpParser\\' => array($vendorDir . '/nikic/php-parser/lib/PhpParser'),
    'GuzzleHttp\\Psr7\\' => array($vendorDir . '/guzzlehttp/psr7/src'),
    'GuzzleHttp\\Promise\\' => array($vendorDir . '/guzzlehttp/promises/src'),
    'GuzzleHttp\\' => array($vendorDir . '/guzzlehttp/guzzle/src'),
    'Graze\\GuzzleHttp\\JsonRpc\\' => array($vendorDir . '/graze/guzzle-jsonrpc/src'),
);
| apache-2.0 |
terrancesnyder/solr-analytics | lucene/queryparser/src/java/org/apache/lucene/queryparser/flexible/standard/parser/CharStream.java | 4012 | /* Generated By:JavaCC: Do not edit this line. CharStream.java Version 5.0 */
/* JavaCCOptions:STATIC=false,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package org.apache.lucene.queryparser.flexible.standard.parser;
/**
 * This interface describes a character stream that maintains line and
 * column number positions of the characters. It also has the capability
 * to backup the stream to some extent. An implementation of this
 * interface is used in the TokenManager implementation generated by
 * JavaCCParser.
 *
 * All the methods except backup can be implemented in any fashion. backup
 * needs to be implemented correctly for the correct operation of the lexer.
 * Rest of the methods are all used to get information like line number,
 * column number and the String that constitutes a token and are not used
 * by the lexer. Hence their implementation won't affect the generated lexer's
 * operation.
 */
public
interface CharStream {
  /**
   * Returns the next character from the selected input. The method
   * of selecting the input is the responsibility of the class
   * implementing this interface. Can throw any java.io.IOException.
   */
  char readChar() throws java.io.IOException;
  // NOTE(review): JavaCC emits @Deprecated *before* the Javadoc below;
  // Javadoc conventionally precedes annotations, but this file is generated.
  @Deprecated
  /**
   * Returns the column position of the character last read.
   * @deprecated
   * @see #getEndColumn
   */
  int getColumn();
  @Deprecated
  /**
   * Returns the line number of the character last read.
   * @deprecated
   * @see #getEndLine
   */
  int getLine();
  /**
   * Returns the column number of the last character for current token (being
   * matched after the last call to BeginToken).
   */
  int getEndColumn();
  /**
   * Returns the line number of the last character for current token (being
   * matched after the last call to BeginToken).
   */
  int getEndLine();
  /**
   * Returns the column number of the first character for current token (being
   * matched after the last call to BeginToken).
   */
  int getBeginColumn();
  /**
   * Returns the line number of the first character for current token (being
   * matched after the last call to BeginToken).
   */
  int getBeginLine();
  /**
   * Backs up the input stream by amount steps. Lexer calls this method if it
   * had already read some characters, but could not use them to match a
   * (longer) token. So, they will be used again as the prefix of the next
   * token and it is the implementation's responsibility to do this right.
   */
  void backup(int amount);
  /**
   * Returns the next character that marks the beginning of the next token.
   * All characters must remain in the buffer between two successive calls
   * to this method to implement backup correctly.
   */
  char BeginToken() throws java.io.IOException;
  /**
   * Returns a string made up of characters from the marked token beginning
   * to the current buffer position. Implementations have the choice of returning
   * anything that they want to. For example, for efficiency, one might decide
   * to just return null, which is a valid implementation.
   */
  String GetImage();
  /**
   * Returns an array of characters that make up the suffix of length 'len' for
   * the currently matched token. This is used to build up the matched string
   * for use in actions in the case of MORE. A simple and inefficient
   * implementation of this is as follows :
   *
   *   {
   *      String t = GetImage();
   *      return t.substring(t.length() - len, t.length()).toCharArray();
   *   }
   */
  char[] GetSuffix(int len);
  /**
   * The lexer calls this function to indicate that it is done with the stream
   * and hence implementations can free any resources held by this class.
   * Again, the body of this function can be just empty and it will not
   * affect the lexer's operation.
   */
  void Done();
}
/* JavaCC - OriginalChecksum=53b2ec7502d50e2290e86187a6c01270 (do not edit this line) */
| apache-2.0 |
stfalcon-studio/staginator | docker_templates/symfony-mysql-php56/configs/callback_runner.rb | 1680 | #!/usr/bin/env ruby
# Runs user-configured deployment hooks and service commands described in a
# Staginator YAML config (e.g. staginator.yml).
#
# Public API (unchanged): #config, #after_clone_repo, #before_composer_run,
# #after_composer_run, #after_deploy, #install_packages, #start_services.
class CallbackRunner
  attr_reader :config

  # config_path - path to the YAML configuration file. When the file is
  # missing, #config is false and the CLI entry point skips all callbacks.
  def initialize(config_path)
    require 'yaml'
    require 'cocaine'
    @config = read_config(config_path)
  end

  def after_clone_repo
    deploy_callbacks('after_clone')
  end

  def before_composer_run
    deploy_callbacks('before_composer')
  end

  def after_composer_run
    deploy_callbacks('after_composer')
  end

  def after_deploy
    deploy_callbacks('after_deploy', '/stag/www')
  end

  # Installs the apt packages listed under 'apt_packages', if any.
  def install_packages
    packages = @config['apt_packages']
    return unless packages
    apt_update
    run_command("apt-get install -y #{packages.join(' ')}")
  end

  # Starts each init.d service listed under 'run_services', if any.
  def start_services
    services = @config['run_services']
    return unless services
    services.each do |service|
      run_command("/etc/init.d/#{service} start")
    end
  end

  private

  # Loads the YAML config. Returns false when the file does not exist.
  # (Original wrapped the load in a redundant begin/end and assigned
  # @config both here and in #initialize.)
  def read_config(config_path)
    YAML.load_file(config_path)
  rescue Errno::ENOENT
    false
  end

  # Runs every command registered for the given deploy step from the working
  # directory +pwd+. Commands come straight from the config file, so this
  # intentionally executes arbitrary shell — deploy tooling by design.
  def deploy_callbacks(step, pwd = '/stag/new')
    commands = @config['deploy_callbacks'][step]
    return unless commands
    commands.each do |command|
      run_command("cd #{pwd} && #{command}")
    end
  end

  def apt_update
    pid = Process.spawn('apt-get update')
    Process.wait(pid)
  end

  # Builds the command line through Cocaine (as the original did for every
  # callback), spawns it, and waits for completion. Extracted to remove the
  # spawn/wait duplication across three methods.
  def run_command(command)
    cmd = Cocaine::CommandLine.new(command)
    pid = Process.spawn(cmd.command)
    Process.wait(pid)
  end
end
if __FILE__==$0
runner = CallbackRunner.new(ARGV[0]) # ./callback_runner.rb /path/to/staginator.yml stage
runner.send(ARGV[1]) if runner.config
end | apache-2.0 |
hirayama968/xx | Source/DataSources/Entity.js | 25020 | define([
'../Core/Cartesian3',
'../Core/Check',
'../Core/createGuid',
'../Core/defaultValue',
'../Core/defined',
'../Core/defineProperties',
'../Core/DeveloperError',
'../Core/Event',
'../Core/Matrix3',
'../Core/Matrix4',
'../Core/Quaternion',
'../Core/Transforms',
'../Scene/GroundPrimitive',
'./BillboardGraphics',
'./BoxGraphics',
'./ConstantPositionProperty',
'./CorridorGraphics',
'./createPropertyDescriptor',
'./createRawPropertyDescriptor',
'./CylinderGraphics',
'./EllipseGraphics',
'./EllipsoidGraphics',
'./LabelGraphics',
'./ModelGraphics',
'./PathGraphics',
'./PlaneGraphics',
'./PointGraphics',
'./PolygonGraphics',
'./PolylineGraphics',
'./PolylineVolumeGraphics',
'./Property',
'./PropertyBag',
'./RectangleGraphics',
'./WallGraphics'
], function(
Cartesian3,
Check,
createGuid,
defaultValue,
defined,
defineProperties,
DeveloperError,
Event,
Matrix3,
Matrix4,
Quaternion,
Transforms,
GroundPrimitive,
BillboardGraphics,
BoxGraphics,
ConstantPositionProperty,
CorridorGraphics,
createPropertyDescriptor,
createRawPropertyDescriptor,
CylinderGraphics,
EllipseGraphics,
EllipsoidGraphics,
LabelGraphics,
ModelGraphics,
PathGraphics,
PlaneGraphics,
PointGraphics,
PolygonGraphics,
PolylineGraphics,
PolylineVolumeGraphics,
Property,
PropertyBag,
RectangleGraphics,
WallGraphics) {
'use strict';
function createConstantPositionProperty(value) {
return new ConstantPositionProperty(value);
}
function createPositionPropertyDescriptor(name) {
return createPropertyDescriptor(name, undefined, createConstantPositionProperty);
}
function createPropertyTypeDescriptor(name, Type) {
return createPropertyDescriptor(name, undefined, function(value) {
if (value instanceof Type) {
return value;
}
return new Type(value);
});
}
/**
* Entity instances aggregate multiple forms of visualization into a single high-level object.
* They can be created manually and added to {@link Viewer#entities} or be produced by
* data sources, such as {@link CzmlDataSource} and {@link GeoJsonDataSource}.
* @alias Entity
* @constructor
*
* @param {Object} [options] Object with the following properties:
* @param {String} [options.id] A unique identifier for this object. If none is provided, a GUID is generated.
* @param {String} [options.name] A human readable name to display to users. It does not have to be unique.
* @param {TimeIntervalCollection} [options.availability] The availability, if any, associated with this object.
* @param {Boolean} [options.show] A boolean value indicating if the entity and its children are displayed.
* @param {Property} [options.description] A string Property specifying an HTML description for this entity.
* @param {PositionProperty} [options.position] A Property specifying the entity position.
* @param {Property} [options.orientation] A Property specifying the entity orientation.
* @param {Property} [options.viewFrom] A suggested initial offset for viewing this object.
* @param {Entity} [options.parent] A parent entity to associate with this entity.
* @param {BillboardGraphics} [options.billboard] A billboard to associate with this entity.
* @param {BoxGraphics} [options.box] A box to associate with this entity.
* @param {CorridorGraphics} [options.corridor] A corridor to associate with this entity.
* @param {CylinderGraphics} [options.cylinder] A cylinder to associate with this entity.
* @param {EllipseGraphics} [options.ellipse] A ellipse to associate with this entity.
* @param {EllipsoidGraphics} [options.ellipsoid] A ellipsoid to associate with this entity.
* @param {LabelGraphics} [options.label] A options.label to associate with this entity.
* @param {ModelGraphics} [options.model] A model to associate with this entity.
* @param {PathGraphics} [options.path] A path to associate with this entity.
* @param {PlaneGraphics} [options.plane] A plane to associate with this entity.
* @param {PointGraphics} [options.point] A point to associate with this entity.
* @param {PolygonGraphics} [options.polygon] A polygon to associate with this entity.
* @param {PolylineGraphics} [options.polyline] A polyline to associate with this entity.
* @param {PropertyBag} [options.properties] Arbitrary properties to associate with this entity.
* @param {PolylineVolumeGraphics} [options.polylineVolume] A polylineVolume to associate with this entity.
* @param {RectangleGraphics} [options.rectangle] A rectangle to associate with this entity.
* @param {WallGraphics} [options.wall] A wall to associate with this entity.
*
* @see {@link https://cesiumjs.org/tutorials/Visualizing-Spatial-Data/|Visualizing Spatial Data}
*/
function Entity(options) {
options = defaultValue(options, defaultValue.EMPTY_OBJECT);
var id = options.id;
if (!defined(id)) {
id = createGuid();
}
this._availability = undefined;
this._id = id;
this._definitionChanged = new Event();
this._name = options.name;
this._show = defaultValue(options.show, true);
this._parent = undefined;
this._propertyNames = ['billboard', 'box', 'corridor', 'cylinder', 'description', 'ellipse', //
'ellipsoid', 'label', 'model', 'orientation', 'path', 'plane', 'point', 'polygon', //
'polyline', 'polylineVolume', 'position', 'properties', 'rectangle', 'viewFrom', 'wall'];
this._billboard = undefined;
this._billboardSubscription = undefined;
this._box = undefined;
this._boxSubscription = undefined;
this._corridor = undefined;
this._corridorSubscription = undefined;
this._cylinder = undefined;
this._cylinderSubscription = undefined;
this._description = undefined;
this._descriptionSubscription = undefined;
this._ellipse = undefined;
this._ellipseSubscription = undefined;
this._ellipsoid = undefined;
this._ellipsoidSubscription = undefined;
this._label = undefined;
this._labelSubscription = undefined;
this._model = undefined;
this._modelSubscription = undefined;
this._orientation = undefined;
this._orientationSubscription = undefined;
this._path = undefined;
this._pathSubscription = undefined;
this._plane = undefined;
this._planeSubscription = undefined;
this._point = undefined;
this._pointSubscription = undefined;
this._polygon = undefined;
this._polygonSubscription = undefined;
this._polyline = undefined;
this._polylineSubscription = undefined;
this._polylineVolume = undefined;
this._polylineVolumeSubscription = undefined;
this._position = undefined;
this._positionSubscription = undefined;
this._properties = undefined;
this._propertiesSubscription = undefined;
this._rectangle = undefined;
this._rectangleSubscription = undefined;
this._viewFrom = undefined;
this._viewFromSubscription = undefined;
this._wall = undefined;
this._wallSubscription = undefined;
this._children = [];
/**
* Gets or sets the entity collection that this entity belongs to.
* @type {EntityCollection}
*/
this.entityCollection = undefined;
this.parent = options.parent;
this.merge(options);
}
function updateShow(entity, children, isShowing) {
var length = children.length;
for (var i = 0; i < length; i++) {
var child = children[i];
var childShow = child._show;
var oldValue = !isShowing && childShow;
var newValue = isShowing && childShow;
if (oldValue !== newValue) {
updateShow(child, child._children, isShowing);
}
}
entity._definitionChanged.raiseEvent(entity, 'isShowing', isShowing, !isShowing);
}
defineProperties(Entity.prototype, {
/**
* The availability, if any, associated with this object.
* If availability is undefined, it is assumed that this object's
* other properties will return valid data for any provided time.
* If availability exists, the objects other properties will only
* provide valid data if queried within the given interval.
* @memberof Entity.prototype
* @type {TimeIntervalCollection}
*/
availability : createRawPropertyDescriptor('availability'),
/**
* Gets the unique ID associated with this object.
* @memberof Entity.prototype
* @type {String}
*/
id : {
get : function() {
return this._id;
}
},
/**
* Gets the event that is raised whenever a property or sub-property is changed or modified.
* @memberof Entity.prototype
*
* @type {Event}
* @readonly
*/
definitionChanged : {
get : function() {
return this._definitionChanged;
}
},
/**
* Gets or sets the name of the object. The name is intended for end-user
* consumption and does not need to be unique.
* @memberof Entity.prototype
* @type {String}
*/
name : createRawPropertyDescriptor('name'),
/**
* Gets or sets whether this entity should be displayed. When set to true,
* the entity is only displayed if the parent entity's show property is also true.
* @memberof Entity.prototype
* @type {Boolean}
*/
show : {
get : function() {
return this._show;
},
set : function(value) {
//>>includeStart('debug', pragmas.debug);
if (!defined(value)) {
throw new DeveloperError('value is required.');
}
//>>includeEnd('debug');
if (value === this._show) {
return;
}
var wasShowing = this.isShowing;
this._show = value;
var isShowing = this.isShowing;
if (wasShowing !== isShowing) {
updateShow(this, this._children, isShowing);
}
this._definitionChanged.raiseEvent(this, 'show', value, !value);
}
},
/**
* Gets whether this entity is being displayed, taking into account
* the visibility of any ancestor entities.
* @memberof Entity.prototype
* @type {Boolean}
*/
isShowing : {
get : function() {
return this._show && (!defined(this.entityCollection) || this.entityCollection.show) && (!defined(this._parent) || this._parent.isShowing);
}
},
/**
* Gets or sets the parent object.
* @memberof Entity.prototype
* @type {Entity}
*/
parent : {
get : function() {
return this._parent;
},
set : function(value) {
var oldValue = this._parent;
if (oldValue === value) {
return;
}
var wasShowing = this.isShowing;
if (defined(oldValue)) {
var index = oldValue._children.indexOf(this);
oldValue._children.splice(index, 1);
}
this._parent = value;
if (defined(value)) {
value._children.push(this);
}
var isShowing = this.isShowing;
if (wasShowing !== isShowing) {
updateShow(this, this._children, isShowing);
}
this._definitionChanged.raiseEvent(this, 'parent', value, oldValue);
}
},
/**
* Gets the names of all properties registered on this instance.
* @memberof Entity.prototype
* @type {Array}
*/
propertyNames : {
get : function() {
return this._propertyNames;
}
},
/**
* Gets or sets the billboard.
* @memberof Entity.prototype
* @type {BillboardGraphics}
*/
billboard : createPropertyTypeDescriptor('billboard', BillboardGraphics),
/**
* Gets or sets the box.
* @memberof Entity.prototype
* @type {BoxGraphics}
*/
box : createPropertyTypeDescriptor('box', BoxGraphics),
/**
* Gets or sets the corridor.
* @memberof Entity.prototype
* @type {CorridorGraphics}
*/
corridor : createPropertyTypeDescriptor('corridor', CorridorGraphics),
/**
* Gets or sets the cylinder.
* @memberof Entity.prototype
* @type {CylinderGraphics}
*/
cylinder : createPropertyTypeDescriptor('cylinder', CylinderGraphics),
/**
* Gets or sets the description.
* @memberof Entity.prototype
* @type {Property}
*/
description : createPropertyDescriptor('description'),
/**
* Gets or sets the ellipse.
* @memberof Entity.prototype
* @type {EllipseGraphics}
*/
ellipse : createPropertyTypeDescriptor('ellipse', EllipseGraphics),
/**
* Gets or sets the ellipsoid.
* @memberof Entity.prototype
* @type {EllipsoidGraphics}
*/
ellipsoid : createPropertyTypeDescriptor('ellipsoid', EllipsoidGraphics),
/**
* Gets or sets the label.
* @memberof Entity.prototype
* @type {LabelGraphics}
*/
label : createPropertyTypeDescriptor('label', LabelGraphics),
/**
* Gets or sets the model.
* @memberof Entity.prototype
* @type {ModelGraphics}
*/
model : createPropertyTypeDescriptor('model', ModelGraphics),
/**
* Gets or sets the orientation.
* @memberof Entity.prototype
* @type {Property}
*/
orientation : createPropertyDescriptor('orientation'),
/**
* Gets or sets the path.
* @memberof Entity.prototype
* @type {PathGraphics}
*/
path : createPropertyTypeDescriptor('path', PathGraphics),
/**
* Gets or sets the plane.
* @memberof Entity.prototype
* @type {PlaneGraphics}
*/
plane : createPropertyTypeDescriptor('plane', PlaneGraphics),
/**
* Gets or sets the point graphic.
* @memberof Entity.prototype
* @type {PointGraphics}
*/
point : createPropertyTypeDescriptor('point', PointGraphics),
/**
* Gets or sets the polygon.
* @memberof Entity.prototype
* @type {PolygonGraphics}
*/
polygon : createPropertyTypeDescriptor('polygon', PolygonGraphics),
/**
* Gets or sets the polyline.
* @memberof Entity.prototype
* @type {PolylineGraphics}
*/
polyline : createPropertyTypeDescriptor('polyline', PolylineGraphics),
/**
* Gets or sets the polyline volume.
* @memberof Entity.prototype
* @type {PolylineVolumeGraphics}
*/
polylineVolume : createPropertyTypeDescriptor('polylineVolume', PolylineVolumeGraphics),
/**
* Gets or sets the bag of arbitrary properties associated with this entity.
* @memberof Entity.prototype
* @type {PropertyBag}
*/
properties : createPropertyTypeDescriptor('properties', PropertyBag),
/**
* Gets or sets the position.
* @memberof Entity.prototype
* @type {PositionProperty}
*/
position : createPositionPropertyDescriptor('position'),
/**
* Gets or sets the rectangle.
* @memberof Entity.prototype
* @type {RectangleGraphics}
*/
rectangle : createPropertyTypeDescriptor('rectangle', RectangleGraphics),
/**
* Gets or sets the suggested initial offset for viewing this object
* with the camera. The offset is defined in the east-north-up reference frame.
* @memberof Entity.prototype
* @type {Property}
*/
viewFrom : createPropertyDescriptor('viewFrom'),
/**
* Gets or sets the wall.
* @memberof Entity.prototype
* @type {WallGraphics}
*/
wall : createPropertyTypeDescriptor('wall', WallGraphics)
});
/**
 * Given a time, returns true if this object should have data during that time.
 *
 * @param {JulianDate} time The time to check availability for.
 * @returns {Boolean} true if the object should have data during the provided time, false otherwise.
 */
Entity.prototype.isAvailable = function(time) {
    //>>includeStart('debug', pragmas.debug);
    if (!defined(time)) {
        throw new DeveloperError('time is required.');
    }
    //>>includeEnd('debug');

    // An entity with no availability interval is considered always available.
    return !defined(this._availability) || this._availability.contains(time);
};
/**
 * Adds a property to this object. Once a property is added, it can be
 * observed with {@link Entity#definitionChanged} and composited
 * with {@link CompositeEntityCollection}
 *
 * @param {String} propertyName The name of the property to add.
 *
 * @exception {DeveloperError} "propertyName" is a reserved property name.
 * @exception {DeveloperError} "propertyName" is already a registered property.
 */
Entity.prototype.addProperty = function(propertyName) {
    var registeredNames = this._propertyNames;

    //>>includeStart('debug', pragmas.debug);
    if (!defined(propertyName)) {
        throw new DeveloperError('propertyName is required.');
    }
    if (registeredNames.indexOf(propertyName) !== -1) {
        throw new DeveloperError(propertyName + ' is already a registered property.');
    }
    if (propertyName in this) {
        throw new DeveloperError(propertyName + ' is a reserved property name.');
    }
    //>>includeEnd('debug');

    // Track the name and install a raw accessor so changes raise definitionChanged.
    registeredNames.push(propertyName);
    Object.defineProperty(this, propertyName, createRawPropertyDescriptor(propertyName, true));
};
/**
 * Removes a property previously added with addProperty.
 *
 * Note: unlike addProperty, no reserved-name check is performed here — a name
 * that was never registered simply fails the "registered property" check below.
 *
 * @param {String} propertyName The name of the property to remove.
 *
 * @exception {DeveloperError} "propertyName" is not a registered property.
 */
Entity.prototype.removeProperty = function(propertyName) {
    var propertyNames = this._propertyNames;
    var index = propertyNames.indexOf(propertyName);

    //>>includeStart('debug', pragmas.debug);
    if (!defined(propertyName)) {
        throw new DeveloperError('propertyName is required.');
    }
    if (index === -1) {
        throw new DeveloperError(propertyName + ' is not a registered property.');
    }
    //>>includeEnd('debug');

    // Remove the tracked name and the accessor installed by addProperty.
    propertyNames.splice(index, 1);
    delete this[propertyName];
};
/**
 * Assigns each unassigned property on this object to the value
 * of the same property on the provided source object.
 *
 * @param {Entity} source The object to be merged into this object.
 */
Entity.prototype.merge = function(source) {
    //>>includeStart('debug', pragmas.debug);
    if (!defined(source)) {
        throw new DeveloperError('source is required.');
    }
    //>>includeEnd('debug');

    // Name, show, and availability are not Property objects and are currently handled differently.
    // source.show is intentionally ignored because this.show always has a value.
    // NOTE(review): name keeps the existing value while availability prefers the source's
    // value — the asymmetry appears intentional but is worth confirming against callers.
    this.name = defaultValue(this.name, source.name);
    this.availability = defaultValue(source.availability, this.availability);

    var propertyNames = this._propertyNames;
    // A plain object (no _propertyNames) may also be merged; use its own keys then.
    var sourcePropertyNames = defined(source._propertyNames) ? source._propertyNames : Object.keys(source);
    var propertyNamesLength = sourcePropertyNames.length;
    for (var i = 0; i < propertyNamesLength; i++) {
        var name = sourcePropertyNames[i];

        // Ignore parent when merging, this only happens at construction time.
        if (name === 'parent') {
            continue;
        }

        var targetProperty = this[name];
        var sourceProperty = source[name];

        // Custom properties that are registered on the source entity must also
        // get registered on this entity.
        if (!defined(targetProperty) && propertyNames.indexOf(name) === -1) {
            this.addProperty(name);
        }

        if (defined(sourceProperty)) {
            if (defined(targetProperty)) {
                // Both sides have a value: delegate to the property's own merge, if any.
                if (defined(targetProperty.merge)) {
                    targetProperty.merge(sourceProperty);
                }
            } else if (defined(sourceProperty.merge) && defined(sourceProperty.clone)) {
                // Mergeable property types are cloned so this entity owns its copy.
                this[name] = sourceProperty.clone();
            } else {
                this[name] = sourceProperty;
            }
        }
    }
};
// Scratch objects reused by computeModelMatrix to avoid per-call allocations.
var matrix3Scratch = new Matrix3();
var positionScratch = new Cartesian3();
var orientationScratch = new Quaternion();
/**
 * Computes the model matrix for the entity's transform at specified time. Returns undefined if orientation or position
 * are undefined.
 *
 * @param {JulianDate} time The time to retrieve model matrix for.
 * @param {Matrix4} [result] The object onto which to store the result.
 *
 * @returns {Matrix4} The modified result parameter or a new Matrix4 instance if one was not provided. Result is undefined if position or orientation are undefined.
 */
Entity.prototype.computeModelMatrix = function(time, result) {
    Check.typeOf.object('time', time);

    var position = Property.getValueOrUndefined(this._position, time, positionScratch);
    if (!defined(position)) {
        return undefined;
    }

    var orientation = Property.getValueOrUndefined(this._orientation, time, orientationScratch);
    if (defined(orientation)) {
        // Build a rotation + translation matrix from the sampled orientation.
        return Matrix4.fromRotationTranslation(Matrix3.fromQuaternion(orientation, matrix3Scratch), position, result);
    }
    // No orientation: orient the frame east-north-up at the sampled position.
    return Transforms.eastNorthUpToFixedFrame(position, undefined, result);
};
/**
 * Checks if the given Scene supports materials besides Color on Entities draped on terrain.
 * If this feature is not supported, Entities with non-color materials but no `height` will
 * instead be rendered as if height is 0.
 *
 * NOTE(review): the lowercase "for" in the function name is a long-standing public API
 * name and must not be "fixed" — callers depend on this exact spelling.
 *
 * @param {Scene} scene The current scene.
 * @returns {Boolean} Whether or not the current scene supports materials for entities on terrain.
 */
Entity.supportsMaterialsforEntitiesOnTerrain = function(scene) {
    // Delegates entirely to the primitive layer's capability check.
    return GroundPrimitive.supportsMaterials(scene);
};
return Entity;
});
package mccity.heroes.skills.summonboat;
import com.herocraftonline.heroes.Heroes;
import com.herocraftonline.heroes.api.SkillResult;
import com.herocraftonline.heroes.characters.Hero;
import com.herocraftonline.heroes.characters.skill.ActiveSkill;
import com.herocraftonline.heroes.characters.skill.SkillConfigManager;
import com.herocraftonline.heroes.characters.skill.SkillType;
import com.herocraftonline.heroes.util.Messaging;
import com.herocraftonline.heroes.characters.skill.SkillSetting;
import com.herocraftonline.heroes.util.Util;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Boat;
import org.bukkit.entity.Player;
/**
 * Active Heroes skill that summons a {@link Boat} on top of the water block
 * the player is currently looking at.
 */
public class SkillSummonBoat extends ActiveSkill {

    // Message shown when the targeted block is not water; loaded from config in init().
    private String cantSummonHere;

    public SkillSummonBoat(Heroes plugin) {
        super(plugin, "SummonBoat");
        setDescription("Summons a boat.");
        setUsage("/skill summonboat");
        setArgumentRange(0, 0);
        setIdentifiers("skill summonboat");
        setTypes(SkillType.SUMMON, SkillType.KNOWLEDGE);
    }

    @Override
    public void init() {
        super.init();
        // Fix: read the same node that getDefaultConfig() declares ("cant-summon-here-text").
        // Previously this read "cant-summon-here", so the configured message was never applied.
        cantSummonHere = SkillConfigManager.getRaw(this, "cant-summon-here-text", "Can't summon boat here");
    }

    /**
     * Declares the default configuration: cooldown (ms), mana cost, targeting
     * distance and the "can't summon here" message.
     */
    public ConfigurationSection getDefaultConfig() {
        ConfigurationSection defaultConfig = super.getDefaultConfig();
        defaultConfig.set(SkillSetting.COOLDOWN.node(), 3600000);
        defaultConfig.set(SkillSetting.MANA.node(), 50);
        defaultConfig.set(SkillSetting.MAX_DISTANCE.node(), 5);
        defaultConfig.set("cant-summon-here-text", "Can't summon boat here");
        return defaultConfig;
    }

    /**
     * Builds the per-hero description string, appending cooldown (seconds),
     * mana cost and max targeting distance when they are positive.
     */
    public String getDescription(Hero hero) {
        StringBuilder descr = new StringBuilder(getDescription());

        // Cooldown is stored in milliseconds; display it in seconds.
        double cdSec = SkillConfigManager.getUseSetting(hero, this, SkillSetting.COOLDOWN, 3600000, false) / 1000.0;
        if (cdSec > 0) {
            descr.append(" CD:");
            descr.append(Util.formatDouble(cdSec));
            descr.append("s");
        }

        int mana = SkillConfigManager.getUseSetting(hero, this, SkillSetting.MANA, 50, false);
        if (mana > 0) {
            descr.append(" M:");
            descr.append(mana);
        }

        double distance = SkillConfigManager.getUseSetting(hero, this, SkillSetting.MAX_DISTANCE.node(), 5, false);
        if (distance > 0) {
            descr.append(" Dist:");
            descr.append(Util.formatDouble(distance));
        }
        return descr.toString();
    }

    /**
     * Spawns a boat centered one block above the targeted water block.
     *
     * @return {@link SkillResult#NORMAL} on success, {@link SkillResult#FAIL}
     *         (with a message to the player) when no water block is targeted.
     */
    @Override
    public SkillResult use(Hero hero, String[] args) {
        Player player = hero.getPlayer();
        int maxDistance = SkillConfigManager.getUseSetting(hero, this, SkillSetting.MAX_DISTANCE.node(), 5, false);

        Block targetBlock = player.getTargetBlock(null, maxDistance);
        if (targetBlock != null && (targetBlock.getType() == Material.WATER || targetBlock.getType() == Material.STATIONARY_WATER)) {
            // +0.5 on x/z centers the boat on the block; +1 puts it above the water surface.
            Location spawnLoc = targetBlock.getLocation().add(0.5, 1, 0.5);
            targetBlock.getWorld().spawn(spawnLoc, Boat.class);
            return SkillResult.NORMAL;
        } else {
            Messaging.send(player, cantSummonHere);
            return SkillResult.FAIL;
        }
    }
}
| apache-2.0 |
/*
* Copyright 2011-2015 The Trustees of the University of Pennsylvania
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.upenn.library.xmlaminar.cli;
import edu.upenn.library.xmlaminar.DevNullContentHandler;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLFilter;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLFilterImpl;
/**
*
* @author magibney
*/
/**
 * Command factory for the "pipeline" command: parses a nested XML configuration
 * of &lt;source&gt;/&lt;filter&gt; elements (each naming another registered command type)
 * and chains the resulting commands into a single XMLFilter pipeline.
 *
 * This class is itself a SAX filter; it tracks element depth and temporarily
 * re-routes SAX events to the nested command's own configuring filter while
 * that command's subtree is being parsed.
 */
public class PipelineCommandFactory extends CommandFactory {

    private static final String KEY = "pipeline";

    static {
        // Self-register under the "pipeline" key so Config lookup can find this factory.
        registerCommandFactory(new PipelineCommandFactory());
    }

    // True once this instance was created via getConfiguringXMLFilter (i.e. is XML-backed).
    private final boolean xmlConfigured;
    // True if this pipeline is the first stage (must start with a <source> element).
    private final boolean first;
    // Input handling command shared down the chain; may be replaced while parsing.
    private InitCommand inputBase;
    private final CommandType maxType;

    public PipelineCommandFactory() {
        this(false, false, null, null);
    }

    private PipelineCommandFactory(boolean first, InitCommand inputBase, CommandType maxType) {
        this(true, first, inputBase, maxType);
    }

    private PipelineCommandFactory(boolean xmlConfigured, boolean first, InitCommand inputBase, CommandType maxType) {
        this.xmlConfigured = xmlConfigured;
        this.first = first;
        this.inputBase = inputBase;
        this.maxType = maxType;
    }

    @Override
    public PipelineCommandFactory getConfiguringXMLFilter(boolean first, InitCommand inputBase, CommandType maxType) {
        return new PipelineCommandFactory(first, inputBase, maxType);
    }

    @Override
    public Command newCommand(boolean first, boolean last) {
        // Only XML-configured instances know their nested commands.
        if (!xmlConfigured) {
            throw new IllegalStateException();
        }
        return new PipelineCommand(this.first, last);
    }

    @Override
    public String getKey() {
        return KEY;
    }

    // Nested command factories collected while parsing, in pipeline order.
    // The String[] value slot is currently always null (reserved for args).
    private final LinkedList<Entry<CommandFactory, String[]>> commandFactories = new LinkedList<Entry<CommandFactory, String[]>>();
    // Current element nesting depth relative to this pipeline's root element (-1 = before document).
    private int depth = -1;

    private void reset() {
        depth = -1;
        commandFactories.clear();
    }

    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        depth--;
        super.endElement(uri, localName, qName);
    }

    // All registered command factories, keyed by their "type" attribute value.
    private final Map<String, CommandFactory> cfs = CommandFactory.getAvailableCommandFactories();
    // Depth at which SAX events were handed off to a nested factory (MAX_VALUE = not delegating).
    private int delegateDepth = Integer.MAX_VALUE;
    private static final ContentHandler DEV_NULL_CONTENT_HANDLER = new DevNullContentHandler();

    /**
     * Depth 0: validates this pipeline's own root element.
     * Depth 1: looks up the nested command's factory by its "type" attribute and
     * re-routes subsequent SAX events to that factory (via passThrough) until the
     * nested element's subtree ends.
     */
    @Override
    public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException {
        ConfigCommandFactory.verifyNamespaceURI(uri);
        if (depth == 0) {
            if (!localName.equals(first ? "source" : "filter")) {
                throw new IllegalStateException("bad element localName for first=" + first + "; expected "
                        + (first ? "source" : "filter") + ", found " + localName);
            }
            super.startElement(uri, localName, qName, atts);
        } else if (depth == 1) {
            // Only the very first nested element of the first pipeline may be a <source>.
            if (!localName.equals(first && commandFactories.isEmpty() ? "source" : "filter")) {
                throw new IllegalStateException("bad element localName for first=" + first + "; expected "
                        + (first ? "source" : "filter") + ", found " + localName);
            }
            String type;
            CommandFactory cf = cfs.get(type = atts.getValue("type"));
            if (cf == null) {
                throw new IllegalArgumentException("type must be one of " + cfs + "; found " + type);
            }
            CommandFactory currentCommandFactory = cf.getConfiguringXMLFilter(first && commandFactories.isEmpty(), inputBase, maxType);
            if (currentCommandFactory == null) {
                // Factories without their own XML configuration get the generic one.
                currentCommandFactory = new ConfigCommandFactory(false, first && commandFactories.isEmpty(), inputBase, maxType);
            }
            // Hand parsing of this subtree over to the nested factory.
            delegateDepth = depth;
            XMLReader parent = getParent();
            parent.setContentHandler(passThrough);
            passThrough.setParent(parent);
            if (cf instanceof InputCommandFactory) {
                if (inputBase == null || !inputBase.isExplicit()) {
                    // First (implicit) input command wins; remember it and let it configure itself.
                    inputBase = (InputCommandFactory.InputCommand) cf.newCommand(first, false);
                    inputCommandFactory = (ConfigCommandFactory) currentCommandFactory;
                    passThrough.setContentHandler(currentCommandFactory);
                    currentCommandFactory.setParent(passThrough);
                    currentCommandFactory.startDocument();
                    currentCommandFactory.startElement(uri, localName, qName, atts);
                } else {
                    // An explicit input is already set; silently discard this input subtree.
                    passThrough.setContentHandler(DEV_NULL_CONTENT_HANDLER);
                }
            } else {
                passThrough.setContentHandler(currentCommandFactory);
                currentCommandFactory.setParent(passThrough);
                currentCommandFactory.startDocument();
                currentCommandFactory.startElement(uri, localName, qName, atts);
                commandFactories.add(new AbstractMap.SimpleImmutableEntry<CommandFactory, String[]>(currentCommandFactory, null));
            }
        } else {
            // Depth > 1 is always handled by passThrough, never by this handler.
            throw new AssertionError("this should never happen");
        }
        depth++;
    }

    // Configuring filter of the (implicit) input command, if one was encountered.
    private ConfigCommandFactory inputCommandFactory;
    private final PassThroughXMLFilter passThrough = new PassThroughXMLFilter();

    /**
     * Forwards the nested command's subtree to its configuring filter, maintaining
     * the shared depth counter; when the subtree closes it finishes the delegate's
     * document and restores the outer PipelineCommandFactory as content handler.
     */
    private class PassThroughXMLFilter extends XMLFilterImpl {

        @Override
        public void endElement(String uri, String localName, String qName) throws SAXException {
            super.endElement(uri, localName, qName);
            if (--depth <= delegateDepth) {
                // Nested subtree finished: close the delegate's virtual document
                // and return control to the pipeline parser.
                super.endDocument();
                delegateDepth = Integer.MAX_VALUE;
                getParent().setContentHandler(PipelineCommandFactory.this);
            }
        }

        @Override
        public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException {
            super.startElement(uri, localName, qName, atts);
            depth++;
        }
    }

    @Override
    public void endDocument() throws SAXException {
        depth--;
        super.endDocument();
    }

    @Override
    public void startDocument() throws SAXException {
        // A new document restarts configuration from scratch.
        reset();
        super.startDocument();
        depth++;
    }

    /**
     * The runtime command produced by this factory: lazily chains all collected
     * nested commands into a single XMLFilter pipeline on first use.
     */
    private class PipelineCommand implements Command<InputCommandFactory.InputCommand> {

        private final boolean first;
        private final boolean last;
        // Created in getXMLFilter(); all other accessors delegate to it afterwards.
        private Driver.XMLFilterSource<InputCommandFactory.InputCommand> xmlFilterSource;

        public PipelineCommand(boolean first, boolean last) {
            this.first = first;
            this.last = last;
        }

        @Override
        public XMLFilter getXMLFilter(ArgFactory arf, InputCommandFactory.InputCommand inputBase, CommandType maxType) {
            try {
                String[] args;
                if (inputCommandFactory != null) {
                    // Re-materialize command-line args from the parsed input config.
                    args = inputCommandFactory.constructCommandLineArgs(PipelineCommandFactory.this.inputBase);
                    PipelineCommandFactory.this.inputBase.setInputArgs(args);
                }
                xmlFilterSource = Driver.chainCommands(first, PipelineCommandFactory.this.inputBase, commandFactories.iterator(), last);
            } catch (FileNotFoundException ex) {
                throw new RuntimeException(ex);
            } catch (IOException ex) {
                throw new RuntimeException(ex);
            }
            return xmlFilterSource.getXMLReader();
        }

        @Override
        public InputSource getInput() throws FileNotFoundException {
            return xmlFilterSource.getInputSource();
        }

        @Override
        public File getInputBase() {
            return xmlFilterSource.getInputBase();
        }

        @Override
        public void printHelpOn(OutputStream out) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        @Override
        public CommandType getCommandType() {
            return xmlFilterSource.getCommandType();
        }

        @Override
        public boolean handlesOutput() {
            return xmlFilterSource.handlesOutput();
        }

        @Override
        public InputCommandFactory.InputCommand inputHandler() {
            return xmlFilterSource.inputHandler();
        }
    }
}
package org.minimalj.example.erp.model;
import org.minimalj.model.Keys;
import org.minimalj.model.View;
/**
 * Read-only view of an {@link Offer}, exposing only the fields needed
 * to identify and display an offer.
 */
public class OfferView implements View<Offer> {
    // Keys constant used by the framework to reference view properties.
    public static final OfferView $ = Keys.of(OfferView.class);

    public Object id;
    public String offer_nr;
    public String title;

    /**
     * @return the user-visible representation of this offer (its title)
     */
    public String display() {
        return this.title;
    }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.testframework.sm.runner;
import com.intellij.execution.testframework.Printer;
import com.intellij.execution.testframework.sm.runner.events.*;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.hash.HashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
public class GeneralIdBasedToSMTRunnerEventsConvertor extends GeneralTestEventsProcessor {
  private static final Logger LOG = Logger.getInstance(GeneralIdBasedToSMTRunnerEventsConvertor.class);

  // Every node created during the run, keyed by the framework-supplied node id.
  // Concurrent collections are used — presumably events may arrive off the EDT.
  private final Map<String, Node> myNodeByIdMap = ContainerUtil.newConcurrentMap();
  // Test nodes currently running (used to decide whether the tree completed normally).
  private final Set<Node> myRunningTestNodes = ContainerUtil.newConcurrentSet();
  // Suite nodes currently running.
  private final Set<Node> myRunningSuiteNodes = ContainerUtil.newConcurrentSet();
  // Root of the node tree; id is the reserved ROOT_NODE_ID.
  private final Node myTestsRootNode;
  // Set once onFinishTesting() has run; guards against duplicate finish notifications.
  private boolean myIsTestingFinished = false;
  // Optional provider of per-node-type printers; set via setPrinterProvider().
  private TestProxyPrinterProvider myTestProxyPrinterProvider = null;
  public GeneralIdBasedToSMTRunnerEventsConvertor(Project project,
                                                  @NotNull SMTestProxy.SMRootTestProxy testsRootProxy,
                                                  @NotNull String testFrameworkName) {
    super(project, testFrameworkName, testsRootProxy);
    // The root node has the reserved id and no parent; register it up front so
    // events referencing ROOT_NODE_ID as parent resolve immediately.
    myTestsRootNode = new Node(TreeNodeEvent.ROOT_NODE_ID, null, testsRootProxy);
    myNodeByIdMap.put(myTestsRootNode.getId(), myTestsRootNode);
  }
  @Override
  public void onStartTesting() {
    LOG.debug("onStartTesting");
    // Mark the root as running and notify listeners that the run began.
    myTestsRootNode.setState(State.RUNNING, this);
    myTestsRootProxy.setStarted();
    fireOnTestingStarted(myTestsRootProxy);
  }
  @Override
  public void onTestsReporterAttached() {
    // A reporter connected; just relay the notification to listeners.
    fireOnTestsReporterAttached(myTestsRootProxy);
  }
  @Override
  public void onFinishTesting() {
    fireOnBeforeTestingFinished(myTestsRootProxy);
    LOG.debug("onFinishTesting");
    LOG.debug("onFinishTesting: invoked");
    if (myIsTestingFinished) {
      LOG.debug("has already been invoked");
      // Guard: this method may be called more than once; only the first call counts.
      return;
    }
    myIsTestingFinished = true;

    // We don't know whether the process was destroyed by the user or finished after all
    // tests were run. Assume: if at finish all nodes except the root suite reached a final
    // state (passed, failed or ignored), the run completed normally; otherwise it was
    // terminated by the user.
    boolean completeTree = isTreeComplete(myRunningTestNodes, myTestsRootProxy);
    if (LOG.isDebugEnabled()) {
      LOG.debug("completeTree:" + completeTree);
    }
    if (completeTree) {
      myTestsRootProxy.setFinished();
    }
    else {
      myTestsRootProxy.setTerminated();
    }
    if (!myRunningTestNodes.isEmpty()) {
      logProblem("Unexpected running nodes: " + myRunningTestNodes);
    }
    // Drop all bookkeeping; the run is over.
    myNodeByIdMap.clear();
    myRunningTestNodes.clear();
    myRunningSuiteNodes.clear();

    fireOnTestingFinished(myTestsRootProxy);

    super.onFinishTesting();
  }
  @Override
  public void setPrinterProvider(@NotNull TestProxyPrinterProvider printerProvider) {
    // Used by createNode() to attach a preferred printer per node type.
    myTestProxyPrinterProvider = printerProvider;
  }
  @Override
  public void onTestStarted(@NotNull final TestStartedEvent testStartedEvent) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("onTestStarted " + testStartedEvent.getId());
    }
    // suite = false: this event starts a leaf test node.
    doStartNode(testStartedEvent, false);
  }
  @Override
  public void onSuiteStarted(@NotNull final TestSuiteStartedEvent suiteStartedEvent) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("onSuiteStarted " + suiteStartedEvent.getId());
    }
    // suite = true: this event starts a suite node.
    doStartNode(suiteStartedEvent, true);
  }
  // Starts (or creates and starts) the node described by the event. If the node already
  // exists — presumably pre-created while building the tree before the run (see
  // createSuite/createProxy) — it is only promoted to running; a second start is a problem.
  private void doStartNode(@NotNull BaseStartedNodeEvent startedNodeEvent, boolean suite) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("doStartNode " + startedNodeEvent.getId());
    }
    Node node = findNode(startedNodeEvent);
    if (node != null) {
      if (node.getState() == State.NOT_RUNNING && startedNodeEvent.isRunning()) {
        setNodeAndAncestorsRunning(node);
      }
      else {
        logProblem(startedNodeEvent + " has been already started: " + node + "!");
      }
      return;
    }

    node = createNode(startedNodeEvent, suite);
    if (node == null) return;
    if (startedNodeEvent.isRunning()) {
      setNodeAndAncestorsRunning(node);
    }
  }
  // Creates a new node (and its SMTestProxy) for the given start event and attaches it
  // to its parent. Returns null when the parent or the node id is invalid (both cases
  // are reported via logProblem by the helpers).
  private Node createNode(@NotNull BaseStartedNodeEvent startedNodeEvent, boolean suite) {
    Node parentNode = findValidParentNode(startedNodeEvent);
    if (parentNode == null) {
      return null;
    }

    String nodeId = validateAndGetNodeId(startedNodeEvent);
    if (nodeId == null) {
      return null;
    }

    String nodeName = startedNodeEvent.getName();
    SMTestProxy childProxy = new SMTestProxy(nodeName, suite, startedNodeEvent.getLocationUrl(), startedNodeEvent.getMetainfo(), true);
    childProxy.putUserData(SMTestProxy.NODE_ID, startedNodeEvent.getId());
    childProxy.setTreeBuildBeforeStart();
    // Attach a preferred printer if a provider was registered and the event carries a node type.
    TestProxyPrinterProvider printerProvider = myTestProxyPrinterProvider;
    String nodeType = startedNodeEvent.getNodeType();
    if (printerProvider != null && nodeType != null && nodeName != null) {
      Printer printer = printerProvider.getPrinterByType(nodeType, nodeName, startedNodeEvent.getNodeArgs());
      if (printer != null) {
        childProxy.setPreferredPrinter(printer);
      }
    }

    Node node = new Node(nodeId, parentNode, childProxy);
    myNodeByIdMap.put(startedNodeEvent.getId(), node);
    if (myLocator != null) {
      childProxy.setLocator(myLocator);
    }
    parentNode.getProxy().addChild(childProxy);
    return node;
  }
@Override
protected SMTestProxy createSuite(String suiteName, String locationHint, String metaInfo, String id, String parentNodeId) {
Node node = createNode(new TestSuiteStartedEvent(suiteName, id, parentNodeId, locationHint, metaInfo, null, null, false), true);
return node.getProxy();
}
@Override
protected SMTestProxy createProxy(String testName, String locationHint, String metaInfo, String id, String parentNodeId) {
Node node = createNode(new TestStartedEvent(testName, id, parentNodeId, locationHint, metaInfo, null, null, false), false);
return node.getProxy();
}
  // Resolves the parent node referenced by the event and validates its state.
  // Returns null (after reporting via logProblem) when the parent id is missing,
  // unknown, or the parent is already in a terminal state.
  @Nullable
  private Node findValidParentNode(@NotNull BaseStartedNodeEvent startedNodeEvent) {
    String parentId = startedNodeEvent.getParentId();
    if (parentId == null) {
      logProblem("Parent node id should be defined: " + startedNodeEvent + ".", true);
      return null;
    }
    Node parentNode = myNodeByIdMap.get(parentId);
    if (parentNode == null) {
      logProblem("Parent node is undefined for " + startedNodeEvent + ".", true);
      return null;
    }
    if (parentNode.getState() != State.NOT_RUNNING && parentNode.getState() != State.RUNNING) {
      logProblem("Parent node should be registered or running: " + parentNode + ", " + startedNodeEvent);
      return null;
    }
    return parentNode;
  }
  @Override
  public void onTestFinished(@NotNull final TestFinishedEvent testFinishedEvent) {
    LOG.debug("onTestFinished");
    Node node = findNodeToTerminate(testFinishedEvent);
    if (node != null) {
      SMTestProxy testProxy = node.getProxy();

      // Duration is optional in the protocol; only set it when provided.
      final Long duration = testFinishedEvent.getDuration();
      if (duration != null) {
        testProxy.setDuration(duration);
      }
      testProxy.setFrameworkOutputFile(testFinishedEvent.getOutputFile());
      testProxy.setFinished();
      fireOnTestFinishedIfNeeded(testProxy, node);
      terminateNode(node, State.FINISHED);
    }
  }
  // Fires the "test finished" notification at most once per test.
  // Allows clients to omit 'testFinished' messages after 'testFailed'/'testIgnored'.
  private void fireOnTestFinishedIfNeeded(@NotNull SMTestProxy testProxy, @NotNull Node node) {
    if (node.getState() != State.FINISHED && node.getState() != State.FAILED && node.getState() != State.IGNORED) {
      LOG.debug("onTestFinished: state != FINISHED && state != FAILED && state != IGNORED");
      // Don't count the same test twice if a 'testFailed' or 'testIgnored' message is
      // followed by 'testFinished', which may happen if generated TeamCity messages adhere to
      // https://confluence.jetbrains.com/display/TCD10/Build+Script+Interaction+with+TeamCity
      // Anyway, this id-based converter already breaks the TeamCity protocol by expecting
      // messages with non-standard attributes: 'nodeId'/'parentNodeId' instead of 'name'.
      fireOnTestFinished(testProxy);
    }
  }
  @Override
  public void onSuiteFinished(@NotNull final TestSuiteFinishedEvent suiteFinishedEvent) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("onSuiteFinished " + suiteFinishedEvent.getId());
    }
    Node node = findNodeToTerminate(suiteFinishedEvent);
    if (node != null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("finished:" + node.myId);
      }
      // Mark the suite finished and move its node to the terminal FINISHED state.
      SMTestProxy suiteProxy = node.getProxy();
      suiteProxy.setFinished();
      fireOnSuiteFinished(suiteProxy);
      terminateNode(node, State.FINISHED);
    }
  }
@Nullable
private Node findNodeToTerminate(@NotNull TreeNodeEvent treeNodeEvent) {
Node node = findNode(treeNodeEvent);
if (node == null) {
logProblem("Trying to finish nonexistent node: " + treeNodeEvent);
return null;
}
return node;
}
@Override
public void onUncapturedOutput(@NotNull final String text, final Key outputType) {
LOG.debug("onUncapturedOutput " + text);
Node activeNode = findActiveNode();
SMTestProxy activeProxy = activeNode.getProxy();
activeProxy.addOutput(text, outputType);
}
  @Override
  public void onError(@NotNull final String localizedMessage,
                      @Nullable final String stackTrace,
                      final boolean isCritical) {
    // No node id available: delegate with nodeId = null so the active node is used.
    onError(null, localizedMessage, stackTrace, isCritical);
  }
public void onError(@Nullable final String nodeId,
@NotNull final String localizedMessage,
@Nullable final String stackTrace,
final boolean isCritical) {
LOG.debug("onError " + localizedMessage);
SMTestProxy activeProxy = null;
if (nodeId != null) {
activeProxy = findProxyById(nodeId);
}
if (activeProxy == null) {
Node activeNode = findActiveNode();
activeProxy = activeNode.getProxy();
}
activeProxy.addError(localizedMessage, stackTrace, isCritical);
}
@Override
public void onTestFailure(@NotNull final TestFailedEvent testFailedEvent) {
if (LOG.isDebugEnabled()) {
LOG.debug("onTestFailure " + testFailedEvent.getId());
}
((Runnable)() -> {
if (LOG.isDebugEnabled()) {
LOG.debug("onTestFailure invoked " + testFailedEvent.getId());
}
Node node = findNodeToTerminate(testFailedEvent);
if (node == null) {
return;
}
SMTestProxy testProxy = node.getProxy();
String comparisonFailureActualText = testFailedEvent.getComparisonFailureActualText();
String comparisonFailureExpectedText = testFailedEvent.getComparisonFailureExpectedText();
String failureMessage = testFailedEvent.getLocalizedFailureMessage();
String stackTrace = testFailedEvent.getStacktrace();
if (comparisonFailureActualText != null && comparisonFailureExpectedText != null) {
testProxy
.setTestComparisonFailed(failureMessage, stackTrace, comparisonFailureActualText, comparisonFailureExpectedText, testFailedEvent);
}
else if (comparisonFailureActualText == null && comparisonFailureExpectedText == null) {
testProxy.setTestFailed(failureMessage, stackTrace, testFailedEvent.isTestError());
}
else {
logProblem("Comparison failure actual and expected texts should be both null or not null.\n"
+ "Expected:\n"
+ comparisonFailureExpectedText + "\n"
+ "Actual:\n"
+ comparisonFailureActualText);
}
long duration = testFailedEvent.getDurationMillis();
if (duration >= 0) {
testProxy.setDuration(duration);
}
fireOnTestFailed(testProxy);
fireOnTestFinishedIfNeeded(testProxy, node);
terminateNode(node, State.FAILED);
}).run();
}
@Override
public void onTestIgnored(@NotNull final TestIgnoredEvent testIgnoredEvent) {
LOG.debug("onTestIgnored");
Node node = findNodeToTerminate(testIgnoredEvent);
if (node != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("onTestIgnored node " + node.myId);
}
SMTestProxy testProxy = node.getProxy();
testProxy.setTestIgnored(testIgnoredEvent.getIgnoreComment(), testIgnoredEvent.getStacktrace());
fireOnTestIgnored(testProxy);
fireOnTestFinishedIfNeeded(testProxy, node);
terminateNode(node, State.IGNORED);
}
}
@Override
public void onTestOutput(@NotNull final TestOutputEvent testOutputEvent) {
LOG.debug("onTestOutput");
Node node = findNode(testOutputEvent);
if (node == null) {
logProblem("Test wasn't started! But " + testOutputEvent + "!");
return;
}
node.getProxy().addOutput(testOutputEvent.getText(), testOutputEvent.getOutputType());
}
  /** Forwards the announced number of tests in the current suite to the listeners. */
  @Override
  public void onTestsCountInSuite(final int count) {
    LOG.debug("onTestsCountInSuite");
    fireOnTestsCountInSuite(count);
  }
@Nullable
private String validateAndGetNodeId(@NotNull TreeNodeEvent treeNodeEvent) {
String nodeId = treeNodeEvent.getId();
if (nodeId == null || nodeId.equals(TreeNodeEvent.ROOT_NODE_ID)) {
logProblem((nodeId == null ? "Missing" : "Illegal") + " nodeId: " + treeNodeEvent, true);
}
return nodeId;
}
@Nullable
private Node findNode(@NotNull TreeNodeEvent treeNodeEvent) {
String nodeId = validateAndGetNodeId(treeNodeEvent);
return nodeId != null ? myNodeByIdMap.get(nodeId) : null;
}
@Nullable
public SMTestProxy findProxyById(@NotNull String id) {
Node node = myNodeByIdMap.get(id);
return node != null ? node.getProxy() : null;
}
/*
* Remove listeners, etc
*/
@Override
public void dispose() {
super.dispose();
if (!myRunningTestNodes.isEmpty()) {
Application application = ApplicationManager.getApplication();
if (!application.isHeadlessEnvironment() && !application.isUnitTestMode()) {
logProblem("Not all events were processed!");
}
}
myRunningTestNodes.clear();
myRunningSuiteNodes.clear();
myNodeByIdMap.clear();
}
  /**
   * Walks up from {@code lowestNode} toward the root, switching the node and
   * every not-yet-running ancestor into the RUNNING state and firing the
   * matching started events. Stops at the root or at the first ancestor that
   * is no longer in the NOT_RUNNING state.
   */
  private void setNodeAndAncestorsRunning(@NotNull Node lowestNode) {
    Node node = lowestNode;
    while (node != null && node != myTestsRootNode && node.getState() == State.NOT_RUNNING) {
      node.setState(State.RUNNING, this);
      SMTestProxy proxy = node.getProxy();
      proxy.setStarted();
      if (proxy.isSuite()) {
        myRunningSuiteNodes.add(node);
        fireOnSuiteStarted(proxy);
      } else {
        // Note: registers the originally passed node, not the current one.
        myRunningTestNodes.add(lowestNode);
        fireOnTestStarted(proxy);
      }
      node = node.getParentNode();
    }
  }
  /** Moves the node into its terminal state and drops it from both running sets. */
  private void terminateNode(@NotNull Node node, @NotNull State terminateState) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("terminateNode " + node.myId);
    }
    node.setState(terminateState, this);
    myRunningTestNodes.remove(node);
    myRunningSuiteNodes.remove(node);
  }
@NotNull
private Node findActiveNode() {
Iterator<Node> testsIterator = myRunningTestNodes.iterator();
if (testsIterator.hasNext()) {
return testsIterator.next();
}
Iterator<Node> suitesIterator = myRunningSuiteNodes.iterator();
if (suitesIterator.hasNext()) {
return suitesIterator.next();
}
return myTestsRootNode;
}
  /** Lifecycle states of a {@code Node}; ordinal order matters (see Node.setState). */
  private enum State {
    NOT_RUNNING, RUNNING, FINISHED, FAILED, IGNORED
  }
private static class Node {
private final String myId;
private final Node myParentNode;
private final SMTestProxy myProxy;
private State myState;
Node(@NotNull String id, @Nullable Node parentNode, @NotNull SMTestProxy proxy) {
myId = id;
myParentNode = parentNode;
myProxy = proxy;
myState = State.NOT_RUNNING;
}
@NotNull
public String getId() {
return myId;
}
@Nullable
public Node getParentNode() {
return myParentNode;
}
@NotNull
public SMTestProxy getProxy() {
return myProxy;
}
@NotNull
public State getState() {
return myState;
}
public void setState(@NotNull State newState, @NotNull GeneralIdBasedToSMTRunnerEventsConvertor convertor) {
// allowed sequences: NOT_RUNNING -> RUNNING or IGNORED; RUNNING -> FINISHED, FAILED or IGNORED; FINISHED <-> FAILED; IGNORED -> FINISHED
if (myState == State.NOT_RUNNING && newState != State.RUNNING && newState != State.IGNORED ||
myState == State.RUNNING && newState != State.FINISHED && newState != State.FAILED && newState != State.IGNORED ||
myState == State.FINISHED && newState != State.FAILED ||
myState == State.FAILED && newState != State.FINISHED ||
myState == State.IGNORED && newState != State.FINISHED) {
convertor.logProblem("Illegal state change [" + myState + " -> " + newState + "]: " + toString(), false);
}
if (myState.ordinal() < newState.ordinal()) {
// for example State.FINISHED comes later than State.FAILED, do not update state in this case
myState = newState;
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Node node = (Node)o;
return myId == node.myId;
}
@Override
public int hashCode() {
return myId.hashCode();
}
@Override
public String toString() {
return "{" +
"id=" + myId +
", parentId=" + (myParentNode != null ? myParentNode.getId() : "<undefined>") +
", name='" + myProxy.getName() +
"', isSuite=" + myProxy.isSuite() +
", state=" + myState +
'}';
}
}
}
| apache-2.0 |
OpenXIP/xip-libraries | src/extern/inventor/apps/converters/ivdowngrade/downgradeToV1.c++ | 17239 | /*
*
* Copyright (C) 2000 Silicon Graphics, Inc. All Rights Reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Further, this software is distributed without any warranty that it is
* free of the rightful claim of any third person regarding infringement
* or the like. Any license provided herein, whether implied or
* otherwise, applies only to this software file. Patent licenses, if
* any, provided herein do not apply to combinations of this program with
* other software, or any other product whatsoever.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Contact information: Silicon Graphics, Inc., 1600 Amphitheatre Pkwy,
* Mountain View, CA 94043, or:
*
* http://www.sgi.com
*
* For further information regarding this notice, see:
*
* http://oss.sgi.com/projects/GenInfo/NoticeExplan/
*
*/
/*
* Copyright (C) 1993-94 Silicon Graphics, Inc.
*
_______________________________________________________________________
______________ S I L I C O N G R A P H I C S I N C . ____________
|
| $Revision: 1.3 $
|
| Author(s) : David Mott
|
______________ S I L I C O N G R A P H I C S I N C . ____________
_______________________________________________________________________
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <Inventor/SbDict.h>
#include <Inventor/SoDB.h>
#include <Inventor/SoInteraction.h>
#include <Inventor/SoInput.h>
#include <Inventor/actions/SoSearchAction.h>
#include <Inventor/actions/SoWriteAction.h>
#include <Inventor/errors/SoReadError.h>
#include <Inventor/manips/SoCenterballManip.h>
#include <Inventor/manips/SoDirectionalLightManip.h>
#include <Inventor/manips/SoHandleBoxManip.h>
#include <Inventor/manips/SoJackManip.h>
#include <Inventor/manips/SoPointLightManip.h>
#include <Inventor/manips/SoSpotLightManip.h>
#include <Inventor/manips/SoTabBoxManip.h>
#include <Inventor/manips/SoTrackballManip.h>
#include <Inventor/manips/SoTransformBoxManip.h>
#include <Inventor/manips/SoTransformManip.h>
#include <Inventor/nodes/SoAnnotation.h>
#include <Inventor/nodes/SoAntiSquish.h>
#include <Inventor/nodes/SoAsciiText.h>
#include <Inventor/nodes/SoBlinker.h>
#include <Inventor/nodes/SoClipPlane.h>
#include <Inventor/nodes/SoColorIndex.h>
#include <Inventor/nodes/SoDirectionalLight.h>
#include <Inventor/nodes/SoDrawStyle.h>
#include <Inventor/nodes/SoEnvironment.h>
#include <Inventor/nodes/SoFile.h>
#include <Inventor/nodes/SoFont.h>
#include <Inventor/nodes/SoFontStyle.h>
#include <Inventor/nodes/SoIndexedTriangleStripSet.h>
#include <Inventor/nodes/SoLabel.h>
#include <Inventor/nodes/SoLevelOfDetail.h>
#include <Inventor/nodes/SoLOD.h>
#include <Inventor/nodes/SoPendulum.h>
#include <Inventor/nodes/SoPickStyle.h>
#include <Inventor/nodes/SoPointLight.h>
#include <Inventor/nodes/SoRotor.h>
#include <Inventor/nodes/SoSelection.h>
#include <Inventor/nodes/SoSeparator.h>
#include <Inventor/nodes/SoShapeHints.h>
#include <Inventor/nodes/SoShuttle.h>
#include <Inventor/nodes/SoSpotLight.h>
#include <Inventor/nodes/SoSurroundScale.h>
#include <Inventor/nodes/SoSwitch.h>
#include <Inventor/nodes/SoText3.h>
#include <Inventor/nodes/SoTexture2.h>
#include <Inventor/nodes/SoTextureCoordinateDefault.h>
#include <Inventor/nodes/SoTextureCoordinatePlane.h>
#include <Inventor/nodes/SoTransformSeparator.h>
#include <Inventor/nodes/SoVertexProperty.h>
#include <Inventor/nodes/SoWWWAnchor.h>
#include <Inventor/nodes/SoWWWInline.h>
#include "SoEnvironmentV1.h"
#include "SoPickStyleV1.h"
#include "SoShapeHintsV1.h"
#include "SoTextureCoordinatePlaneV1.h"
#include "SoIndexedTriangleMeshV1.h"
#include "Util.h"
#include "SoAsciiTextV2.h"
#include "SoVertexPropertyV2.h"
// Dictionaries keyed by node class name (see DICT_ENTER/DICT_FIND below):
// classes that exist only in 2.0, and classes that need a conversion function.
SbDict *version2Only = NULL;
SbDict *needsDowngrading = NULL;
// Enter/find entries keyed by the address of the class-name string.
// NOTE(review): this presumes getName().getString() returns a stable,
// interned pointer per type name -- verify against SbName semantics.
#define DICT_ENTER(dict,nodeClass,dictData) \
	(dict->enter((unsigned long) nodeClass::getClassTypeId().getName().getString(), (void *) dictData))
#define DICT_FIND(dict,node,dictData) \
	(dict->find((unsigned long) node->getTypeId().getName().getString(), dictData))
// Signature shared by all per-class downgrade helpers below.
typedef SoNode *DowngradeFunc(SoNode *n);
// Extern
extern SoNode *downgradeVertexShape(SoVertexShape *vs);
// Forward reference.
SoNode *downgradeCopy(SoNode *node);
// We do not want the fields or the children of the Separator (v1.0 did not
// have fields, and the children we'll copy later)
SoNode *
downgradeSep(SoNode *)
{
    // Fresh, field-less separator; downgradeCopy() re-adds downgraded children.
    return new SoSeparator;
}
// We do not want the fields or the children of the Selection (v1.0 did not
// have fields, and the children we'll copy later)
SoNode *
downgradeSelection(SoNode *)
{
    // Fresh, field-less selection; downgradeCopy() re-adds downgraded children.
    return new SoSelection;
}
// Replace an SoWWWInline with a plain group that holds a downgraded copy of
// whatever child data the inline node already fetched (if any).
SoNode *
downgradeInline(SoNode *node2)
{
    SoWWWInline *inlineNode = (SoWWWInline *) node2;
    SoGroup *result = new SoGroup;
    SoNode *childData = inlineNode->getChildData();
    if (childData != NULL)
        result->addChild(downgradeCopy(childData));
    return result;
}
// SoFontStyle has no 1.0 counterpart; emit an equivalent SoFont instead.
SoNode *
downgradeFontStyle(SoNode *node2)
{
    SoFontStyle *style = (SoFontStyle *) node2;
    SoFont *font = new SoFont;
    font->name = style->getFontName();
    COPY_FIELD(font, style, size); // font->size = style->size
    return font;
}
SoNode *
downgradeAsciiText(SoNode *node2)
{
    // Delegate to the dedicated 2.x AsciiText compatibility converter.
    return SoAsciiTextV2::convert((SoAsciiText *) node2);
}
SoNode *
downgradeVertexProperty(SoNode *node2)
{
    // Have to downgrade the group that gets returned
    // since SoPackedColor (a field of VertexProperty) must get downgraded
    // after it is convert to the PackedColor node.
    SoGroup *g = SoVertexPropertyV2::convert((SoVertexProperty *) node2);
    return downgradeCopy(g);
}
// LOD and LevelOfDetail have no 1.0 counterpart; write them as an SoSwitch
// with whichChild = 0, i.e. always show the highest level of detail.
SoNode *
downgradeSwitch(SoNode *)
{
    SoSwitch *replacement = new SoSwitch;
    replacement->whichChild = 0;
    return replacement;
}
SoNode *
downgradeDrawStyle(SoNode *node2)
{
    // v1.0 has no pointSize field; mark it default so it is not written out.
    SoDrawStyle *style = (SoDrawStyle *) node2;
    style->pointSize.setDefault(TRUE);
    return style;
}
SoNode *
downgradeLight(SoNode *node2)
{
    // Mark the 'on' field default so it is not written out (presumably absent
    // in the 1.0 format). The original comment mentioned pointSize, which was
    // copy-pasted from downgradeDrawStyle and did not match this code.
    SoLight *light = (SoLight *) node2;
    light->on.setDefault(TRUE);
    return light;
}
SoNode *
downgradeEnv(SoNode *node2)
{
    // SoEnvironmentV1 rewrites the 2.0 environment fields into 1.0 form.
    return SoEnvironmentV1::downgrade((SoEnvironment *) node2);
}
SoNode *
downgradePickStyle(SoNode *node2)
{
    // SoPickStyleV1 rewrites the 2.0 pick-style fields into 1.0 form.
    return SoPickStyleV1::downgrade((SoPickStyle *) node2);
}
SoNode *
downgradeText3(SoNode *node2)
{
    SoText3 *text = (SoText3 *) node2;
    // A default 'parts' field in 2.0 means FRONT in 1.0, so make it explicit.
    if (text->parts.isDefault()) {
        text->parts.setValue(SoText3::FRONT);
    }
    return text;
}
SoNode *
downgradeShapeHints(SoNode *node2)
{
    // SoShapeHintsV1 rewrites the 2.0 shape-hints fields into 1.0 form.
    return SoShapeHintsV1::downgrade((SoShapeHints *) node2);
}
SoNode *
downgradeTexture2(SoNode *node2)
{
    // v1.0 has no image field; mark it default so it is not written out.
    SoTexture2 *texture = (SoTexture2 *) node2;
    texture->image.setDefault(TRUE);
    return texture;
}
SoNode *
downgradeTexCoordPlane(SoNode *node2)
{
    // SoTextureCoordinatePlaneV1 rewrites the 2.0 fields into 1.0 form.
    return SoTextureCoordinatePlaneV1::downgrade((SoTextureCoordinatePlane *) node2);
}
SoNode *
downgradeIdxTriStripSet(SoNode *node2)
{
    // 1.0 had SoIndexedTriangleMesh instead of the 2.0 strip set.
    return SoIndexedTriangleMeshV1::downgrade((SoIndexedTriangleStripSet *) node2);
}
//
// Traverse the scene graph, converting from 2.0 to 1.0.
// This is where the work is done.
//
// Returns a node usable in a 1.0 file. Depending on the case this is a newly
// built replacement OR the input node itself mutated in place (fields
// disconnected, children replaced) -- callers must not assume the input
// graph is left untouched.
SoNode *
downgradeCopy(SoNode *node)
{
    SoNode *nodeCopy = NULL;
    SoField *altRep;
    void *dictData;
    // If not an Inventor built in node, do not try to copy.
    // Simply return a label.
    if (! node->getIsBuiltIn()) {
        // If there is an alternate representation, use it
        if ((altRep = node->getField("alternateRep")) != NULL &&
            (altRep->isOfType(SoSFNode::getClassTypeId()))) {
            nodeCopy = downgradeCopy(((SoSFNode *) altRep)->getValue());
            nodeCopy->setName("alternateRep");
        }
        else {
            // No alternate: leave a label recording the lost node's type.
            SoLabel *l = new SoLabel;
            l->label = node->getTypeId().getName();
            nodeCopy = l;
        }
    }
    // If not a group, "copy" and return
    else if (! node->isOfType(SoGroup::getClassTypeId())) {
        // Is this a version 2.0 only node?
        if (DICT_FIND(version2Only, node, dictData) ) {
            // No 1.0 counterpart: leave a label recording the type name.
            SoLabel *l = new SoLabel;
            l->label = node->getTypeId().getName();
            nodeCopy = l;
        }
        // Is this a nodekit?
        else if ( node->isOfType(SoBaseKit::getClassTypeId())) {
            // [1] Create a vanilla group instead:
            SoGroup *grp = new SoGroup;
            // [2] Copy the children,
            SoChildList *kids = ((SoBaseKit *) node)->getChildren();
            if (kids != NULL) {
                for (int i = 0; i < kids->getLength(); i++)
                    grp->addChild( downgradeCopy( (*kids)[i] ) );
            }
            // [3] Add a first child indicating that this used to be a kit.
            SoLabel *l = new SoLabel;
            l->label = node->getTypeId().getName();
            grp->insertChild(l,0);
            // [4] assign grp to be the nodeCopy
            nodeCopy = grp;
        }
        // Or does it need downgrading?
        else if (DICT_FIND(needsDowngrading, node, dictData)) {
            // downgrade! (dictData was set in DICT_FIND)
            DowngradeFunc *f = (DowngradeFunc *) dictData;
            nodeCopy = (*f)(node);
        }
        // Is it a vertex shape? Need to remove the vertexProperty field if set.
        else if ( node->isOfType(SoVertexShape::getClassTypeId())) {
            nodeCopy = downgradeVertexShape((SoVertexShape *) node);
            // If the shape was downgraded to something else, we have to convert
            // that to 1.0 format.
            if (nodeCopy != node)
                nodeCopy = downgradeCopy(nodeCopy);
        }
        // If not a group, simply use this node, first disconnecting fields
        // (We could copy it, but that can get expensive with large data sets.)
        else {
            SoFieldList list;
            int num = node->getFields(list);
            for (int i = 0; i < num; i++) {
                // 1.0 files have no field connections; sever them in place.
                if (list[i]->isConnected())
                    list[i]->disconnect();
            }
            nodeCopy = node;
        }
    }
    // For a group, copy the node, then traverse the children
    else {
        SoGroup *group2 = (SoGroup *) node; // version 2.0
        SoGroup *group1; // version 1.0
        // Is this a version 2.0 only node?
        if (DICT_FIND(version2Only, node, dictData)) {
            // Treat it like a group
            group1 = new SoGroup;
        }
        // Or does it need downgrading?
        else if (DICT_FIND(needsDowngrading, node, dictData)) {
            // downgrade! (dictData was set in DICT_FIND)
            DowngradeFunc *f = (DowngradeFunc *) dictData;
            group1 = (SoGroup *)(*f)(node);
        }
        // Else use the group. We have to downgrade its children.
        else {
            // Plain group: keep it and downgrade its children in place.
            nodeCopy = group2;
            for (int i = 0; i < group2->getNumChildren(); i++) {
                SoNode *n = downgradeCopy(group2->getChild(i));
                if (n != group2->getChild(i))
                    group2->replaceChild(i, n);
            }
            return nodeCopy;
        }
        // Traverse to copy the children from group2 to group1
        for (int i = 0; i < group2->getNumChildren(); i++)
            group1->addChild(downgradeCopy(group2->getChild(i)));
        nodeCopy = group1;
    }
    return nodeCopy;
}
//
// This routine searches for and expands all SoFile nodes in the
// given scene graph. It does this by making all the children of a
// SoFile node the children of its parent.
// (Code for this function taken from ivcat)
//
// Fixes over the original:
//  - file->copyChildren() may return NULL; the old code called ref() on the
//    result BEFORE the NULL check, crashing on an empty/unreadable file.
//  - the explicit ref() was never balanced by an unref(), leaking the group;
//    insertChild() takes its own reference, so no manual ref is needed.
//  - the strdup()'d search path was never freed.
//
void
expandFileNodes(SoNode *&root)
{
    //
    // Special case: if root is a file node, replace it with a group.
    //
    if (root->isOfType(SoFile::getClassTypeId())) {
        SoFile *f = (SoFile *)root;
        SoGroup *g = f->copyChildren();
        root->unref();
        root = g;
        root->ref();
    }
    // Search for all file nodes
    SoSearchAction sa;
    sa.setType(SoFile::getClassTypeId());
    sa.setInterest(SoSearchAction::FIRST);
    sa.setSearchingAll(TRUE);
    sa.apply(root);
    // We'll keep on searching until there are no more file nodes
    // left. We don't search for all file nodes at once, because we
    // need to modify the scene graph, and so the paths returned may
    // be truncated (if there are several file nodes under a group, if
    // there are files within files, etc). Dealing properly with that
    // is complicated-- it is easier (but slower) to just reapply the
    // search until it fails.
    // We need an SoFullPath here because we're searching node kit
    // contents.
    SoFullPath *p = (SoFullPath *) sa.getPath();
    while (p != NULL) {
        SoGroup *parent = (SoGroup *)p->getNodeFromTail(1);
        assert(parent != NULL);
        SoFile *file = (SoFile *)p->getTail();
        // If the filename includes a directory path, add the directory name
        // to the list of directories where to look for input files
        const char* filename = file->name.getValue().getString();
        const char *slashPtr;
        if ((slashPtr = strrchr(filename, '/')) != NULL) {
            char *searchPath = strdup(filename);
            searchPath[slashPtr - filename] = '\0';
            SoInput::addDirectoryFirst(searchPath);
            // addDirectoryFirst copies the string, so release ours.
            free(searchPath);
        }
        int fileIndex = p->getIndexFromTail(0);
        assert(fileIndex != -1);
        // Now, add group of all children to file's parent's list of children,
        // right after the file node:
        SoGroup *fileGroup = file->copyChildren();
        if (fileGroup != NULL) {
            // insertChild() references the group for us.
            parent->insertChild(fileGroup, fileIndex+1);
        }
        else {
            // So we can at least see where the file node contents were
            // supposed to go.
            parent->insertChild(new SoGroup, fileIndex+1);
        }
        // And expand the child node from the group.
        // Note that if the File node is multiply instanced,
        // the groups will not be instanced, but the children of the
        // groups will be.
        parent->removeChild(fileIndex);
        sa.apply(root);
        p = (SoFullPath *) sa.getPath();
    }
}
////////////////////////////////////////////////////////////////////////
//
// Down-grade the passed scene to 1.0 format.
//
// NOTE(review): the two SbDict tables below are allocated on every call and
// never deleted -- fine for a one-shot converter process, a leak if called
// repeatedly from a long-lived program.
//
SoNode *
downgradeToV1(SoNode *n)
//
////////////////////////////////////////////////////////////////////////
{
    // "Copy" the scene graph from 2.0 to 1.0 format.
    // That is, traverse the graph reconstructing a new one.
    // When a node is the same format in 1.0 and 2.0, we simply copy it.
    // When the format has changed, we build the 1.0 counterpart.
    // First, set up a dictionary of nodes that exist in 2.0,
    // but have no 1.0 counterpart.
    version2Only = new SbDict;
#define VERSION_2_DICT_ENTER(nodeClass) \
        DICT_ENTER(version2Only, nodeClass, NULL)
    VERSION_2_DICT_ENTER(SoAnnotation);
    VERSION_2_DICT_ENTER(SoAntiSquish);
    VERSION_2_DICT_ENTER(SoBlinker);
    VERSION_2_DICT_ENTER(SoClipPlane);
    VERSION_2_DICT_ENTER(SoColorIndex);
    VERSION_2_DICT_ENTER(SoPendulum);
    VERSION_2_DICT_ENTER(SoRotor);
    VERSION_2_DICT_ENTER(SoShuttle);
    VERSION_2_DICT_ENTER(SoSurroundScale);
    VERSION_2_DICT_ENTER(SoTextureCoordinateDefault);
    VERSION_2_DICT_ENTER(SoTransformSeparator);
    VERSION_2_DICT_ENTER(SoCenterballManip);
    VERSION_2_DICT_ENTER(SoDirectionalLightManip);
    VERSION_2_DICT_ENTER(SoHandleBoxManip);
    VERSION_2_DICT_ENTER(SoJackManip);
    VERSION_2_DICT_ENTER(SoPointLightManip);
    VERSION_2_DICT_ENTER(SoSpotLightManip);
    VERSION_2_DICT_ENTER(SoTabBoxManip);
    VERSION_2_DICT_ENTER(SoTrackballManip);
    VERSION_2_DICT_ENTER(SoTransformBoxManip);
    VERSION_2_DICT_ENTER(SoTransformManip);
    // And a dictionary of nodes that need downgrading
    // (mapping each class to its DowngradeFunc defined above).
    needsDowngrading = new SbDict;
#define DOWNGRADE_DICT_ENTER(nodeClass, func) \
        DICT_ENTER(needsDowngrading, nodeClass, func)
    DOWNGRADE_DICT_ENTER(SoSelection, downgradeSelection);
    DOWNGRADE_DICT_ENTER(SoSeparator, downgradeSep);
    DOWNGRADE_DICT_ENTER(SoWWWAnchor, downgradeSep);
    DOWNGRADE_DICT_ENTER(SoWWWInline, downgradeInline);
    DOWNGRADE_DICT_ENTER(SoLevelOfDetail, downgradeSwitch);
    DOWNGRADE_DICT_ENTER(SoLOD, downgradeSwitch);
    DOWNGRADE_DICT_ENTER(SoDirectionalLight, downgradeLight);
    DOWNGRADE_DICT_ENTER(SoPointLight, downgradeLight);
    DOWNGRADE_DICT_ENTER(SoSpotLight, downgradeLight);
    DOWNGRADE_DICT_ENTER(SoDrawStyle, downgradeDrawStyle);
    DOWNGRADE_DICT_ENTER(SoEnvironment, downgradeEnv);
    DOWNGRADE_DICT_ENTER(SoPickStyle, downgradePickStyle);
    DOWNGRADE_DICT_ENTER(SoText3, downgradeText3);
    DOWNGRADE_DICT_ENTER(SoShapeHints, downgradeShapeHints);
    DOWNGRADE_DICT_ENTER(SoTexture2, downgradeTexture2);
    DOWNGRADE_DICT_ENTER(SoTextureCoordinatePlane, downgradeTexCoordPlane);
    DOWNGRADE_DICT_ENTER(SoIndexedTriangleStripSet, downgradeIdxTriStripSet);
    DOWNGRADE_DICT_ENTER(SoFontStyle, downgradeFontStyle);
    DOWNGRADE_DICT_ENTER(SoAsciiText, downgradeAsciiText);
    DOWNGRADE_DICT_ENTER(SoVertexProperty, downgradeVertexProperty);
    // Traverse the graph
    return downgradeCopy(n);
}
| apache-2.0 |
dropwizard/dropwizard | dropwizard-jersey/src/test/java/io/dropwizard/jersey/validation/JerseyViolationExceptionTest.java | 877 | package io.dropwizard.jersey.validation;
import org.glassfish.jersey.process.Inflector;
import org.glassfish.jersey.server.model.Invocable;
import org.junit.jupiter.api.Test;
import javax.validation.ConstraintViolation;
import javax.ws.rs.core.Request;
import java.util.Collections;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.mockito.Mockito.mock;
class JerseyViolationExceptionTest {

    @Test
    void testAccessors() {
        // An empty violation set is enough to construct the exception.
        final Set<? extends ConstraintViolation<?>> violations = Collections.emptySet();
        @SuppressWarnings("unchecked")
        final Inflector<Request, ?> inflector = mock(Inflector.class);
        final Invocable invocable = Invocable.create(inflector);
        final JerseyViolationException exception = new JerseyViolationException(violations, invocable);
        // The invocable handed to the constructor must be returned unchanged.
        assertSame(invocable, exception.getInvocable());
    }
}
| apache-2.0 |
naveenbhaskar/gocd | config/config-api/src/main/java/com/thoughtworks/go/config/Users.java | 1628 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.domain.BaseCollection;
import com.thoughtworks.go.domain.ConfigErrors;

import java.util.Arrays;
import java.util.Collection;
@ConfigTag("users")
@ConfigCollection(RoleUser.class)
public class Users extends BaseCollection<RoleUser> implements Validatable {
    private final ConfigErrors configErrors = new ConfigErrors();

    /**
     * Builds a Users collection from the given users. Varargs convenience
     * overload; delegates to the collection-based factory instead of
     * duplicating the copy loop.
     */
    static Users users(RoleUser... users) {
        return users(Arrays.asList(users));
    }

    /** Builds a Users collection containing every user in the given collection. */
    static Users users(Collection<RoleUser> users) {
        Users usersList = new Users();
        usersList.addAll(users);
        return usersList;
    }

    /** A plain user list has no structural constraints to check. */
    public void validate(ValidationContext validationContext) {
    }

    /** Returns the configuration errors recorded against this element. */
    public ConfigErrors errors() {
        return configErrors;
    }

    /** Records a validation error against the given field. */
    public void addError(String fieldName, String message) {
        configErrors.add(fieldName, message);
    }
}
| apache-2.0 |
micro/go-plugins | config/source/configmap/configmap.go | 2315 | // Package configmap config is an interface for dynamic configuration.
package configmap
import (
"fmt"
"github.com/micro/go-micro/v2/config/source"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
// configmap is a config source backed by a Kubernetes ConfigMap.
type configmap struct {
	opts       source.Options
	client     *kubernetes.Clientset // cluster API client built in NewSource
	cerr       error                 // client-construction error, surfaced on Read/Watch
	name       string                // name of the ConfigMap to read
	namespace  string                // namespace the ConfigMap lives in
	configPath string                // kubeconfig path ("" selects in-cluster config)
}
// Predefined variables
var (
	// DefaultName is the ConfigMap read when no name/prefix option is given.
	DefaultName = "micro"
	// DefaultConfigPath selects in-cluster configuration when empty.
	DefaultConfigPath = ""
	// DefaultNamespace is used when no namespace option is given.
	DefaultNamespace = "default"
)
// Read fetches the backing ConfigMap from the cluster and returns its data,
// encoded with the configured encoder, as a ChangeSet. A client-construction
// error recorded at NewSource time is returned here instead.
func (k *configmap) Read() (*source.ChangeSet, error) {
	if k.cerr != nil {
		return nil, k.cerr
	}
	cmp, err := k.client.CoreV1().ConfigMaps(k.namespace).Get(k.name, v1.GetOptions{})
	if err != nil {
		return nil, err
	}
	// Expand the configmap's flat string data into a nested map before encoding.
	data := makeMap(cmp.Data)
	b, err := k.opts.Encoder.Encode(data)
	if err != nil {
		return nil, fmt.Errorf("error reading source: %v", err)
	}
	cs := &source.ChangeSet{
		Format:    k.opts.Encoder.String(),
		Source:    k.String(),
		Data:      b,
		Timestamp: cmp.CreationTimestamp.Time,
	}
	cs.Checksum = cs.Sum()
	return cs, nil
}
// Write is unsupported: the change set is discarded and nil is returned, so
// callers treating this source as read-only do not fail.
func (k *configmap) Write(cs *source.ChangeSet) error {
	return nil
}
// String names this source type.
func (k *configmap) String() string {
	return "configmap"
}
// Watch returns a watcher that reports changes to the backing ConfigMap.
// A client-construction error recorded at NewSource time is returned instead.
func (k *configmap) Watch() (source.Watcher, error) {
	if k.cerr != nil {
		return nil, k.cerr
	}
	return newWatcher(k.name, k.namespace, k.client, k.opts)
}
// NewSource is a factory function
// It builds a configmap source from the given options, falling back to the
// Default* values above for any option not supplied.
func NewSource(opts ...source.Option) source.Source {
	var (
		options    = source.NewOptions(opts...)
		name       = DefaultName
		configPath = DefaultConfigPath
		namespace  = DefaultNamespace
	)
	// NOTE: prefixKey and nameKey both set the ConfigMap name; when both are
	// supplied, nameKey (checked later) wins.
	prefix, ok := options.Context.Value(prefixKey{}).(string)
	if ok {
		name = prefix
	}
	cfg, ok := options.Context.Value(configPathKey{}).(string)
	if ok {
		configPath = cfg
	}
	sname, ok := options.Context.Value(nameKey{}).(string)
	if ok {
		name = sname
	}
	ns, ok := options.Context.Value(namespaceKey{}).(string)
	if ok {
		namespace = ns
	}
	// TODO handle if the client fails what to do current return does not support error
	// (the error is stashed in cerr and surfaced on the first Read/Watch call)
	client, err := getClient(configPath)
	return &configmap{
		cerr:       err,
		client:     client,
		opts:       options,
		name:       name,
		configPath: configPath,
		namespace:  namespace,
	}
}
| apache-2.0 |
netbear/CloudAnts | src/java/voldemort/utils/pool/ResourceFactory.java | 1061 | package voldemort.utils.pool;
/**
* Basic interface for poolable Object Factory
*
*
*/
public interface ResourceFactory<K, V> {
/**
* Create the given resource for the key. This is called once for each
* resource to create it.
*
* @param key The key
* @return The created resource
* @throws Exception
*/
V create(K key) throws Exception;
/**
* Destroy the given resource. This is called only when validate() returns
* false for the resource or the pool is closed.
*
* @param key The key of the resource
* @param obj The resource
*/
void destroy(K key, V obj) throws Exception;
/**
* Check that the given resource is valid. This is called once on every
* checkout, so that the checked out resource is guaranteed to be valid
* (though it could immediately become invalid).
*
* @param key The key of the resource
* @param value The resource
* @return True iff the resource is valid
*/
boolean validate(K key, V value);
}
| apache-2.0 |
luisadeva/shoppingList | public_html/js/app/services.js | 3492 | var shoppingListServices = angular.module('shoppingListServices', []);
shoppingListServices.factory('shoppingListServicesBD', [
function(){
return {
initBD: function (callback) {
if (!(window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB)) {
alert("ERROR: Not support Data Base");
} else {
myStore = new IDBStore(
{
storeName: 'shoppingList',
storePrefix: 'IDBWrapper-',
dbVersion: 13,
keyPath: 'id',
autoIncrement: true,
indexes: [
{name: 'lista', keyPath: 'id', unique: true, multiEntry: false},
],
onStoreReady: function(){
console.log("Data base created!");
//init BD, only DEV enviroment
//myStore.clear();
//for (var i in listasPrueba) {
// myStore.put(listasPrueba[i], function (id) {console.log("List created: " + id); listasPrueba[i].listaId = id;}, function (error) {console.log(error)});
//}
//myStore.put(listasPrueba, callback, function (error) {console.log(error)});
callback();
},
onError: function(error){ console.log("Error when init DB: " + error); throw error;}
}
)
}
},
getAllLists : function (onsuccess) {
return myStore.getAll(onsuccess);
},
saveLists: function (lists, callback){
myStore.put(lists,
function (){
console.log("List saved");
callback();
} ,
function (error) {console.log(error); throw error;});
},
removeAll: function (callback) {
myStore.clear();
callback();
}
};
}]);
shoppingListServices.factory('shoppingListServicesWebRtc', [
    function(){
        var peer;
        var conn;
        return {
            // Create the Peer and report our id to `callback` once the
            // signalling server acknowledges us.
            init: function (callback) {
                peer = new Peer('', {host: 'server-rtc.herokuapp.com', port:'80', debug:'3'});
                peer.on('open', function(id) {
                    console.log('My peer ID is: ' + id);
                    callback(id);
                });
            },
            connect: function (idPeer) {
                conn = peer.connect(idPeer);
                // Receive messages
                conn.on('data', function(data) {
                    console.log('Received', data);
                });
                // A PeerJS DataConnection must not be used before its 'open'
                // event fires; the original sent immediately after connect(),
                // which can be lost. Send the greeting once the channel opens.
                conn.on('open', function() {
                    conn.send('Hello!');
                });
            },
            // Expose arbitrary peer events (e.g. 'connection', 'error').
            on: function(nameEvent, callback){
                peer.on(nameEvent, callback);
            }
        };
    }]);
stephane-martin/salt-debian-packaging | salt-2016.3.2/salt/modules/win_dacl.py | 28918 | # -*- coding: utf-8 -*-
'''
Manage DACLs on Windows
:depends: - winreg Python module
'''
# Import python libs
from __future__ import absolute_import
import os
import logging
# TODO: Figure out the exceptions that could be raised and properly catch
# them instead of a bare except that catches any exception at all
# may also need to add the ability to take ownership of an object to set
# permissions if the minion is running as a user and not LOCALSYSTEM
# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError
from salt.ext.six import string_types
from salt.ext.six.moves import range # pylint: disable=redefined-builtin
# Import third party libs
try:
import salt.ext.six.moves.winreg # pylint: disable=redefined-builtin,no-name-in-module,import-error
import win32security
import ntsecuritycon
HAS_WINDOWS_MODULES = True
except ImportError:
HAS_WINDOWS_MODULES = False
# Module-level logger for this execution module.
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'win_dacl'
class daclConstants(object):
    '''
    dacl constants used throughout the module
    '''
    # Definition in ntsecuritycon is incorrect (does not match winnt.h). The version
    # in ntsecuritycon has the extra bits 0x200 enabled.
    # Note that you when you set this permission what you'll generally get back is it
    # ORed with 0x200 (SI_NO_ACL_PROTECT), which is what ntsecuritycon incorrectly defines.
    def __init__(self):
        # Corrected FILE_ALL_ACCESS mask (see note above about ntsecuritycon).
        self.FILE_ALL_ACCESS = (ntsecuritycon.STANDARD_RIGHTS_REQUIRED | ntsecuritycon.SYNCHRONIZE | 0x1ff)
        # Registry hive names (and their common abbreviations) mapped to the
        # string form expected by the win32security named-security-info APIs.
        self.hkeys_security = {
            'HKEY_LOCAL_MACHINE': 'MACHINE',
            'HKEY_USERS': 'USERS',
            'HKEY_CURRENT_USER': 'CURRENT_USER',
            'HKEY_CLASSES_ROOT': 'CLASSES_ROOT',
            'MACHINE': 'MACHINE',
            'USERS': 'USERS',
            'CURRENT_USER': 'CURRENT_USER',
            'CLASSES_ROOT': 'CLASSES_ROOT',
            'HKLM': 'MACHINE',
            'HKU': 'USERS',
            'HKCU': 'CURRENT_USER',
            'HKCR': 'CLASSES_ROOT',
            }
        # Friendly permission keywords per object type; each entry carries the
        # access-mask bits ('BITS') and the display text ('TEXT').
        self.rights = {
            win32security.SE_REGISTRY_KEY: {
                'READ': {
                    'BITS': salt.ext.six.moves.winreg.KEY_READ,
                    'TEXT': 'read'},
                'FULLCONTROL': {
                    'BITS': salt.ext.six.moves.winreg.KEY_ALL_ACCESS,
                    'TEXT': 'full control'}
                },
            win32security.SE_FILE_OBJECT: {
                'READ': {
                    'BITS': ntsecuritycon.FILE_GENERIC_READ,
                    'TEXT': 'read'},
                'WRITE': {
                    'BITS': ntsecuritycon.FILE_GENERIC_WRITE,
                    'TEXT': 'write'},
                'READ&EXECUTE': {
                    'BITS': ntsecuritycon.FILE_GENERIC_EXECUTE |
                    ntsecuritycon.FILE_GENERIC_READ,
                    'TEXT': 'read and execute'},
                'MODIFY': {
                    'BITS': ntsecuritycon.FILE_GENERIC_WRITE |
                    ntsecuritycon.FILE_GENERIC_READ |
                    ntsecuritycon.FILE_GENERIC_EXECUTE |
                    ntsecuritycon.DELETE,
                    'TEXT': 'modify'},
                'FULLCONTROL': {
                    'BITS': self.FILE_ALL_ACCESS,
                    'TEXT': 'full control'}
                }
            }
        # ACE types: allow (0) / deny (1).
        self.validAceTypes = {
            'ALLOW': {'TEXT': 'allowed', 'BITS': 0},
            'DENY': {'TEXT': 'denied', 'BITS': 1}}
        # Inheritance/propagation keywords per object type, mapped to ACE
        # inheritance-flag combinations.  Several aliases (e.g. 'KEY' and
        # 'THIS KEY ONLY') intentionally map to the same bits/text.
        self.validPropagations = {
            win32security.SE_REGISTRY_KEY: {
                'KEY': {
                    'TEXT': 'this key only',
                    'BITS': win32security.NO_INHERITANCE},
                'KEY&SUBKEYS': {
                    'TEXT': 'this key and subkeys',
                    'BITS': win32security.CONTAINER_INHERIT_ACE},
                'SUBKEYS': {
                    'TEXT': 'subkeys only',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.CONTAINER_INHERIT_ACE},
                'THIS KEY ONLY': {
                    'TEXT': 'this key only',
                    'BITS': win32security.NO_INHERITANCE},
                'THIS KEY AND SUBKEYS': {
                    'TEXT': 'this key and subkeys',
                    'BITS': win32security.CONTAINER_INHERIT_ACE},
                'SUBKEYS ONLY': {
                    'TEXT': 'subkeys only',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.CONTAINER_INHERIT_ACE}
                },
            win32security.SE_FILE_OBJECT: {
                'FILE': {
                    'TEXT': 'this file/folder only',
                    'BITS': win32security.NO_INHERITANCE},
                'FOLDER': {
                    'TEXT': 'this file/folder only',
                    'BITS': win32security.NO_INHERITANCE},
                'FOLDER&SUBFOLDERS&FILES': {
                    'TEXT': 'this folder, subfolders, and files',
                    'BITS': win32security.CONTAINER_INHERIT_ACE |
                    win32security.OBJECT_INHERIT_ACE},
                'FOLDER&SUBFOLDERS': {
                    'TEXT': 'this folder and subfolders',
                    'BITS': win32security.CONTAINER_INHERIT_ACE},
                'FOLDER&FILES': {
                    'TEXT': 'this folder and files',
                    'BITS': win32security.OBJECT_INHERIT_ACE},
                'SUBFOLDERS&FILES': {
                    'TEXT': 'subfolders and files',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.CONTAINER_INHERIT_ACE |
                    win32security.OBJECT_INHERIT_ACE},
                'SUBFOLDERS': {
                    'TEXT': 'subfolders only',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.CONTAINER_INHERIT_ACE},
                'FILES': {
                    'TEXT': 'files only',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.OBJECT_INHERIT_ACE},
                'THIS FILE ONLY': {
                    'TEXT': 'this file/folder only',
                    'BITS': win32security.NO_INHERITANCE},
                'THIS FOLDER ONLY': {
                    'TEXT': 'this file/folder only',
                    'BITS': win32security.NO_INHERITANCE},
                'THIS FOLDER, SUBFOLDERS, AND FILES': {
                    'TEXT': 'this folder, subfolders, and files',
                    'BITS': win32security.CONTAINER_INHERIT_ACE |
                    win32security.OBJECT_INHERIT_ACE},
                'THIS FOLDER AND SUBFOLDERS': {
                    'TEXT': 'this folder and subfolders',
                    'BITS': win32security.CONTAINER_INHERIT_ACE},
                'THIS FOLDER AND FILES': {
                    'TEXT': 'this folder and files',
                    'BITS': win32security.OBJECT_INHERIT_ACE},
                'SUBFOLDERS AND FILES': {
                    'TEXT': 'subfolders and files',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.CONTAINER_INHERIT_ACE |
                    win32security.OBJECT_INHERIT_ACE},
                'SUBFOLDERS ONLY': {
                    'TEXT': 'subfolders only',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.CONTAINER_INHERIT_ACE},
                'FILES ONLY': {
                    'TEXT': 'files only',
                    'BITS': win32security.INHERIT_ONLY_ACE |
                    win32security.OBJECT_INHERIT_ACE}
                }
            }
        # Registry access masks with (True) and without (False) WOW64
        # registry reflection; False adds KEY_WOW64_64KEY.
        self.reflection_mask = {
            True: salt.ext.six.moves.winreg.KEY_ALL_ACCESS,
            False: salt.ext.six.moves.winreg.KEY_ALL_ACCESS | salt.ext.six.moves.winreg.KEY_WOW64_64KEY,
            }
        # User-facing object type names mapped to the win32security SE_*
        # constants (files and directories share SE_FILE_OBJECT).
        self.objectType = {
            'FILE': win32security.SE_FILE_OBJECT,
            'DIRECTORY': win32security.SE_FILE_OBJECT,
            'REGISTRY': win32security.SE_REGISTRY_KEY}
    def getObjectTypeBit(self, t):
        '''
        returns the bit value of the string object type
        '''
        # Strings are looked up case-insensitively; anything else is assumed
        # to already be an SE_* constant and is returned unchanged.
        if isinstance(t, string_types):
            t = t.upper()
            try:
                return self.objectType[t]
            except KeyError:
                raise CommandExecutionError((
                    'Invalid object type "{0}". It should be one of the following: {1}'
                    ).format(t, ', '.join(self.objectType)))
        else:
            return t
    def getSecurityHkey(self, s):
        '''
        returns the necessary string value for an HKEY for the win32security module
        '''
        try:
            return self.hkeys_security[s]
        except KeyError:
            raise CommandExecutionError((
                'No HKEY named "{0}". It should be one of the following: {1}'
                ).format(s, ', '.join(self.hkeys_security)))
    def getPermissionBit(self, t, m):
        '''
        returns a permission bit of the string permission value for the specified object type
        '''
        # Non-string values are assumed to already be a raw access mask.
        try:
            if isinstance(m, string_types):
                return self.rights[t][m]['BITS']
            else:
                return m
        except KeyError:
            raise CommandExecutionError((
                'No right "{0}". It should be one of the following: {1}')
                .format(m, ', '.join(self.rights[t])))
    def getPermissionText(self, t, m):
        '''
        returns the permission textual representation of a specified permission bit/object type
        '''
        try:
            return self.rights[t][m]['TEXT']
        except KeyError:
            raise CommandExecutionError((
                'No right "{0}". It should be one of the following: {1}')
                .format(m, ', '.join(self.rights[t])))
    def getAceTypeBit(self, t):
        '''
        returns the acetype bit of a text value
        '''
        try:
            return self.validAceTypes[t]['BITS']
        except KeyError:
            raise CommandExecutionError((
                'No ACE type "{0}". It should be one of the following: {1}'
                ).format(t, ', '.join(self.validAceTypes)))
    def getAceTypeText(self, t):
        '''
        returns the textual representation of a acetype bit
        '''
        try:
            return self.validAceTypes[t]['TEXT']
        except KeyError:
            raise CommandExecutionError((
                'No ACE type "{0}". It should be one of the following: {1}'
                ).format(t, ', '.join(self.validAceTypes)))
    def getPropagationBit(self, t, p):
        '''
        returns the propagation bit of a text value
        '''
        try:
            return self.validPropagations[t][p]['BITS']
        except KeyError:
            raise CommandExecutionError((
                'No propagation type of "{0}". It should be one of the following: {1}'
                ).format(p, ', '.join(self.validPropagations[t])))
    def getPropagationText(self, t, p):
        '''
        returns the textual representation of a propagation bit
        '''
        try:
            return self.validPropagations[t][p]['TEXT']
        except KeyError:
            raise CommandExecutionError((
                'No propagation type of "{0}". It should be one of the following: {1}'
                ).format(p, ', '.join(self.validPropagations[t])))
    def processPath(self, path, objectType):
        '''
        processes a path/object type combo and returns:
            registry types with the correct HKEY text representation
            files/directories with environment variables expanded
        '''
        # For registry paths, replace the leading hive token with the name
        # the win32security APIs expect, then rejoin the remaining parts.
        if objectType == win32security.SE_REGISTRY_KEY:
            splt = path.split("\\")
            hive = self.getSecurityHkey(splt.pop(0).upper())
            splt.insert(0, hive)
            path = r'\\'.join(splt)
        else:
            path = os.path.expandvars(path)
        return path
def _getUserSid(user):
    '''
    return a state error dictionary, with 'sid' as a field if it could be returned
    if user is None, sid will also be None
    '''
    ret = {}
    try:
        if user:
            sid = win32security.LookupAccountName('', user)[0]
        else:
            sid = None
    except Exception as e:
        ret['result'] = False
        ret['comment'] = 'Unable to obtain the security identifier for {0}. The exception was {1}.'.format(
            user, e)
        return ret
    ret['result'] = True
    ret['sid'] = sid
    return ret
def __virtual__():
    '''
    Only works on Windows systems
    '''
    if not salt.utils.is_windows() or not HAS_WINDOWS_MODULES:
        return (False, "Module win_dacl: module only works on Windows systems")
    return __virtualname__
def _get_dacl(path, objectType):
    '''
    gets the dacl of a path
    '''
    # Any failure (missing object, access denied, ...) yields None.
    try:
        descriptor = win32security.GetNamedSecurityInfo(
            path, objectType, win32security.DACL_SECURITY_INFORMATION)
        return descriptor.GetSecurityDescriptorDacl()
    except Exception:
        return None
def get(path, objectType, user=None):
    '''
    Get the acl of an object. Will filter by user if one is provided.
    '''
    ret = {'Path': path,
           'ACLs': []}
    sid_info = _getUserSid(user)
    if not sid_info['result']:
        return sid_info
    if not (path and objectType):
        return ret
    constants = daclConstants()
    type_bit = constants.getObjectTypeBit(objectType)
    resolved_path = constants.processPath(path, type_bit)
    dacl = _get_dacl(resolved_path, type_bit)
    if dacl:
        target_sid = sid_info['sid']
        # Collect the textual form of each ACE, optionally filtered by SID.
        for idx in range(0, dacl.GetAceCount()):
            ace = dacl.GetAce(idx)
            if not target_sid or (ace[2] == target_sid):
                ret['ACLs'].append(_ace_to_text(ace, type_bit))
    return ret
def add_ace(path, objectType, user, permission, acetype, propagation):
    r'''
    add an ace to an object
    path: path to the object (i.e. c:\\temp\\file, HKEY_LOCAL_MACHINE\\SOFTWARE\\KEY, etc)
    user: user to add
    permission: permissions for the user
    acetype: either allow/deny for each user/permission (ALLOW, DENY)
    propagation: how the ACE applies to children for Registry Keys and Directories(KEY, KEY&SUBKEYS, SUBKEYS)
    CLI Example:
    .. code-block:: bash
        allow domain\fakeuser full control on HKLM\\SOFTWARE\\somekey, propagate to this key and subkeys
        salt 'myminion' win_dacl.add_ace 'HKEY_LOCAL_MACHINE\\SOFTWARE\\somekey' 'Registry' 'domain\fakeuser' 'FULLCONTROL' 'ALLOW' 'KEY&SUBKEYS'
    '''
    ret = {'result': None,
           'changes': {},
           'comment': ''}
    # All arguments are mandatory; the else branch below reports any empty one.
    if (path and user and
        permission and acetype
        and propagation):
        # A plain file has no children, so propagation is forced to FILE.
        if objectType.upper() == "FILE":
            propagation = "FILE"
        dc = daclConstants()
        objectTypeBit = dc.getObjectTypeBit(objectType)
        path = dc.processPath(path, objectTypeBit)
        # Normalize the user-supplied keywords before the bit lookups.
        user = user.strip()
        permission = permission.strip().upper()
        acetype = acetype.strip().upper()
        propagation = propagation.strip().upper()
        sidRet = _getUserSid(user)
        if not sidRet['result']:
            return sidRet
        permissionbit = dc.getPermissionBit(objectTypeBit, permission)
        acetypebit = dc.getAceTypeBit(acetype)
        propagationbit = dc.getPropagationBit(objectTypeBit, propagation)
        dacl = _get_dacl(path, objectTypeBit)
        if dacl:
            acesAdded = []
            try:
                # 0 == ALLOW, 1 == DENY (see daclConstants.validAceTypes).
                if acetypebit == 0:
                    dacl.AddAccessAllowedAceEx(win32security.ACL_REVISION, propagationbit, permissionbit, sidRet['sid'])
                elif acetypebit == 1:
                    dacl.AddAccessDeniedAceEx(win32security.ACL_REVISION, propagationbit, permissionbit, sidRet['sid'])
                # Persist the modified DACL back onto the object.
                win32security.SetNamedSecurityInfo(
                    path, objectTypeBit, win32security.DACL_SECURITY_INFORMATION,
                    None, None, dacl, None)
                acesAdded.append((
                    '{0} {1} {2} on {3}'
                    ).format(
                    user, dc.getAceTypeText(acetype), dc.getPermissionText(objectTypeBit, permission),
                    dc.getPropagationText(objectTypeBit, propagation)))
                ret['result'] = True
            except Exception as e:
                ret['comment'] = 'An error occurred attempting to add the ace. The error was {0}'.format(e)
                ret['result'] = False
                return ret
            if acesAdded:
                ret['changes']['Added ACEs'] = acesAdded
        else:
            ret['comment'] = 'Unable to obtain the DACL of {0}'.format(path)
    else:
        ret['comment'] = 'An empty value was specified for a required item.'
        ret['result'] = False
    return ret
def rm_ace(path, objectType, user, permission=None, acetype=None, propagation=None):
    r'''
    remove an ace to an object
    path: path to the object (i.e. c:\\temp\\file, HKEY_LOCAL_MACHINE\\SOFTWARE\\KEY, etc)
    user: user to remove
    permission: permissions for the user
    acetypes: either allow/deny for each user/permission (ALLOW, DENY)
    propagation: how the ACE applies to children for Registry Keys and Directories(KEY, KEY&SUBKEYS, SUBKEYS)
    If any of the optional parameters are omitted (or set to None) they act as wildcards.
    CLI Example:
    .. code-block:: bash
        remove allow domain\fakeuser full control on HKLM\\SOFTWARE\\somekey propagated to this key and subkeys
        salt 'myminion' win_dacl.rm_ace 'Registry' 'HKEY_LOCAL_MACHINE\\SOFTWARE\\somekey' 'domain\fakeuser' 'FULLCONTROL' 'ALLOW' 'KEY&SUBKEYS'
    '''
    ret = {'result': None,
           'changes': {},
           'comment': ''}
    if path and user:
        dc = daclConstants()
        # A plain file has no children, so propagation collapses to FILE.
        if propagation and objectType.upper() == "FILE":
            propagation = "FILE"
        objectTypeBit = dc.getObjectTypeBit(objectType)
        path = dc.processPath(path, objectTypeBit)
        # Normalize the filters; None means "match anything" below.
        user = user.strip()
        permission = permission.strip().upper() if permission else None
        acetype = acetype.strip().upper() if acetype else None
        propagation = propagation.strip().upper() if propagation else None
        # Only proceed when an exactly-matching ACE exists on the object.
        if check_ace(path, objectType, user, permission, acetype, propagation, True)['Exists']:
            sidRet = _getUserSid(user)
            if not sidRet['result']:
                return sidRet
            permissionbit = dc.getPermissionBit(objectTypeBit, permission) if permission else None
            acetypebit = dc.getAceTypeBit(acetype) if acetype else None
            propagationbit = dc.getPropagationBit(objectTypeBit, propagation) if propagation else None
            dacl = _get_dacl(path, objectTypeBit)
            counter = 0
            acesRemoved = []
            # Walk the DACL by index; when an ACE is deleted the following
            # entries shift down, so the counter is rewound by one to avoid
            # skipping the next ACE.  Inherited ACEs are never removed.
            while counter < dacl.GetAceCount():
                tAce = dacl.GetAce(counter)
                if (tAce[0][1] & win32security.INHERITED_ACE) != win32security.INHERITED_ACE:
                    if tAce[2] == sidRet['sid']:
                        if not acetypebit or tAce[0][0] == acetypebit:
                            if not propagationbit or ((tAce[0][1] & propagationbit) == propagationbit):
                                if not permissionbit or tAce[1] == permissionbit:
                                    dacl.DeleteAce(counter)
                                    counter = counter - 1
                                    acesRemoved.append(_ace_to_text(tAce, objectTypeBit))
                counter = counter + 1
            if acesRemoved:
                try:
                    # Persist the trimmed DACL back onto the object.
                    win32security.SetNamedSecurityInfo(
                        path, objectTypeBit, win32security.DACL_SECURITY_INFORMATION,
                        None, None, dacl, None)
                    ret['changes']['Removed ACEs'] = acesRemoved
                    ret['result'] = True
                except Exception as e:
                    ret['result'] = False
                    ret['comment'] = 'Error removing ACE. The error was {0}.'.format(e)
                    return ret
        else:
            ret['comment'] = 'The specified ACE was not found on the path.'
    return ret
def _ace_to_text(ace, objectType):
    '''
    helper function to convert an ace to a textual representation
    '''
    dc = daclConstants()
    objectType = dc.getObjectTypeBit(objectType)
    try:
        # Prefer "DOMAIN\name"; fall back to the raw string SID when the
        # account cannot be resolved.
        userSid = win32security.LookupAccountSid('', ace[2])
        if userSid[1]:
            userSid = '{1}\\{0}'.format(userSid[0], userSid[1])
        else:
            userSid = '{0}'.format(userSid[0])
    except Exception:
        userSid = win32security.ConvertSidToStringSid(ace[2])
    # ace layout: ((ace_type, ace_flags), access_mask, sid)
    tPerm = ace[1]
    tAceType = ace[0][0]
    tProps = ace[0][1]
    tInherited = ''
    # Translate the ACE type value back into its keyword text.
    for x in dc.validAceTypes:
        if dc.validAceTypes[x]['BITS'] == tAceType:
            tAceType = dc.validAceTypes[x]['TEXT']
            break
    # Translate the access mask into a friendly permission name; only an
    # exact match against a known mask is recognized, otherwise the raw
    # integer is kept.
    for x in dc.rights[objectType]:
        if dc.rights[objectType][x]['BITS'] == tPerm:
            tPerm = dc.rights[objectType][x]['TEXT']
            break
    # Strip the inherited flag before matching the propagation bits.
    if (tProps & win32security.INHERITED_ACE) == win32security.INHERITED_ACE:
        tInherited = '[Inherited]'
        tProps = (tProps ^ win32security.INHERITED_ACE)
    for x in dc.validPropagations[objectType]:
        if dc.validPropagations[objectType][x]['BITS'] == tProps:
            tProps = dc.validPropagations[objectType][x]['TEXT']
            break
    return ((
        '{0} {1} {2} on {3} {4}'
        ).format(userSid, tAceType, tPerm, tProps, tInherited))
def _set_dacl_inheritance(path, objectType, inheritance=True, copy=True, clear=False):
    '''
    helper function to set the inheritance
    '''
    ret = {'result': False,
           'comment': '',
           'changes': {}}
    if path:
        try:
            sd = win32security.GetNamedSecurityInfo(path, objectType, win32security.DACL_SECURITY_INFORMATION)
            tdacl = sd.GetSecurityDescriptorDacl()
            if inheritance:
                if clear:
                    # Remove every non-inherited (explicit) ACE.  Deleting an
                    # ACE shifts the rest down, so the counter only advances
                    # when nothing was deleted.
                    counter = 0
                    removedAces = []
                    while counter < tdacl.GetAceCount():
                        tAce = tdacl.GetAce(counter)
                        if (tAce[0][1] & win32security.INHERITED_ACE) != win32security.INHERITED_ACE:
                            tdacl.DeleteAce(counter)
                            removedAces.append(_ace_to_text(tAce, objectType))
                        else:
                            counter = counter + 1
                    if removedAces:
                        ret['changes']['Removed ACEs'] = removedAces
                else:
                    ret['changes']['Non-Inherited ACEs'] = 'Left in the DACL'
                # UNPROTECTED == allow ACE inheritance from the parent.
                win32security.SetNamedSecurityInfo(
                    path, objectType,
                    win32security.DACL_SECURITY_INFORMATION | win32security.UNPROTECTED_DACL_SECURITY_INFORMATION,
                    None, None, tdacl, None)
                ret['changes']['Inheritance'] = 'Enabled'
            else:
                if not copy:
                    # Drop previously inherited ACEs instead of copying them
                    # into the DACL as explicit entries.
                    counter = 0
                    inheritedAcesRemoved = []
                    while counter < tdacl.GetAceCount():
                        tAce = tdacl.GetAce(counter)
                        if (tAce[0][1] & win32security.INHERITED_ACE) == win32security.INHERITED_ACE:
                            tdacl.DeleteAce(counter)
                            inheritedAcesRemoved.append(_ace_to_text(tAce, objectType))
                        else:
                            counter = counter + 1
                    if inheritedAcesRemoved:
                        ret['changes']['Removed ACEs'] = inheritedAcesRemoved
                else:
                    ret['changes']['Previously Inherited ACEs'] = 'Copied to the DACL'
                # PROTECTED == block ACE inheritance from the parent.
                win32security.SetNamedSecurityInfo(
                    path, objectType,
                    win32security.DACL_SECURITY_INFORMATION | win32security.PROTECTED_DACL_SECURITY_INFORMATION,
                    None, None, tdacl, None)
                ret['changes']['Inheritance'] = 'Disabled'
            ret['result'] = True
        except Exception as e:
            ret['result'] = False
            ret['comment'] = 'Error attempting to set the inheritance. The error was {0}.'.format(e)
    return ret
def enable_inheritance(path, objectType, clear=False):
    '''
    enable/disable inheritance on an object
    clear = True will remove non-Inherited ACEs from the ACL
    '''
    constants = daclConstants()
    type_bit = constants.getObjectTypeBit(objectType)
    target = constants.processPath(path, type_bit)
    return _set_dacl_inheritance(target, type_bit, True, None, clear)
def disable_inheritance(path, objectType, copy=True):
    '''
    disable inheritance on an object
    copy = True will copy the inherited ACEs to the DACL before disabling inheritance
    '''
    constants = daclConstants()
    type_bit = constants.getObjectTypeBit(objectType)
    target = constants.processPath(path, type_bit)
    return _set_dacl_inheritance(target, type_bit, False, copy, None)
def check_inheritance(path, objectType, user=None):
    '''
    check a specified path to verify if inheritance is enabled
    returns 'Inheritance' of True/False
    path: path of the registry key or file system object to check
    user: if provided, will consider only the ACEs for that user
    '''
    ret = {'result': False,
           'Inheritance': False,
           'comment': ''}
    sidRet = _getUserSid(user)
    if not sidRet['result']:
        return sidRet
    dc = daclConstants()
    objectType = dc.getObjectTypeBit(objectType)
    path = dc.processPath(path, objectType)
    try:
        sd = win32security.GetNamedSecurityInfo(path, objectType, win32security.DACL_SECURITY_INFORMATION)
        dacls = sd.GetSecurityDescriptorDacl()
    except Exception as e:
        ret['result'] = False
        ret['comment'] = 'Error obtaining the Security Descriptor or DACL of the path: {0}.'.format(e)
        return ret
    # One inherited ACE (optionally restricted to the given user's SID)
    # is enough to report that inheritance is enabled.
    for counter in range(0, dacls.GetAceCount()):
        ace = dacls.GetAce(counter)
        if (ace[0][1] & win32security.INHERITED_ACE) == win32security.INHERITED_ACE:
            if not sidRet['sid'] or ace[2] == sidRet['sid']:
                ret['Inheritance'] = True
                # NOTE(review): 'result' is left False on this early return;
                # callers appear to read 'Inheritance' only — confirm.
                return ret
    ret['result'] = True
    return ret
def check_ace(path, objectType, user, permission=None, acetype=None, propagation=None, exactPermissionMatch=False):
    '''
    checks a path to verify the ACE (access control entry) specified exists
    returns 'Exists' true if the ACE exists, false if it does not
    path: path to the file/reg key
    user: user that the ACL is for
    permission: permission to test for (READ, FULLCONTROl, etc)
    acetype: the type of ACE (ALLOW or DENY)
    propagation: the propagation type of the ACE (FILES, FOLDERS, KEY, KEY&SUBKEYS, SUBKEYS, etc)
    exactPermissionMatch: the ACL must match exactly, IE if READ is specified, the user must have READ exactly and not FULLCONTROL (which also has the READ permission obviously)
    '''
    ret = {'result': False,
           'Exists': False,
           'comment': ''}
    dc = daclConstants()
    objectTypeBit = dc.getObjectTypeBit(objectType)
    path = dc.processPath(path, objectTypeBit)
    # Normalize the optional filters; None acts as a wildcard below.
    permission = permission.upper() if permission else None
    # Bug fix: this previously tested ``permission`` instead of ``acetype``,
    # which raised AttributeError when a permission was supplied without an
    # acetype, and silently discarded a supplied acetype when permission
    # was None.
    acetype = acetype.upper() if acetype else None
    propagation = propagation.upper() if propagation else None
    permissionbit = dc.getPermissionBit(objectTypeBit, permission) if permission else None
    acetypebit = dc.getAceTypeBit(acetype) if acetype else None
    propagationbit = dc.getPropagationBit(objectTypeBit, propagation) if propagation else None
    sidRet = _getUserSid(user)
    if not sidRet['result']:
        return sidRet
    dacls = _get_dacl(path, objectTypeBit)
    ret['result'] = True
    if dacls:
        # Scan the DACL for an ACE matching the SID and every non-wildcard
        # filter; return as soon as one is found.
        for counter in range(0, dacls.GetAceCount()):
            ace = dacls.GetAce(counter)
            if ace[2] == sidRet['sid']:
                if not acetypebit or ace[0][0] == acetypebit:
                    if not propagationbit or (ace[0][1] & propagationbit) == propagationbit:
                        if not permissionbit:
                            ret['Exists'] = True
                            return ret
                        if exactPermissionMatch:
                            # The access mask must equal the requested bits.
                            if ace[1] == permissionbit:
                                ret['Exists'] = True
                                return ret
                        else:
                            # A superset of the requested bits also matches.
                            if (ace[1] & permissionbit) == permissionbit:
                                ret['Exists'] = True
                                return ret
    else:
        ret['comment'] = 'No DACL found for object.'
    return ret
| apache-2.0 |
azusa/hatunatu | hatunatu/src/test/java/jp/fieldnotes/hatunatu/dao/impl/dao/Employee14Dao.java | 797 | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package jp.fieldnotes.hatunatu.dao.impl.dao;
import jp.fieldnotes.hatunatu.dao.annotation.tiger.Query;
/**
 * DAO test fixture carrying an empty {@link Query} annotation.
 * Presumably exercises the framework's handling/validation of an empty
 * query string — TODO confirm against the corresponding test case.
 */
public interface Employee14Dao {
    // Intentionally empty query string.
    @Query("")
    public void update();
}
| apache-2.0 |
dtrihinas/JCatascopia-Probe-Repo | JCatascopia-Probe-Repo/WindowsProbe/src/main/java/WindowsDiskProbe.java | 2583 | /*******************************************************************************
* Copyright 2014, Laboratory of Internet Computing (LInC), Department of Computer Science, University of Cyprus
*
* For any information relevant to JCatascopia Monitoring System,
* please contact Demetris Trihinas, trihinas{at}cs.ucy.ac.cy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
import java.io.IOException;
import java.util.HashMap;
import eu.celarcloud.jcatascopia.probes.Probe;
import eu.celarcloud.jcatascopia.probes.ProbeMetric;
import eu.celarcloud.jcatascopia.probes.ProbePropertyType;
/**
*
* @author Chris Smowton
*
*/
public class WindowsDiskProbe extends Probe{
	// Powershell command that samples the C: drive's free megabytes and
	// free-space percentage performance counters.
	private static String diskCommand = PowershellHelper.makePowershellStatsCommand(
			new String[] {
				"\\LogicalDisk(C:)\\Free Megabytes",
				"\\LogicalDisk(C:)\\% Free Space"
			});
	/**
	 * @param name probe name
	 * @param freq collection period in seconds
	 */
	public WindowsDiskProbe(String name, int freq){
		super(name,freq);
		// Property 0: absolute free space; property 1: free space percentage.
		this.addProbeProperty(0,"diskFree",ProbePropertyType.LONG,"MB","available disk space in MB");
		this.addProbeProperty(1,"diskFreePercent",ProbePropertyType.DOUBLE,"%","disk space free (%)");
	}
	// Default: named "WindowsDiskProbe", sampled every 60 seconds.
	public WindowsDiskProbe(){
		this("WindowsDiskProbe",60);
	}
	@Override
	public String getDescription() {
		return "WindowsDiskProbe collects Disk usage stats";
	}
	// Runs the Powershell command and parses its output into typed values;
	// propagates any I/O failure to the caller.
	public ProbeMetric collectOrThrow() throws IOException {
		ProbePropertyType[] types = new ProbePropertyType[] { ProbePropertyType.LONG, ProbePropertyType.DOUBLE };
		HashMap<Integer, Object> values = PowershellHelper.powershellToStats(diskCommand, types);
		return new ProbeMetric(values);
	}
	// Best-effort wrapper: on any failure, log and return an empty metric
	// so a single bad sample does not kill the probe loop.
	public ProbeMetric collect() {
		try {
			return collectOrThrow();
		}
		catch(Exception e) {
			System.err.println("Windows disk probe failed: " + e.toString());
			return new ProbeMetric(new HashMap<Integer, Object>());
		}
	}
	/**
	 * @param args
	 */
	public static void main(String[] args) {
		WindowsDiskProbe diskprobe = new WindowsDiskProbe();
		diskprobe.activate();
	}
}
sockeqwe/mosby | mvi-integration-test/src/androidTest/java/com/hannesdorfmann/mosby3/mvi/integrationtest/lifecycle/activity/MviFinishInOnCreateActivityTest.java | 1501 | /*
* Copyright 2016 Hannes Dorfmann.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.hannesdorfmann.mosby3.mvi.integrationtest.lifecycle.activity;
import android.content.pm.ActivityInfo;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import com.hannesdorfmann.mosby3.mvi.integrationtest.lifecycle.LifecycleTestPresenter;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Instrumentation test: launches an activity that finishes itself during
 * onCreate() and waits for its lifecycle to reach onDestroy().
 */
@RunWith(AndroidJUnit4.class) public class MviFinishInOnCreateActivityTest {
  @Rule public ActivityTestRule<MviFinishInOnCreateActivity> rule =
      new ActivityTestRule<>(MviFinishInOnCreateActivity.class);
  @Test public void finishInOnCreate() throws Exception {
    // Context of the app under test.
    MviFinishInOnCreateActivity activity = rule.getActivity();
    activity.onDestroyReached.blockingFirst(); // Waits until onDestroy() is reached
  }
}
| apache-2.0 |
lordelph/phrekyll | tests/Phrekyll/Processor/TwigTest.php | 5608 | <?php
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @category Phrekyll
* @package Phrekyll\Processor
* @author Victor Farazdagi
* @license http://www.apache.org/licenses/LICENSE-2.0
*/
namespace PhrekyllTest\Processor;
use Phrekyll\Processor\Twig as Processor;
/**
 * Test double for the Twig processor: exposes a helper that purges the
 * Twig environment's compiled-template cache between tests.
 */
class TestProcessor
    extends Processor
{
    /**
     * Remove all compiled template files from the Twig cache.
     */
    public function cleanup()
    {
        $this->getEnvironment()->clearCacheFiles();
    }
}
/**
* @category Phrekyll
* @package Phrekyll\Processor
* @author Victor Farazdagi
*/
/**
 * Tests for the Twig processor: rendering, constructor option injection,
 * includes, template inheritance, and front matter stripping.
 *
 * @category Phrekyll
 * @package Phrekyll\Processor
 * @author Victor Farazdagi
 */
class TwigTest
    extends \PHPUnit_Framework_TestCase
{
    /** @var string Absolute path to the fixture templates directory. */
    private $path;
    public function setUp()
    {
        $this->path = dirname(__FILE__) . '/templates/';
    }
    /**
     * Rendering a simple template must match the pre-rendered HTML fixture.
     */
    public function testRender()
    {
        $processor = $this->getProcessor($this->path . 'tpl1.twig');
        $template = file_get_contents($this->path . 'tpl1.twig');
        $rendered = $processor->render($template, array(
            'a_variable' => 'Aha!',
            'navigation' => array(
                array(
                    'href' => 'link1',
                    'caption' => 'caption1'
                ),
                array(
                    'href' => 'link1',
                    'caption' => 'caption1'
                )
            )
        ));
        $static = file_get_contents($this->path . 'tpl1.html');
        $this->assertSame(trim($static), trim($rendered));
    }
    /**
     * Options passed to the processor constructor (here: the cache dir)
     * must be forwarded to the Twig environment.
     */
    public function testRenderConstructorInjection()
    {
        $cache_dir = dirname(__FILE__) . '/templates/cache/';
        $processor = $this->getProcessor(
            $this->path . 'tpl1.twig', array(
                'cache' => $cache_dir,
            )
        );
        $template = file_get_contents($this->path . 'tpl1.twig');
        $rendered = $processor->render($template, array(
            'a_variable' => 'Aha!',
            'navigation' => array(
                array(
                    'href' => 'link1',
                    'caption' => 'caption1'
                ),
                array(
                    'href' => 'link1',
                    'caption' => 'caption1'
                )
            )
        ));
        $static = file_get_contents(dirname(__FILE__) . '/templates/tpl1.html');
        $this->assertSame(trim($static), trim($rendered));
        $processor->cleanup(); // purge cache
        // cleanup() clears every file in the cache dir; recreate the
        // placeholder with the touch() built-in instead of shelling out
        // via backticks (portable, and avoids the "${var}" string
        // interpolation deprecated in PHP 8.2).
        touch($cache_dir . 'README');
    }
    /**
     * Templates pulled in via {% include %} must be resolved relative to
     * the template directory.
     *
     * @group cur
     */
    public function testTwigInclude()
    {
        $processor = $this->getProcessor($this->path . 'twig-include.twig');
        $template = file_get_contents($this->path . 'twig-include.twig');
        $rendered = $processor->render($template, array(
            'a_variable' => 'Aha!',
            'boxes' => array(
                array(
                    'size' => 'huge',
                    'title' => 'phelephant'
                ),
                array(
                    'size' => 'tiny',
                    'title' => 'mouse'
                )
            )
        ));
        $static = file_get_contents(dirname(__FILE__) . '/templates/twig-include.html');
        $this->assertSame(trim($static), trim($rendered));
    }
    /**
     * A child template extending a parent must render the inherited layout.
     */
    public function testInheritedTemplates()
    {
        $processor = $this->getProcessor($this->path . 'twig-child.twig');
        $template = file_get_contents($this->path . 'twig-child.twig');
        $rendered = $processor->render($template, array(
            'a_variable' => 'Aha!',
            'boxes' => array(
                array(
                    'size' => 'huge',
                    'title' => 'phelephant'
                ),
                array(
                    'size' => 'tiny',
                    'title' => 'mouse'
                )
            )
        ));
        $static = file_get_contents(dirname(__FILE__) . '/templates/twig-inherit.html');
        $this->assertSame(trim($static), trim($rendered));
    }
    /**
     * YAML front matter must be stripped before rendering: the output of a
     * template with front matter equals that of the same template without it.
     */
    public function testStripFrontmatter()
    {
        $processor = $this->getProcessor($this->path . 'twig-child-with-fm.twig');
        $template = file_get_contents($this->path . 'twig-child-with-fm.twig');
        $rendered = $processor->render($template, array(
            'a_variable' => 'Aha!',
            'boxes' => array(
                array(
                    'size' => 'huge',
                    'title' => 'phelephant'
                ),
                array(
                    'size' => 'tiny',
                    'title' => 'mouse'
                )
            )
        ));
        $static = file_get_contents(dirname(__FILE__) . '/templates/twig-inherit.html');
        $this->assertSame(trim($static), trim($rendered));
    }
    /**
     * Build a TestProcessor pointed at the given template file, merging any
     * extra constructor options over the defaults.
     */
    private function getProcessor($inputFile, $extraOpts = array())
    {
        $options = array(
            'phr_template_filename' => basename($inputFile),
            'phr_template_dir' => dirname($inputFile),
        );
        return new TestProcessor(array_merge($options, $extraOpts));
    }
}
| apache-2.0 |
andreaceccanti/simple-pdp-java | src/main/java/org/italiangrid/authz/pdp/Decision.java | 112 | package org.italiangrid.authz.pdp;
/**
 * XACML-style authorization decision values.
 * NOTE: do not reorder — constant order defines the ordinals.
 */
public enum Decision {
	/** Access is explicitly denied. */
	DENY,
	/** Access is explicitly permitted. */
	PERMIT,
	/** No policy applies to the request. */
	NOT_APPLICABLE,
	/** The decision could not be evaluated. */
	INDETERMINATE;
}
| apache-2.0 |
billchen198318/qifu | core-lib/src/org/qifu/ui/impl/If.java | 2291 | /*
* Copyright 2012-2017 qifu of copyright Chen Xin Nien
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* -----------------------------------------------------------------------
*
* author: Chen Xin Nien
* contact: chen.xin.nien@gmail.com
*
*/
package org.qifu.ui.impl;
import javax.servlet.jsp.PageContext;
import org.qifu.ui.UIComponent;
import org.qifu.ui.UIComponentValueUtils;
/**
 * Conditional UI component: evaluates an OGNL boolean expression against
 * either the HTTP session or the page/request scope and publishes the
 * outcome via {@link UIComponentValueUtils#putIfResult}.
 * Renders no markup of its own (id/name/script/html are all empty).
 */
public class If implements UIComponent {
	private PageContext pageContext = null;
	private String scope = "";
	private String test = "";
	@Override
	public void setId(String id) {
		// no-op: this component carries no id
	}
	@Override
	public String getId() {
		return "";
	}
	@Override
	public void setName(String name) {
		// no-op: this component carries no name
	}
	@Override
	public String getName() {
		return "";
	}
	@Override
	public String getScript() throws Exception {
		return "";
	}
	@Override
	public String getHtml() throws Exception {
		return "";
	}
	@Override
	public void setPageContext(PageContext pageContext) {
		this.pageContext = pageContext;
	}
	public String getScope() {
		return scope;
	}
	public void setScope(String scope) {
		this.scope = scope;
	}
	public String getTest() {
		return test;
	}
	public void setTest(String test) {
		this.test = test;
	}
	/**
	 * Evaluate the OGNL test expression. Anything that is not a Boolean
	 * (including null) counts as false. The result is always recorded via
	 * putIfResult before returning.
	 */
	public Boolean getTestResult() {
		Object evaluated;
		if (SCOPE_SESSION.equals(this.scope)) {
			evaluated = UIComponentValueUtils.getOgnlProcessObjectFromHttpSession(this.pageContext, this.test);
		} else {
			evaluated = UIComponentValueUtils.getOgnlProcessObjectFromPageContextOrRequest(this.pageContext, this.test);
		}
		if (!(evaluated instanceof Boolean)) {
			UIComponentValueUtils.putIfResult(pageContext, Boolean.FALSE);
			return false;
		}
		UIComponentValueUtils.putIfResult(pageContext, (Boolean) evaluated);
		return (Boolean) evaluated;
	}
}
| apache-2.0 |
ngvozdiev/ncode | src/common/file.cc | 8081 | // Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: kenton@google.com (Kenton Varda)
// emulates google3/file/base/file.cc
#include <stdio.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/param.h>
#ifdef _MSC_VER
#define WIN32_LEAN_AND_MEAN // yeah, right
#include <windows.h> // Find*File(). :(
#include <io.h>
#include <direct.h>
#else
#include <dirent.h>
#include <unistd.h>
#endif
#include <errno.h>
#include <string.h>
#include <fstream>
#include "file.h"
#include "logging.h"
#include "strutil.h"
namespace ncode {
#ifdef _WIN32
#define mkdir(name, mode) mkdir(name)
// Windows doesn't have symbolic links.
#define lstat stat
#ifndef F_OK
#define F_OK 00 // not defined by MSVC for whatever reason
#endif
#endif
// Returns true if a filesystem entry named 'name' exists (of any type).
bool File::Exists(const std::string& name) {
  const int rc = access(name.c_str(), F_OK);
  return rc == 0;
}
// Reads the entire file 'name' in binary mode and appends its contents to
// '*output'.  Returns false if the file cannot be opened, a read error
// occurred, or closing the stream failed.
bool File::ReadFileToString(const std::string& name, std::string* output) {
  char buffer[1024];
  FILE* file = fopen(name.c_str(), "rb");
  if (file == NULL) return false;

  while (true) {
    // fread returns the number of bytes read; 0 means EOF or a read error.
    size_t n = fread(buffer, 1, sizeof(buffer), file);
    // size_t is unsigned, so the previous 'n <= 0' test was a tautological
    // comparison; 'n == 0' states the intent exactly.
    if (n == 0) break;
    output->append(buffer, n);
  }

  // Capture the error flag before fclose() invalidates the stream.
  int error = ferror(file);
  if (fclose(file) != 0) return false;
  return error == 0;
}
// Renames 'src' to 'dst', aborting the process (via CHECK) on failure.
void File::MoveOrDie(const std::string& src, const std::string& dst) {
  const int rc = rename(src.c_str(), dst.c_str());
  CHECK(rc == 0) << "Could not rename " << src << " to " << dst << ": "
                 << strerror(errno);
}
// Stats 'name'.  On success stores whether the entry is a directory in
// '*directory' and returns true; returns false if stat() fails.
bool File::FileOrDirectory(const std::string& name, bool* directory) {
  struct stat statbuf;
  const bool stat_ok = (stat(name.c_str(), &statbuf) == 0);
  if (!stat_ok) {
    return false;
  }
  *directory = S_ISDIR(statbuf.st_mode);
  return true;
}
// Returns the size in bytes of the regular file 'name', aborting if stat()
// fails or if 'name' is a directory.
// NOTE(review): the error text says "fstat" but the call is stat(); also the
// return type is int while st_size is off_t, so files over 2 GiB would be
// truncated -- confirm callers never pass such files (widening the return
// type would change the declared interface).
int File::FileSizeOrDie(const std::string& name) {
  struct stat statbuf;
  CHECK(stat(name.c_str(), &statbuf) == 0) << "Bad fstat: " << strerror(errno);
  CHECK(!S_ISDIR(statbuf.st_mode)) << "File is a directory: " << name;
  return statbuf.st_size;
}
// Returns the final path component of 'file_location' -- the text after the
// last '/'.  Delegates to the project's Split() so edge-case handling
// (e.g. empty pieces) matches the rest of the codebase.
std::string File::ExtractFileName(const std::string& file_location) {
  const std::vector<std::string> components = Split(file_location, "/", true);
  CHECK(components.size() > 0);
  return components.back();
}
// Returns the entire contents of 'name', aborting (via CHECK) if the file
// cannot be read.  The error message includes the current working directory
// to help diagnose relative-path mistakes.
std::string File::ReadFileToStringOrDie(const std::string& name) {
  std::string contents;
  CHECK(ReadFileToString(name, &contents))
      << "Could not read: " << name << " from " << WorkingDirectoryOrDie();
  return contents;
}
// Writes 'contents' to the file 'name' in binary mode, replacing any previous
// contents.  Returns false (after logging) if the file cannot be opened, the
// write is short, or closing the stream fails.
bool File::WriteStringToFile(const std::string& contents,
                             const std::string& name) {
  FILE* file = fopen(name.c_str(), "wb");
  if (file == NULL) {
    LOG(ERROR) << "fopen(" << name << ", \"wb\"): " << strerror(errno);
    return false;
  }

  if (fwrite(contents.data(), 1, contents.size(), file) != contents.size()) {
    LOG(ERROR) << "fwrite(" << name << "): " << strerror(errno);
    // Close best-effort on the error path; previously this returned without
    // fclose(), leaking the FILE handle.
    fclose(file);
    return false;
  }

  if (fclose(file) != 0) {
    return false;
  }
  return true;
}
// Writes 'contents' to 'name' in binary mode, aborting (via CHECK) on any
// open, write, or close failure.
void File::WriteStringToFileOrDie(const std::string& contents,
                                  const std::string& name) {
  FILE* file = fopen(name.c_str(), "wb");
  CHECK(file != NULL) << "fopen(" << name << ", \"wb\"): " << strerror(errno);
  const size_t written = fwrite(contents.data(), 1, contents.size(), file);
  CHECK_EQ(written, contents.size())
      << "fwrite(" << name << "): " << strerror(errno);
  CHECK(fclose(file) == 0) << "fclose(" << name << "): " << strerror(errno);
}
// Creates the directory 'name' with permission bits 'mode'.
// Returns true on success.
bool File::CreateDir(const std::string& name, int mode) {
  const int rc = mkdir(name.c_str(), mode);
  return rc == 0;
}
// Creates 'path' and any missing parent directories (like `mkdir -p`).
// Returns false if creation fails or the path already exists.
bool File::RecursivelyCreateDir(const std::string& path, int mode) {
  if (CreateDir(path, mode)) return true;  // Direct creation succeeded.
  if (Exists(path)) return false;          // Entry already present.
  // A parent directory must be missing: create it first, then retry.
  const std::string::size_type last_slash = path.find_last_of('/');
  if (last_slash == std::string::npos) {
    return false;  // No parent component to create.
  }
  if (!RecursivelyCreateDir(path.substr(0, last_slash), mode)) {
    return false;
  }
  return CreateDir(path, mode);
}
// Recursively deletes the file or directory 'name'.  Errors are largely
// ignored: this is only used by tests to delete temporary directories under
// /tmp.  'dummy1'/'dummy2' are unused and kept for signature compatibility.
void File::DeleteRecursively(const std::string& name, void* dummy1,
                             void* dummy2) {
  Unused(dummy1);
  Unused(dummy2);
  if (name.empty()) return;
  // We don't care too much about error checking here since this is only used
  // in tests to delete temporary directories that are under /tmp anyway.
#ifdef _MSC_VER
  // This interface is so weird.
  WIN32_FIND_DATA find_data;
  HANDLE find_handle = FindFirstFile((name + "/*").c_str(), &find_data);
  if (find_handle == INVALID_HANDLE_VALUE) {
    // Just delete it, whatever it is.
    DeleteFile(name.c_str());
    RemoveDirectory(name.c_str());
    return;
  }
  do {
    // Qualified as std::string (previously unqualified 'string', which only
    // compiles if something else drags in a using-declaration).
    std::string entry_name = find_data.cFileName;
    if (entry_name != "." && entry_name != "..") {
      std::string path = name + "/" + entry_name;
      if (find_data.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
        DeleteRecursively(path, NULL, NULL);
        RemoveDirectory(path.c_str());
      } else {
        DeleteFile(path.c_str());
      }
    }
  } while (FindNextFile(find_handle, &find_data));
  FindClose(find_handle);
  RemoveDirectory(name.c_str());
#else
  // Use opendir()! Yay!
  // lstat = Don't follow symbolic links.
  struct stat stats;
  if (lstat(name.c_str(), &stats) != 0) return;
  if (S_ISDIR(stats.st_mode)) {
    DIR* dir = opendir(name.c_str());
    if (dir != NULL) {
      while (true) {
        struct dirent* entry = readdir(dir);
        if (entry == NULL) break;
        std::string entry_name = entry->d_name;
        if (entry_name != "." && entry_name != "..") {
          DeleteRecursively(name + "/" + entry_name, NULL, NULL);
        }
      }
      // Only close a stream we actually opened: the previous code called
      // closedir(dir) unconditionally, which is undefined behavior when
      // opendir() failed and returned NULL.
      closedir(dir);
    }
    rmdir(name.c_str());
  } else if (S_ISREG(stats.st_mode)) {
    remove(name.c_str());
  }
#endif
}
// Changes the process working directory; returns true on success.
bool File::ChangeWorkingDirectory(const std::string& new_working_directory) {
  const int rc = chdir(new_working_directory.c_str());
  return rc == 0;
}
// Returns the current working directory, aborting if getcwd() fails
// (e.g. the path exceeds MAXPATHLEN).
std::string File::WorkingDirectoryOrDie() {
  char cwd[MAXPATHLEN];
  CHECK_NOTNULL(getcwd(cwd, MAXPATHLEN));
  return std::string(cwd);
}
// Returns a path of the form '<dir>/<random len-char name>' that does not
// currently exist.
// NOTE(review): inherently racy (TOCTOU) -- another process can create the
// file between this existence check and the caller's use; confirm callers
// only use this for test scratch files.
std::string File::PickFileName(const std::string& dir, size_t len) {
  std::string candidate;
  do {
    candidate = ncode::StrCat(dir, "/", RandomString(len));
  } while (File::Exists(candidate));
  return candidate;
}
// Invokes 'callback' once per line of the file 'name' (line terminators are
// stripped by std::getline).  Returns false if the file cannot be opened.
bool File::ReadLines(const std::string& name,
                     std::function<void(const std::string& line)> callback) {
  std::ifstream in(name);
  if (in.fail()) {
    return false;
  }
  for (std::string line; std::getline(in, line);) {
    callback(line);
  }
  return true;
}
} // namespace ncode
| apache-2.0 |
safesoftware/fmepedia-speech-to-call | www/js/index.js | 6976 | var langs =
[['Afrikaans', ['af-ZA']],
['Bahasa Indonesia',['id-ID']],
['Bahasa Melayu', ['ms-MY']],
['Català', ['ca-ES']],
['Čeština', ['cs-CZ']],
['Deutsch', ['de-DE']],
['English', ['en-AU', 'Australia'],
['en-CA', 'Canada'],
['en-IN', 'India'],
['en-NZ', 'New Zealand'],
['en-ZA', 'South Africa'],
['en-GB', 'United Kingdom'],
['en-US', 'United States']],
['Español', ['es-AR', 'Argentina'],
['es-BO', 'Bolivia'],
['es-CL', 'Chile'],
['es-CO', 'Colombia'],
['es-CR', 'Costa Rica'],
['es-EC', 'Ecuador'],
['es-SV', 'El Salvador'],
['es-ES', 'España'],
['es-US', 'Estados Unidos'],
['es-GT', 'Guatemala'],
['es-HN', 'Honduras'],
['es-MX', 'México'],
['es-NI', 'Nicaragua'],
['es-PA', 'Panamá'],
['es-PY', 'Paraguay'],
['es-PE', 'Perú'],
['es-PR', 'Puerto Rico'],
['es-DO', 'República Dominicana'],
['es-UY', 'Uruguay'],
['es-VE', 'Venezuela']],
['Euskara', ['eu-ES']],
['Français', ['fr-FR']],
['Galego', ['gl-ES']],
['Hrvatski', ['hr_HR']],
['IsiZulu', ['zu-ZA']],
['Íslenska', ['is-IS']],
['Italiano', ['it-IT', 'Italia'],
['it-CH', 'Svizzera']],
['Magyar', ['hu-HU']],
['Nederlands', ['nl-NL']],
['Norsk bokmål', ['nb-NO']],
['Polski', ['pl-PL']],
['Português', ['pt-BR', 'Brasil'],
['pt-PT', 'Portugal']],
['Română', ['ro-RO']],
['Slovenčina', ['sk-SK']],
['Suomi', ['fi-FI']],
['Svenska', ['sv-SE']],
['Türkçe', ['tr-TR']],
['български', ['bg-BG']],
['Pусский', ['ru-RU']],
['Српски', ['sr-RS']],
['한국어', ['ko-KR']],
['中文', ['cmn-Hans-CN', '普通话 (中国大陆)'],
['cmn-Hans-HK', '普通话 (香港)'],
['cmn-Hant-TW', '中文 (台灣)'],
['yue-Hant-HK', '粵語 (香港)']],
['日本語', ['ja-JP']],
['Lingua latīna', ['la']]];
var create_email = false;
var final_transcript = '';
var recognizing = false;
var ignore_onend;
var start_timestamp;
var recognition;
// Page initialization: populate the language menus, configure the FME Server
// client, and wire up webkitSpeechRecognition handlers (if supported).
$(document).ready(function() {
  // Build the language dropdown from the langs table; option value = index.
  for (var i = 0; i < langs.length; i++) {
    select_language.options[i] = new Option(langs[i][0], i);
  }
  // Default to English (index 6) / United States (dialect index 6).
  select_language.selectedIndex = 6;
  updateCountry();
  select_dialect.selectedIndex = 6;
  showInfo('info_start');
  // NOTE(review): server URL and API token are hardcoded in client-side
  // JavaScript, so they are visible to every visitor -- confirm the token is
  // deliberately public / restricted.
  FMEServer.init({
    server : "https://bluesky-safe-software.fmecloud.com",
    token : "ede6ebfc78ba58b4f69a47ee804a613d0628eb33"
  });
  if (!('webkitSpeechRecognition' in window)) {
    // Browser has no Web Speech API support: hide mic, show upgrade notice.
    upgrade();
  } else {
    start_button.style.display = 'inline-block';
    recognition = new webkitSpeechRecognition();
    recognition.continuous = true;
    recognition.interimResults = false;
    // Recognition started: flip state and show the animated mic.
    recognition.onstart = function() {
      recognizing = true;
      showInfo('info_speak_now');
      start_img.src = 'https://www.google.com/intl/en/chrome/assets/common/images/content/mic-animate.gif';
    };
    // Map recognition errors to user-facing info messages; ignore_onend
    // stops the onend handler from overwriting the error message.
    recognition.onerror = function(event) {
      if (event.error == 'no-speech') {
        start_img.src = 'https://www.google.com/intl/en/chrome/assets/common/images/content/mic.gif';
        showInfo('info_no_speech');
        ignore_onend = true;
      }
      if (event.error == 'audio-capture') {
        start_img.src = 'https://www.google.com/intl/en/chrome/assets/common/images/content/mic.gif';
        showInfo('info_no_microphone');
        ignore_onend = true;
      }
      if (event.error == 'not-allowed') {
        // A near-instant denial means permission was blocked previously.
        if (event.timeStamp - start_timestamp < 100) {
          showInfo('info_blocked');
        } else {
          showInfo('info_denied');
        }
        ignore_onend = true;
      }
    };
    // Recognition finished: restore the UI, select the transcript, and
    // optionally trigger email creation.
    recognition.onend = function() {
      recognizing = false;
      if (ignore_onend) {
        return;
      }
      start_img.src = 'https://www.google.com/intl/en/chrome/assets/common/images/content/mic.gif';
      if (!final_transcript) {
        showInfo('info_start');
        return;
      }
      showInfo('');
      // Select the final transcript so it is easy to copy.
      if (window.getSelection) {
        window.getSelection().removeAllRanges();
        var range = document.createRange();
        range.selectNode(document.getElementById('final_span'));
        window.getSelection().addRange(range);
      }
      if (create_email) {
        create_email = false;
        createEmail();
      }
    };
    // Accumulate finalized results; mirror text into the spans and the
    // #call-content form field.
    recognition.onresult = function(event) {
      var interim_transcript = '';
      for (var i = event.resultIndex; i < event.results.length; ++i) {
        if (event.results[i].isFinal) {
          final_transcript += event.results[i][0].transcript;
        } else {
          interim_transcript += event.results[i][0].transcript;
        }
      }
      final_transcript = capitalize(final_transcript);
      final_span.innerHTML = linebreak(final_transcript);
      interim_span.innerHTML = linebreak(interim_transcript);
      if (final_transcript || interim_transcript) {
        $('#call-content').val(final_transcript);
      }
    };
  }
});
// Rebuild the dialect <select> to match the currently selected language.
function updateCountry() {
  // Drop every existing dialect option before repopulating.
  while (select_dialect.options.length > 0) {
    select_dialect.remove(select_dialect.options.length - 1);
  }
  var dialects = langs[select_language.selectedIndex];
  // Entry 0 is the language label; entries 1..n are [code, label] pairs.
  for (var idx = 1; idx < dialects.length; idx++) {
    select_dialect.options.add(new Option(dialects[idx][1], dialects[idx][0]));
  }
  // Hide the selector when the language has a single, label-less dialect.
  select_dialect.style.visibility =
      dialects[1].length == 1 ? 'hidden' : 'visible';
}
// Shown when the browser lacks webkitSpeechRecognition: hide the mic button
// and display the upgrade notice.
function upgrade() {
  showInfo('info_upgrade');
  start_button.style.visibility = 'hidden';
}
// Convert plain-text line breaks to HTML: blank lines (double newline)
// become paragraph breaks, remaining single newlines become <br>.
var two_line = /\n\n/g;
var one_line = /\n/g;
function linebreak(s) {
  var withParagraphs = s.replace(two_line, '<p></p>');
  return withParagraphs.replace(one_line, '<br>');
}
// Upper-case the first non-whitespace character of s (leading whitespace is
// preserved).
var first_char = /\S/;
function capitalize(s) {
  return s.replace(first_char, function(ch) {
    return ch.toUpperCase();
  });
}
// Click handler for the microphone button: toggles speech recognition.
// A click while recognizing stops the session; otherwise it resets the
// transcript state and starts a new session in the selected dialect.
function startButton(event) {
  if (recognizing) {
    recognition.stop();
    return;
  }
  final_transcript = '';
  recognition.lang = select_dialect.value;
  recognition.start();
  // Re-arm onend handling (may have been disabled by a previous error).
  ignore_onend = false;
  final_span.innerHTML = '';
  interim_span.innerHTML = '';
  start_img.src = 'https://www.google.com/intl/en/chrome/assets/common/images/content/mic-slash.gif';
  showInfo('info_allow');
  // Recorded so onerror can distinguish an instant (pre-blocked) permission
  // denial from the user actively denying the prompt.
  start_timestamp = event.timeStamp;
}
// Display exactly one child of the #info element -- the one whose id equals
// 's' -- and hide the rest.  A falsy 's' hides the whole info area.
function showInfo(s) {
  if (!s) {
    info.style.visibility = 'hidden';
    return;
  }
  for (var child = info.firstChild; child; child = child.nextSibling) {
    // Skip text nodes, which have no style property.
    if (child.style) {
      child.style.display = (child.id == s) ? 'inline' : 'none';
    }
  }
  info.style.visibility = 'visible';
}
// Callback for FMEServer.submitJob: pops the success dialog.
// NOTE(review): the 'json' response payload is ignored, so job failures are
// also reported as success here -- confirm that is intended.
function showResults( json ) {
  $('#success-modal').modal('show');
}
// Collect every non-button input of #fme-form into FME "published
// parameters" ({name, value} pairs) and submit the TwilioService workspace.
function submitJob() {
  var params = { "publishedParameters" : [] };
  $('#fme-form :input').each(function() {
    if (this.type == "button") {
      return;  // buttons are UI-only; skip them
    }
    params.publishedParameters.push({ "name" : this.name, "value" : this.value });
  });
  FMEServer.submitJob( 'Stewart', 'TwilioService.fmw', params, showResults );
}
nullcodeexecutor/rural | modules/core/src/main/java/org/rural/render/FreeMarkerView.java | 2152 | package org.rural.render;
import freemarker.template.Configuration;
import freemarker.template.DefaultObjectWrapper;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.rural.core.ActionContext;
import org.rural.context.RuralContext;
import org.rural.exception.RuralException;
import org.rural.ui.Model;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
/**
* Created by yuantao on 2014/8/31.
*/
/**
 * {@link View} implementation that renders pages through FreeMarker
 * templates.  A singleton: obtain the shared instance via {@link #me()}.
 * Templates are loaded from the web application's real path plus the
 * configured page location, and resolved as "&lt;viewName&gt;.ftl".
 */
public class FreeMarkerView extends AbstractView {
	private final static Log log = LogFactory.getLog(FreeMarkerView.class);

	private static View view = null;

	// Shared FreeMarker configuration, built once when the singleton is created.
	private Configuration cfg = null;

	static {
		view = new FreeMarkerView();
	}

	private FreeMarkerView() {
		initConfiguration();
	}

	/** Returns the shared singleton instance. */
	public static View me() {
		return view;
	}

	/**
	 * Builds the FreeMarker configuration, pointing the template loader at the
	 * page location configured for this application.
	 *
	 * @throws RuralException if the template directory cannot be set
	 */
	private void initConfiguration() {
		ServletContext servletContext = ActionContext.context().getServletContext();
		cfg = new Configuration();
		try {
			File loadPath = new File(servletContext.getRealPath("") + RuralContext.context().getRuralConfigBean().getPageLocation());
			log.info("freemarker load path: " + loadPath.getAbsolutePath());
			cfg.setDirectoryForTemplateLoading(loadPath);
		} catch (IOException e) {
			throw new RuralException(e);
		}
		cfg.setObjectWrapper(new DefaultObjectWrapper());
	}

	/**
	 * Renders the template named "viewName.ftl" with 'model' as its data model
	 * and writes the result to the response.
	 *
	 * @throws RuralException wrapping any TemplateException from FreeMarker
	 */
	@Override
	public void forward(String viewName, HttpServletRequest request, HttpServletResponse response, Model model) throws ServletException, IOException {
		Template template = cfg.getTemplate(viewName + ".ftl");
		PrintWriter out = response.getWriter();
		try {
			template.process(model, out);
		} catch (TemplateException e) {
			throw new RuralException(e);
		} finally {
			// Previously the writer was only closed on success, leaking it
			// whenever template processing threw.
			out.close();
		}
	}
}
| apache-2.0 |
cloudera/Impala | tests/custom_cluster/test_restart_services.py | 20577 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import pytest
import psutil
import re
import socket
import time
from tests.common.environ import build_flavor_timeout
from time import sleep
from impala.error import HiveServer2Error
from TCLIService import TCLIService
from tests.beeswax.impala_beeswax import ImpalaBeeswaxException
from tests.common.custom_cluster_test_suite import CustomClusterTestSuite
from tests.common.skip import SkipIfEC
from tests.hs2.hs2_test_suite import HS2TestSuite, needs_session
LOG = logging.getLogger(__name__)
class TestRestart(CustomClusterTestSuite):
  """Cluster-restart regression tests: restart the statestore or executor
  impalads and verify that a running coordinator recovers."""
  @classmethod
  def get_workload(cls):
    return 'functional-query'
  @pytest.mark.execute_serially
  def test_restart_statestore(self, cursor):
    """ Regression test of IMPALA-6973. After the statestore restarts, the metadata should
    eventually recover after being cleared by the new statestore.
    """
    self.cluster.statestored.restart()
    # We need to wait for the impalad to register to the new statestored and for a
    # non-empty catalog update from the new statestored. It cannot be expressed with the
    # existing metrics yet so we wait for some time here.
    wait_time_s = build_flavor_timeout(60, slow_build_timeout=100)
    sleep(wait_time_s)
    # After the fixed wait, poll for up to wait_time_s more seconds until the
    # catalog is usable again (one-second intervals).
    for retry in xrange(wait_time_s):
      try:
        cursor.execute("describe database functional")
        return
      except HiveServer2Error, e:
        # Only the "database does not exist" error is expected while metadata
        # is still being repopulated; anything else is a real failure.
        assert "AnalysisException: Database does not exist: functional" in e.message,\
            "Unexpected exception: " + e.message
        sleep(1)
    assert False, "Coordinator never received non-empty metadata from the restarted " \
        "statestore after {0} seconds".format(wait_time_s)
  @pytest.mark.execute_serially
  def test_restart_impala(self):
    """ This test aims to restart Impalad executor nodes between queries to exercise
    the cluster membership callback which removes stale connections to the restarted
    nodes."""
    self._start_impala_cluster([], num_coordinators=1, cluster_size=3)
    assert len(self.cluster.impalads) == 3
    client = self.cluster.impalads[0].service.create_beeswax_client()
    assert client is not None
    for i in xrange(5):
      self.execute_query_expect_success(client, "select * from functional.alltypes")
      # Alternate restarts between the two executor nodes (indices 1 and 2).
      node_to_restart = 1 + (i % 2)
      self.cluster.impalads[node_to_restart].restart()
      # Sleep for a bit for the statestore change in membership to propagate. The min
      # update frequency for statestore is 100ms but using a larger sleep time here
      # as certain builds (e.g. ASAN) can be really slow.
      sleep(3)
    client.close()
def parse_shutdown_result(result):
  """Parse the single-row shutdown result in 'result.data' and return the
  tuple of strings (grace left, deadline left, queries registered,
  queries executing)."""
  assert len(result.data) == 1
  summary = result.data[0]
  pattern = (r'startup grace period left: ([0-9ms]*), deadline left: ([0-9ms]*), '
             r'queries registered on coordinator: ([0-9]*), queries executing: ([0-9]*), '
             r'fragment instances: [0-9]*')
  match = re.match(pattern, summary)
  assert match is not None, summary
  return match.groups()
class TestShutdownCommand(CustomClusterTestSuite, HS2TestSuite):
IDLE_SHUTDOWN_GRACE_PERIOD_S = 1
@classmethod
def get_workload(cls):
return 'functional-query'
@pytest.mark.execute_serially
@CustomClusterTestSuite.with_args(
impalad_args="--shutdown_grace_period_s={grace_period} \
--hostname={hostname}".format(grace_period=IDLE_SHUTDOWN_GRACE_PERIOD_S,
hostname=socket.gethostname()))
def test_shutdown_idle(self):
"""Test that idle impalads shut down in a timely manner after the startup grace period
elapses."""
impalad1 = psutil.Process(self.cluster.impalads[0].get_pid())
impalad2 = psutil.Process(self.cluster.impalads[1].get_pid())
impalad3 = psutil.Process(self.cluster.impalads[2].get_pid())
# Test that a failed shut down from a bogus host or port fails gracefully.
ex = self.execute_query_expect_failure(self.client,
":shutdown('e6c00ca5cd67b567eb96c6ecfb26f05')")
assert "Could not find IPv4 address for:" in str(ex)
ex = self.execute_query_expect_failure(self.client, ":shutdown('localhost:100000')")
assert "Invalid port:" in str(ex)
assert ("This may be because the port specified is wrong.") not in str(ex)
# Test that pointing to the wrong thrift service (the HS2 port) fails gracefully-ish.
thrift_ports = [21051, 22001] # HS2 port, old backend port.
for port in thrift_ports:
ex = self.execute_query_expect_failure(self.client,
":shutdown('localhost:{0}')".format(port))
assert ("failed with error 'RemoteShutdown() RPC failed") in str(ex)
assert ("This may be because the port specified is wrong.") in str(ex)
# Test RPC error handling with debug action.
ex = self.execute_query_expect_failure(self.client, ":shutdown('localhost:27001')",
query_options={'debug_action': 'CRS_SHUTDOWN_RPC:FAIL'})
assert 'Rpc to 127.0.0.1:27001 failed with error \'Debug Action: ' \
'CRS_SHUTDOWN_RPC:FAIL' in str(ex)
# Test remote shutdown.
LOG.info("Start remote shutdown {0}".format(time.time()))
self.execute_query_expect_success(self.client, ":shutdown('localhost:27001')",
query_options={})
# Remote shutdown does not require statestore.
self.cluster.statestored.kill()
self.cluster.statestored.wait_for_exit()
self.execute_query_expect_success(self.client, ":shutdown('localhost:27002')",
query_options={})
# Test local shutdown, which should succeed even with injected RPC error.
LOG.info("Start local shutdown {0}".format(time.time()))
self.execute_query_expect_success(self.client,
":shutdown('{0}:27000')".format(socket.gethostname()),
query_options={'debug_action': 'CRS_SHUTDOWN_RPC:FAIL'})
# Make sure that the impala daemons exit after the startup grace period plus a 10
# second margin of error.
start_time = time.time()
LOG.info("Waiting for impalads to exit {0}".format(start_time))
impalad1.wait()
LOG.info("First impalad exited {0}".format(time.time()))
impalad2.wait()
LOG.info("Second impalad exited {0}".format(time.time()))
impalad3.wait()
LOG.info("Third impalad exited {0}".format(time.time()))
shutdown_duration = time.time() - start_time
assert shutdown_duration <= self.IDLE_SHUTDOWN_GRACE_PERIOD_S + 10
EXEC_SHUTDOWN_GRACE_PERIOD_S = 5
EXEC_SHUTDOWN_DEADLINE_S = 10
@pytest.mark.execute_serially
@SkipIfEC.scheduling
@CustomClusterTestSuite.with_args(
impalad_args="--shutdown_grace_period_s={grace_period} \
--shutdown_deadline_s={deadline} \
--hostname={hostname}".format(grace_period=EXEC_SHUTDOWN_GRACE_PERIOD_S,
deadline=EXEC_SHUTDOWN_DEADLINE_S, hostname=socket.gethostname()))
def test_shutdown_executor(self):
self.do_test_shutdown_executor(fetch_delay_s=0)
@pytest.mark.execute_serially
@SkipIfEC.scheduling
@CustomClusterTestSuite.with_args(
impalad_args="--shutdown_grace_period_s={grace_period} \
--shutdown_deadline_s={deadline} \
--stress_status_report_delay_ms={status_report_delay_ms} \
--hostname={hostname}".format(grace_period=EXEC_SHUTDOWN_GRACE_PERIOD_S,
deadline=EXEC_SHUTDOWN_DEADLINE_S, status_report_delay_ms=5000,
hostname=socket.gethostname()))
def test_shutdown_executor_with_delay(self):
"""Regression test for IMPALA-7931 that adds delays to status reporting and
to fetching of results to trigger races that previously resulted in query failures."""
print self.exploration_strategy
if self.exploration_strategy() != 'exhaustive':
pytest.skip()
self.do_test_shutdown_executor(fetch_delay_s=5)
def do_test_shutdown_executor(self, fetch_delay_s):
"""Implementation of test that shuts down and then restarts an executor. This should
not disrupt any queries that start after the shutdown or complete before the shutdown
time limit. The test is parameterized by 'fetch_delay_s', the amount to delay before
fetching from the query that must survive shutdown of an executor."""
# Add sleeps to make sure that the query takes a couple of seconds to execute on the
# executors.
QUERY = "select count(*) from functional_parquet.alltypes where sleep(1) = bool_col"
# Subtle: use a splittable file format like text for lineitem so that each backend
# is guaranteed to get scan ranges that contain some actual rows. With Parquet on
# S3, the files get broken into 32MB scan ranges and a backend might get unlucky
# and only get scan ranges that don't contain the midpoint of any row group, and
# therefore not actually produce any rows.
SLOW_QUERY = "select count(*) from tpch.lineitem where sleep(1) = l_orderkey"
SHUTDOWN_EXEC2 = ": shutdown('localhost:27001')"
# Run this query before shutdown and make sure that it executes successfully on
# all executors through the startup grace period without disruption.
before_shutdown_handle = self.__exec_and_wait_until_running(QUERY)
# Run this query which simulates getting stuck in admission control until after
# the startup grace period expires. This exercises the code path where the
# coordinator terminates the query before it has started up.
before_shutdown_admission_handle = self.execute_query_async(QUERY,
{'debug_action': 'CRS_BEFORE_ADMISSION:SLEEP@30000'})
# Shut down and wait for the shutdown state to propagate through statestore.
result = self.execute_query_expect_success(self.client, SHUTDOWN_EXEC2)
assert parse_shutdown_result(result) == (
"{0}s000ms".format(self.EXEC_SHUTDOWN_GRACE_PERIOD_S),
"{0}s000ms".format(self.EXEC_SHUTDOWN_DEADLINE_S), "0", "1")
# Check that the status is reflected on the debug page.
web_json = self.cluster.impalads[1].service.get_debug_webpage_json("")
assert web_json.get('is_quiescing', None) is True, web_json
assert 'shutdown_status' in web_json, web_json
self.impalad_test_service.wait_for_num_known_live_backends(2,
timeout=self.EXEC_SHUTDOWN_GRACE_PERIOD_S + 5, interval=0.2,
include_shutting_down=False)
# Run another query, which shouldn't get scheduled on the new executor. We'll let
# this query continue running through the full shutdown and restart cycle.
after_shutdown_handle = self.__exec_and_wait_until_running(QUERY)
# Wait for the impalad to exit, then start it back up and run another query, which
# should be scheduled on it again.
self.cluster.impalads[1].wait_for_exit()
# Finish fetching results from the first query (which will be buffered on the
# coordinator) after the backend exits. Add a delay before fetching to ensure
# that the query is not torn down on the coordinator when the failure is
# detected by the statestore (see IMPALA-7931).
assert self.__fetch_and_get_num_backends(
QUERY, before_shutdown_handle, delay_s=fetch_delay_s) == 3
# Confirm that the query stuck in admission failed.
self.__check_deadline_expired(QUERY, before_shutdown_admission_handle)
# Start the impalad back up and run another query, which should be scheduled on it
# again.
self.cluster.impalads[1].start()
self.impalad_test_service.wait_for_num_known_live_backends(
3, timeout=30, interval=0.2, include_shutting_down=False)
after_restart_handle = self.__exec_and_wait_until_running(QUERY)
# The query started while the backend was shut down should not run on that backend.
assert self.__fetch_and_get_num_backends(QUERY, after_shutdown_handle) == 2
assert self.__fetch_and_get_num_backends(QUERY, after_restart_handle) == 3
# Test that a query will fail when the executor shuts down after the limit.
deadline_expiry_handle = self.__exec_and_wait_until_running(SLOW_QUERY)
result = self.execute_query_expect_success(self.client, SHUTDOWN_EXEC2)
assert parse_shutdown_result(result) == (
"{0}s000ms".format(self.EXEC_SHUTDOWN_GRACE_PERIOD_S),
"{0}s000ms".format(self.EXEC_SHUTDOWN_DEADLINE_S), "0", "1")
self.cluster.impalads[1].wait_for_exit()
self.__check_deadline_expired(SLOW_QUERY, deadline_expiry_handle)
# Test that we can reduce the deadline after setting it to a high value.
# Run a query that will fail as a result of the reduced deadline.
deadline_expiry_handle = self.__exec_and_wait_until_running(SLOW_QUERY)
SHUTDOWN_EXEC3 = ": shutdown('localhost:27002', {0})"
VERY_HIGH_DEADLINE = 5000
HIGH_DEADLINE = 1000
LOW_DEADLINE = 5
result = self.execute_query_expect_success(
self.client, SHUTDOWN_EXEC3.format(HIGH_DEADLINE))
grace, deadline, _, _ = parse_shutdown_result(result)
assert grace == "{0}s000ms".format(self.EXEC_SHUTDOWN_GRACE_PERIOD_S)
assert deadline == "{0}m{1}s".format(HIGH_DEADLINE / 60, HIGH_DEADLINE % 60)
result = self.execute_query_expect_success(
self.client, SHUTDOWN_EXEC3.format(VERY_HIGH_DEADLINE))
_, deadline, _, _ = parse_shutdown_result(result)
LOG.info("Deadline is {0}".format(deadline))
min_string, sec_string = re.match("([0-9]*)m([0-9]*)s", deadline).groups()
assert int(min_string) * 60 + int(sec_string) <= HIGH_DEADLINE, \
"Cannot increase deadline " + deadline
result = self.execute_query_expect_success(
self.client, SHUTDOWN_EXEC3.format(LOW_DEADLINE))
_, deadline, _, queries_executing = parse_shutdown_result(result)
assert deadline == "{0}s000ms".format(LOW_DEADLINE)
assert int(queries_executing) > 0, "Slow query should still be running."
self.cluster.impalads[2].wait_for_exit()
self.__check_deadline_expired(SLOW_QUERY, deadline_expiry_handle)
COORD_SHUTDOWN_GRACE_PERIOD_S = 5
COORD_SHUTDOWN_DEADLINE_S = 120
@pytest.mark.execute_serially
@CustomClusterTestSuite.with_args(
impalad_args="--shutdown_grace_period_s={grace_period} \
--shutdown_deadline_s={deadline} \
--hostname={hostname}".format(
grace_period=COORD_SHUTDOWN_GRACE_PERIOD_S,
deadline=COORD_SHUTDOWN_DEADLINE_S, hostname=socket.gethostname()),
default_query_options=[("num_scanner_threads", "1")])
@needs_session(TCLIService.TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6,
close_session=False)
def test_shutdown_coordinator(self):
"""Test that shuts down the coordinator. Running queries should finish but new
requests should be rejected."""
# Start a query running. This should complete successfully and keep the coordinator
# up until it finishes. We set NUM_SCANNER_THREADS=1 above to make the runtime more
# predictable.
SLOW_QUERY = """select * from tpch_parquet.lineitem where sleep(1) < l_orderkey"""
SHUTDOWN = ": shutdown()"
SHUTDOWN_ERROR_PREFIX = 'Server is being shut down:'
before_shutdown_handle = self.__exec_and_wait_until_running(SLOW_QUERY)
before_shutdown_hs2_handle = self.execute_statement(SLOW_QUERY).operationHandle
# Shut down the coordinator. Operations that start after this point should fail.
result = self.execute_query_expect_success(self.client, SHUTDOWN)
grace, deadline, registered, _ = parse_shutdown_result(result)
assert grace == "{0}s000ms".format(self.COORD_SHUTDOWN_GRACE_PERIOD_S)
assert deadline == "{0}m".format(self.COORD_SHUTDOWN_DEADLINE_S / 60), "4"
assert registered == "3"
# Expect that the beeswax shutdown error occurs when calling fn()
def expect_beeswax_shutdown_error(fn):
try:
fn()
except ImpalaBeeswaxException, e:
assert SHUTDOWN_ERROR_PREFIX in str(e)
expect_beeswax_shutdown_error(lambda: self.client.execute("select 1"))
expect_beeswax_shutdown_error(lambda: self.client.execute_async("select 1"))
# Test that the HS2 shutdown error occurs for various HS2 operations.
self.execute_statement("select 1", None, TCLIService.TStatusCode.ERROR_STATUS,
SHUTDOWN_ERROR_PREFIX)
def check_hs2_shutdown_error(hs2_response):
HS2TestSuite.check_response(hs2_response, TCLIService.TStatusCode.ERROR_STATUS,
SHUTDOWN_ERROR_PREFIX)
check_hs2_shutdown_error(self.hs2_client.OpenSession(TCLIService.TOpenSessionReq()))
check_hs2_shutdown_error(self.hs2_client.GetInfo(TCLIService.TGetInfoReq(
self.session_handle, TCLIService.TGetInfoType.CLI_MAX_DRIVER_CONNECTIONS)))
check_hs2_shutdown_error(self.hs2_client.GetTypeInfo(
TCLIService.TGetTypeInfoReq(self.session_handle)))
check_hs2_shutdown_error(self.hs2_client.GetCatalogs(
TCLIService.TGetCatalogsReq(self.session_handle)))
check_hs2_shutdown_error(self.hs2_client.GetSchemas(
TCLIService.TGetSchemasReq(self.session_handle)))
check_hs2_shutdown_error(self.hs2_client.GetTables(
TCLIService.TGetTablesReq(self.session_handle)))
check_hs2_shutdown_error(self.hs2_client.GetTableTypes(
TCLIService.TGetTableTypesReq(self.session_handle)))
check_hs2_shutdown_error(self.hs2_client.GetColumns(
TCLIService.TGetColumnsReq(self.session_handle)))
check_hs2_shutdown_error(self.hs2_client.GetFunctions(
TCLIService.TGetFunctionsReq(self.session_handle, functionName="")))
# Operations on running HS2 query still work.
self.fetch_until(before_shutdown_hs2_handle,
TCLIService.TFetchOrientation.FETCH_NEXT, 10)
HS2TestSuite.check_response(self.hs2_client.CancelOperation(
TCLIService.TCancelOperationReq(before_shutdown_hs2_handle)))
HS2TestSuite.check_response(self.hs2_client.CloseOperation(
TCLIService.TCloseOperationReq(before_shutdown_hs2_handle)))
# Make sure that the beeswax query is still executing, then close it to allow the
# coordinator to shut down.
self.impalad_test_service.wait_for_query_state(self.client, before_shutdown_handle,
self.client.QUERY_STATES['FINISHED'], timeout=20)
self.client.close_query(before_shutdown_handle)
self.cluster.impalads[0].wait_for_exit()
def __exec_and_wait_until_running(self, query, timeout=20):
  """Execute 'query' with self.client and wait until it is in the RUNNING state.

  'timeout' controls how long we will wait for the query to reach RUNNING."""
  # Fix number of scanner threads to make runtime more deterministic.
  handle = self.execute_query_async(query, {'num_scanner_threads': 1})
  # Bug fix: pass the caller-supplied 'timeout' through instead of the
  # hard-coded 20s that silently ignored the parameter.
  self.impalad_test_service.wait_for_query_state(self.client, handle,
      self.client.QUERY_STATES['RUNNING'], timeout=timeout)
  return handle
def __fetch_and_get_num_backends(self, query, handle, delay_s=0):
  """Fetch the results of 'query' from the beeswax handle 'handle', close the
  query and return the number of backends obtained from the profile."""
  # Wait for completion first so the runtime profile is fully populated.
  finished_state = self.client.QUERY_STATES['FINISHED']
  self.impalad_test_service.wait_for_query_state(
      self.client, handle, finished_state, timeout=20)
  if delay_s > 0:
    LOG.info("sleeping for {0}s".format(delay_s))
    time.sleep(delay_s)
  self.client.fetch(query, handle)
  profile = self.client.get_runtime_profile(handle)
  self.client.close_query(handle)
  # Extract the backend count from the profile text.
  match = re.search("NumBackends: ([0-9]*)", profile)
  assert match is not None, profile
  return int(match.group(1))
def __check_deadline_expired(self, query, handle):
  """Check that the query with 'handle' fails because of a backend hitting the
  deadline and shutting down."""
  try:
    self.client.fetch(query, handle)
  except Exception as e:
    # Py3-compatible 'as' syntax (was the Python-2-only 'except Exception, e').
    assert 'Failed due to unreachable impalad(s)' in str(e)
  else:
    # Moved out of the 'try' so this AssertionError is not swallowed by the
    # except clause above when the fetch unexpectedly succeeds.
    assert False, "Expected query to fail"
| apache-2.0 |
adbrucker/SecureBPMN | designer/src/org.activiti.designer.gui/src/main/java/org/activiti/designer/security/features/CreateSecurityBodFeature.java | 2801 | /* Copyright 2012-2015 SAP SE
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.designer.security.features;
import org.activiti.designer.features.AbstractCreateFastBPMNFeature;
import org.eclipse.bpmn2.SubProcess;
import org.eclipse.graphiti.features.IFeatureProvider;
import org.eclipse.graphiti.features.context.ICreateContext;
import org.eclipse.graphiti.mm.pictograms.Diagram;
import org.eclipse.securebpmn2.BindingOfDuty;
import org.eclipse.securebpmn2.Securebpmn2Factory;
/**
 * Graphiti create-feature that adds a SecureBPMN "Binding of Duty" (BoD)
 * element either to the diagram itself or inside a sub-process.
 */
public class CreateSecurityBodFeature extends AbstractCreateFastBPMNFeature {

    // Key under which the framework registers this create feature.
    public static final String FEATURE_ID_KEY = "securityBod";

    public CreateSecurityBodFeature(IFeatureProvider fp) {
        super(fp, "BindingOfDuty", "Add Binding of Duty");
    }

    /** A BoD may only be dropped on the diagram or on a sub-process. */
    @Override
    public boolean canCreate(ICreateContext context) {
        Object parentObject = getBusinessObjectForPictogramElement(context.getTargetContainer());
        return (context.getTargetContainer() instanceof Diagram || parentObject instanceof SubProcess);
    }

    /**
     * Creates a BindingOfDuty business object, attaches it to the target
     * container (a sub-process's flow elements, otherwise the diagram's
     * resource), adds its graphical representation and activates direct
     * editing so the user can type the name immediately.
     */
    @Override
    public Object[] create(ICreateContext context) {
        BindingOfDuty bod = Securebpmn2Factory.eINSTANCE.createBindingOfDuty();
        bod.setId(getNextId());
        bod.setName("bod");
        // Dynamic (runtime) enforcement is disabled for newly created BoDs.
        bod.setDynamicEnforcement(false);
        Object parentObject = getBusinessObjectForPictogramElement(context.getTargetContainer());
        if (parentObject instanceof SubProcess) {
            ((SubProcess) parentObject).getFlowElements().add(bod);
        } else {
            getDiagram().eResource().getContents().add(bod);
        }
        addGraphicalContent(bod, context);
        // activate direct editing after object creation
        getFeatureProvider().getDirectEditingInfo().setActive(true);
        return new Object[] { bod };
    }

    @Override
    public String getCreateImageId() {
        return "org.activiti.designer.security.bod";
    }

    @Override
    protected String getFeatureIdKey() {
        return FEATURE_ID_KEY;
    }

    // NOTE(review): instantiates a throw-away BindingOfDuty just to obtain its
    // runtime class — consider caching the Class object if this is called often.
    @SuppressWarnings("rawtypes")
    @Override
    protected Class getFeatureClass() {
        return Securebpmn2Factory.eINSTANCE.createBindingOfDuty().getClass();
    }
}
| apache-2.0 |
zhantss/LuceneTool | src/test/test/orm/Sex.java | 69 | package test.orm;
/** Gender options used by the test.orm examples. */
public enum Sex {
    Man,
    Woman,
    // Gender not disclosed.
    Private
}
| apache-2.0 |
MaxDeg/NinthChevron | NinthChevron.Data/Entity/ColumnAttribute.cs | 1510 | /**
* Copyright 2013
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace NinthChevron.Data.Entity
{
/// <summary>
/// Marks a property as mapped to a database column and records the column's
/// metadata (name, key/identity flags, nullability).
/// </summary>
[AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
public class ColumnAttribute : Attribute
{
    /// <summary>Database column name.</summary>
    public string Name { get; private set; }
    /// <summary>True when the column is part of the primary key.</summary>
    public bool IsPrimaryKey { get; private set; }
    /// <summary>True when the column value is database-generated (identity).</summary>
    public bool IsIdentity { get; private set; }
    /// <summary>True when the column accepts NULL.</summary>
    public bool IsNullable { get; private set; }

    /// <summary>Creates a fully specified column mapping.</summary>
    public ColumnAttribute(string name, bool isPrimaryKey, bool isIdentity, bool isNullable)
    {
        this.Name = name;
        this.IsPrimaryKey = isPrimaryKey;
        this.IsIdentity = isIdentity;
        this.IsNullable = isNullable;
    }

    /// <summary>Creates a plain (non-key, non-identity) column mapping.</summary>
    public ColumnAttribute(string name, bool isNullable)
        : this(name, false, false, isNullable)
    { }
}
}
| apache-2.0 |
fifa2002nb/twos | server/Lib/Model/AccountingSubjectModel.class.php | 412 | <?php
/**
* @filename AccountingSubjectModel.class.php
* @encoding UTF-8
* @author nemo.xiaolan <a href="mailto:335454250@qq.com">335454250@qq.com</a>
* @link <a href="http://www.sep-v.com">http://www.sep-v.com</a>
* @license http://www.sep-v.com/code-license
* @datetime 2013-12-30 13:58:21
* @Description
*
*/
class AccountingSubjectModel extends CommonTreeModel {
    // Accounting subjects are stored as a tree; all tree behaviour is
    // inherited unchanged from CommonTreeModel — no overrides needed yet.
}
?>
| apache-2.0 |
ausaccessfed/saml-service | spec/models/shibmd/scope_spec.rb | 371 | # frozen_string_literal: true
require 'rails_helper'
# A scope record is only valid when it carries its owning role descriptor,
# value, regexp flag and both timestamps.
RSpec.describe SHIBMD::Scope, type: :model do
  it { is_expected.to validate_presence :role_descriptor }
  it { is_expected.to validate_presence :value }
  it { is_expected.to validate_presence :regexp }
  it { is_expected.to validate_presence :created_at }
  it { is_expected.to validate_presence :updated_at }
end
| apache-2.0 |
thearkhelist/Pkcs11Interop | src/Pkcs11Interop/Pkcs11Interop/LowLevelAPI81/MechanismParams/CK_EXTRACT_PARAMS.cs | 1231 | /*
* Copyright 2012-2016 The Pkcs11Interop Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Written for the Pkcs11Interop project by:
* Jaroslav IMRICH <jimrich@jimrich.sk>
*/
using System.Runtime.InteropServices;
namespace Net.Pkcs11Interop.LowLevelAPI81.MechanismParams
{
/// <summary>
/// Provides the parameter to the CKM_EXTRACT_KEY_FROM_KEY mechanism
/// </summary>
[StructLayout(LayoutKind.Sequential, Pack = 1, CharSet = CharSet.Unicode)]
public struct CK_EXTRACT_PARAMS
{
    /// <summary>
    /// Specifies which bit of the base key should be used as the first bit
    /// of the derived key (marshalled here as a 64-bit unsigned integer)
    /// </summary>
    public ulong Bit;
}
| apache-2.0 |
ceefour/glut101 | glut111/glut111.cpp | 9339 | // OpenGL
// - Function to load bitmap
// - Texture Mapping Magnification Filter
// filter=0 --> Nearest Filtered Texture
// filter=1 --> Linear Interpolation Texture
// filter=2 --> Mipmapped Texture
#include "stdafx.h"
#include <stdlib.h>
#include <stdarg.h>
#include <string>
#include <stdio.h>
#include "../glut/glut.h"
#include <iostream>
#include "bmp.h"
using namespace std;
// Camera distance along -Z (keyboard-controlled) and the cube's current
// rotation angle in degrees (advanced by the timer callback).
float z_pos = -5.0f;
float rot = 0.0f;

// Light parameters applied to GL_LIGHT1 in init().
GLfloat LightAmbient[] = { 0.5f, 0.5f, 0.5f, 1.0f };
GLfloat LightDiffuse[] = { 1.0f, 1.0f, 1.0f, 1.0f };
GLfloat LightPosition[] = { 0.0f, 0.0f, 2.0f, 1.0f };

/* array to hold texture handles */
GLuint filter; // Which Filter To Use (0 = nearest, 1 = linear, 2 = mipmap)
GLuint texture[3]; // Storage For 3 Textures (crate.bmp)
GLuint specialTexture[3]; // Storage For 3 Textures (special.bmp)
int LoadGLTextures() // Load Bitmaps And Convert To Textures
{
int Status = FALSE; // Status Indicator
AUX_RGBImageRec *TextureImage[1]; // Create Storage Space For The Texture
memset(TextureImage, 0, sizeof(void *) * 1); // Set The Pointer To NULL
// Load The Bitmap, Check For Errors, If Bitmap's Not Found Quit
string textureFile("crate.bmp");
if (TextureImage[0] = LoadBMP((char *)textureFile.c_str()))
{
Status = TRUE;
// Set The Status To TRUE
glGenTextures(3, &texture[0]); // Create Three Textures
// Create Nearest Filtered Texture
glBindTexture(GL_TEXTURE_2D, texture[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, 3, TextureImage[0]->sizeX,
TextureImage[0]->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, TextureImage[0]->data);
// Create Linear Filtered Texture
glBindTexture(GL_TEXTURE_2D, texture[1]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, 3, TextureImage[0]->sizeX,
TextureImage[0]->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, TextureImage[0]->data);
// Create MipMapped Texture
glBindTexture(GL_TEXTURE_2D, texture[2]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
gluBuild2DMipmaps(GL_TEXTURE_2D, 3, TextureImage[0]->sizeX,
TextureImage[0]->sizeY, GL_RGB, GL_UNSIGNED_BYTE, TextureImage[0]->data);
}
else {
cout << "Error: Cannot load texture file '" << textureFile << "'" << endl;
}
if (TextureImage[0]) // If Texture Exists
{
if (TextureImage[0]->data) // If Texture Image Exists
{
free(TextureImage[0]->data); // Free The Texture Image Memory
}
free(TextureImage[0]); // Free The Image Structure
}
return Status;
// Return The Status
}
int loadSpecialTextures() // Load Bitmaps And Convert To Textures
{
int Status = FALSE; // Status Indicator
AUX_RGBImageRec *TextureImage[1]; // Create Storage Space For The Texture
memset(TextureImage, 0, sizeof(void *) * 1); // Set The Pointer To NULL
// Load The Bitmap, Check For Errors, If Bitmap's Not Found Quit
string textureFile("special.bmp");
if (TextureImage[0] = LoadBMP((char *)textureFile.c_str()))
{
Status = TRUE;
// Set The Status To TRUE
glGenTextures(3, &specialTexture[0]); // Create Three Textures
// Create Nearest Filtered Texture
glBindTexture(GL_TEXTURE_2D, specialTexture[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, 3, TextureImage[0]->sizeX,
TextureImage[0]->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, TextureImage[0]->data);
// Create Linear Filtered Texture
glBindTexture(GL_TEXTURE_2D, specialTexture[1]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, 3, TextureImage[0]->sizeX,
TextureImage[0]->sizeY, 0, GL_RGB, GL_UNSIGNED_BYTE, TextureImage[0]->data);
// Create MipMapped Texture
glBindTexture(GL_TEXTURE_2D, specialTexture[2]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_NEAREST);
gluBuild2DMipmaps(GL_TEXTURE_2D, 3, TextureImage[0]->sizeX,
TextureImage[0]->sizeY, GL_RGB, GL_UNSIGNED_BYTE, TextureImage[0]->data);
}
else {
cout << "Error: Cannot load special texture file '" << textureFile << "'" << endl;
}
if (TextureImage[0]) // If Texture Exists
{
if (TextureImage[0]->data) // If Texture Image Exists
{
free(TextureImage[0]->data); // Free The Texture Image Memory
}
free(TextureImage[0]); // Free The Image Structure
}
return Status;
// Return The Status
}
// Window reshape callback: track the new client area and rebuild the
// projection matrix with a 45-degree vertical field of view.
void resize(int width, int height)
{
	glViewport(0, 0, width, height);

	const float aspect = (float)width / (float)height;
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	gluPerspective(45.0, aspect, 1.0, 300.0);

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
}
// Timer callback: advance the animation, request a redraw, re-arm the timer.
void myTimeOut(int id)
{
	rot = rot + 10.0f;                // spin the cube a bit further
	glutPostRedisplay();              // request redisplay
	glutTimerFunc(100, myTimeOut, 0); // request next timer event in 100 ms
}
// Keyboard handler: '<'/',' moves the cube away, '>'/'.' brings it closer,
// 'f'/'F' cycles through the three texture filters (0..2).
void myKeyboard(unsigned char key, int x, int y)
{
	if ((key == '<') || (key == ',')) {
		z_pos -= 0.1f;
	} else if ((key == '>') || (key == '.')) {
		z_pos += 0.1f;
	} else if ((key == 'F') || (key == 'f')) {
		// Bug fix: the original tested (key = 'f') — an assignment that is
		// always true — so *any* unmatched key press advanced the filter.
		filter += 1;
		if (filter > 2) {
			filter = 0;
		}
		printf("filter: %i", filter);
	}
}
// Render one frame: a rotating cube.  The front face is drawn with the
// "special" texture set, the remaining five faces with the crate texture;
// the global 'filter' selects which filtering variant (nearest/linear/mipmap)
// of each set is bound.
void mydisplay(void)
{
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
	glLoadIdentity();
	glTranslatef(0.0, 0.0f, z_pos); // camera distance controlled by keyboard
	glRotatef(rot, 0, 1, 0);        // rotation angle advanced by the timer
	glEnable(GL_BLEND); // Turn Blending On
	glColor4f(1.0, 1.0, 1.0, 1.0); // Blending Color
	//glDisable(GL_DEPTH_TEST); // Turn Depth Testing Off
	// Front face uses the special texture set.
	glBindTexture(GL_TEXTURE_2D, specialTexture[filter]);
	glBegin(GL_QUADS);
	// Front Face
	glColor4f(1.0, 1.0, 1.0, 1.0);
	glNormal3f(0.0f, 0.0f, 1.0f);
	glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 1.0f);
	glTexCoord2f(1.0f, 0.0f); glVertex3f(1.0f, -1.0f, 1.0f);
	glTexCoord2f(1.0f, 1.0f); glVertex3f(1.0f, 1.0f, 1.0f);
	glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, 1.0f);
	glEnd();
	// Remaining five faces use the crate texture; each face gets its own
	// tint colour and an outward-facing normal for the lighting set up in init().
	glBindTexture(GL_TEXTURE_2D, texture[filter]);
	glBegin(GL_QUADS);
	// Back Face
	glColor4f(1.0, 0.0, 1.0, 1.0);
	glNormal3f(0.0f, 0.0f, -1.0f);
	glTexCoord2f(1.0f, 0.0f); glVertex3f(-1.0f, -1.0f, -1.0f);
	glTexCoord2f(1.0f, 1.0f); glVertex3f(-1.0f, 1.0f, -1.0f);
	glTexCoord2f(0.0f, 1.0f); glVertex3f(1.0f, 1.0f, -1.0f);
	glTexCoord2f(0.0f, 0.0f); glVertex3f(1.0f, -1.0f, -1.0f);
	// Top Face
	glColor4f(1.0, 1.0, 0.0, 1.0);
	glNormal3f(0.0f, 1.0f, 0.0f);
	glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, -1.0f);
	glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, 1.0f, 1.0f);
	glTexCoord2f(1.0f, 0.0f); glVertex3f(1.0f, 1.0f, 1.0f);
	glTexCoord2f(1.0f, 1.0f); glVertex3f(1.0f, 1.0f, -1.0f);
	// Bottom Face
	glColor4f(0.0, 1.0, 1.0, 1.0);
	glNormal3f(0.0f, -1.0f, 0.0f);
	glTexCoord2f(1.0f, 1.0f); glVertex3f(-1.0f, -1.0f, -1.0f);
	glTexCoord2f(0.0f, 1.0f); glVertex3f(1.0f, -1.0f, -1.0f);
	glTexCoord2f(0.0f, 0.0f); glVertex3f(1.0f, -1.0f, 1.0f);
	glTexCoord2f(1.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 1.0f);
	// Right face
	glColor4f(0.0, 1.0, 0.0, 1.0);
	glNormal3f(1.0f, 0.0f, 0.0f);
	glTexCoord2f(1.0f, 0.0f); glVertex3f(1.0f, -1.0f, -1.0f);
	glTexCoord2f(1.0f, 1.0f); glVertex3f(1.0f, 1.0f, -1.0f);
	glTexCoord2f(0.0f, 1.0f); glVertex3f(1.0f, 1.0f, 1.0f);
	glTexCoord2f(0.0f, 0.0f); glVertex3f(1.0f, -1.0f, 1.0f);
	// Left Face
	glColor4f(1.0, 0.0, 0.0, 1.0);
	glNormal3f(-1.0f, 0.0f, 0.0f);
	glTexCoord2f(0.0f, 0.0f); glVertex3f(-1.0f, -1.0f, -1.0f);
	glTexCoord2f(1.0f, 0.0f); glVertex3f(-1.0f, -1.0f, 1.0f);
	glTexCoord2f(1.0f, 1.0f); glVertex3f(-1.0f, 1.0f, 1.0f);
	glTexCoord2f(0.0f, 1.0f); glVertex3f(-1.0f, 1.0f, -1.0f);
	glEnd();
	glFlush();
	glutSwapBuffers(); // double-buffered window (GLUT_DOUBLE in _tmain)
}
// One-time GL state setup: loads both texture sets and configures texturing,
// depth testing, lighting and blending.  Bails out early (leaving texturing
// unconfigured) if either texture file cannot be loaded.
void init()
{
	if (!LoadGLTextures()) // Jump To Texture Loading Routine
	{
		return; // crate texture could not be loaded — abort setup
	}
	if (!loadSpecialTextures()) // Jump To Texture Loading Routine
	{
		return; // special texture could not be loaded — abort setup
	}
	glEnable(GL_TEXTURE_2D); // Enable Texture Mapping
	glShadeModel(GL_SMOOTH); // Enable Smooth Shading
	glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // Black Background
	glClearDepth(1.0f);
	// Depth Buffer Setup
	glEnable(GL_DEPTH_TEST); // Enables Depth Testing
	glDepthFunc(GL_LEQUAL); // The Type Of Depth Testing To Do
	glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST); // Really Nice Perspective Calculations
	glLightfv(GL_LIGHT1, GL_AMBIENT, LightAmbient); // Setup The Ambient Light
	glLightfv(GL_LIGHT1, GL_DIFFUSE, LightDiffuse); // Setup The Diffuse Light
	glLightfv(GL_LIGHT1, GL_POSITION, LightPosition); // Position The Light
	glEnable(GL_LIGHT1);
	glColor4f(1.0f, 1.0f, 1.0f, 0.5); // Full Brightness. 50 % Alpha
	glBlendFunc(GL_SRC_ALPHA, GL_ONE); // additive blending used by mydisplay()
	return;
}
// Entry point: create a 500x500 double-buffered, depth-tested GLUT window,
// register the callbacks, initialise GL state and enter the main loop.
int _tmain(int argc, _TCHAR* argv[])
{
	glutInit(&argc, (char**)argv);
	glutInitDisplayMode(GLUT_DOUBLE | GLUT_DEPTH);
	glutInitWindowSize(500, 500);
	glutInitWindowPosition(0, 0);
	glutCreateWindow("simple");
	// callbacks
	glutDisplayFunc(mydisplay);
	glutKeyboardFunc(myKeyboard);
	glutTimerFunc(100, myTimeOut, 0);
	glutReshapeFunc(resize);
	init();
	glutMainLoop(); // does not return under classic GLUT
	return 0;
}
| apache-2.0 |
boneman1231/org.apache.felix | trunk/upnp/basedriver/src/main/java/org/apache/felix/upnp/basedriver/export/GeneralActionListener.java | 4850 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.upnp.basedriver.export;
import java.util.Dictionary;
import java.util.Properties;
import org.cybergarage.upnp.Action;
import org.cybergarage.upnp.Argument;
import org.cybergarage.upnp.ArgumentList;
import org.cybergarage.upnp.UPnPStatus;
import org.cybergarage.upnp.control.ActionListener;
import org.osgi.framework.Constants;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceEvent;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.service.upnp.UPnPAction;
import org.osgi.service.upnp.UPnPDevice;
import org.osgi.service.upnp.UPnPException;
import org.osgi.service.upnp.UPnPService;
import org.apache.felix.upnp.basedriver.Activator;
import org.apache.felix.upnp.basedriver.util.Converter;
/*
* @author <a href="mailto:dev@felix.apache.org">Felix Project Team</a>
*/
/**
 * Bridges UPnP control requests from the CyberGarage stack to an OSGi
 * {@link UPnPService}: incoming action invocations are translated into calls
 * on the corresponding {@link UPnPAction} of a registered {@link UPnPDevice},
 * and the results are written back into the UPnP argument list.
 */
public class GeneralActionListener implements ServiceListener,ActionListener {

	// Reference to the OSGi UPnPDevice whose service is being exported.
	private ServiceReference dev;
	// Id of the UPnPService within that device handled by this listener.
	private String id;
	// Gate checked at the start of every control request.
	// NOTE(review): never set to false anywhere in this class — presumably
	// the UNREGISTERING branch of serviceChanged() should close it; confirm.
	private boolean open;

	/**
	 * @param sr reference to the exported UPnPDevice service
	 * @param serviceId id of the UPnPService handled by this listener
	 */
	public GeneralActionListener(ServiceReference sr, String serviceId) {
		try {
			// Track the device's service lifecycle so this listener can be
			// detached when the underlying service goes away.
			Activator.bc.addServiceListener(this,
				"("+Constants.SERVICE_ID+"="+sr.getProperty(Constants.SERVICE_ID)+")");
		} catch (InvalidSyntaxException ingnored) {}
		this.dev=sr;
		this.id=serviceId;
		this.open=true;
	}

	/**
	 * Handles a UPnP control request: parses the UPnP string arguments into
	 * the OSGi types dictated by the related state variables, invokes the
	 * matching UPnPAction and serialises the results back.
	 *
	 * @return true when the action executed and all output arguments were
	 *         written successfully, false otherwise
	 * @see org.cybergarage.upnp.control.ActionListener#actionControlReceived(org.cybergarage.upnp.Action)
	 */
	public synchronized boolean actionControlReceived(Action upnpAct) {
		if(!open) return false;
		UPnPService osgiServ=null;
		try{
			osgiServ=((UPnPDevice) Activator.bc.getService(dev)).getService(id);
		}catch(Exception ignored){}
		if(osgiServ==null)
			return exiting(false);
		UPnPAction osgiAct = osgiServ.getAction(upnpAct.getName());
		Properties inArgs = null;
		ArgumentList alIn = upnpAct.getInputArgumentList();
		ArgumentList alOut = upnpAct.getOutputArgumentList();
		String[] inArg = osgiAct.getInputArgumentNames();
		boolean invalidAction=false;
		if(inArg!=null){
			// Convert each UPnP input value to the OSGi type of its related
			// state variable; any parse failure aborts the invocation.
			inArgs = new Properties();
			Argument arg;
			for (int j = 0; j < inArg.length; j++) {
				arg=alIn.getArgument(inArg[j]);
				try {
					inArgs.put(
						inArg[j],
						Converter.parseString(
							arg.getValue(),
							arg.getRelatedStateVariable().getDataType()
							/*osgiServ.getStateVariable(arg.getRelatedStateVariableName()).getUPnPDataType()*/
						)
					);
				} catch (Exception e) {
					invalidAction=true;
					break;
				}
			}
		}
		Dictionary outArgs=null;
		try {
			outArgs=osgiAct.invoke(inArgs);
		} catch (UPnPException e) {
			// Propagate the action-specific UPnP error code to the caller.
			//TODO Activator.logger.log()
			upnpAct.setStatus(e.getUPnPError_Code(),e.getMessage());
			invalidAction=true;
		} catch (Exception e){
			//TODO Activator.logger.log()
			upnpAct.setStatus(UPnPStatus.ACTION_FAILED);
			invalidAction=true;
		}
		if(invalidAction)
			return exiting(false);
		String[] outArg = osgiAct.getOutputArgumentNames();
		if(outArg!=null){
			// Serialise each OSGi result back into its UPnP output argument.
			Argument arg;
			for (int j = 0; j < outArg.length; j++) {
				arg = alOut.getArgument(outArg[j]);
				try {
					arg.setValue(
						Converter.toString(
							outArgs.get(outArg[j]),
							arg.getRelatedStateVariable().getDataType()
							/*osgiServ.getStateVariable(arg.getRelatedStateVariableName()).getUPnPDataType()*/
						)
					);
				} catch (Exception e) {
					e.printStackTrace();
					return exiting(false);
				}
			}
		}
		return exiting(true);
	}

	/**
	 * Releases the device service obtained in actionControlReceived() and
	 * passes the result flag through unchanged.
	 */
	private boolean exiting(boolean b) {
		Activator.bc.ungetService(dev);
		return b;
	}

	/**
	 * Detaches this listener when the exported device service is unregistered.
	 * @see org.osgi.framework.ServiceListener#serviceChanged(org.osgi.framework.ServiceEvent)
	 */
	public void serviceChanged(ServiceEvent e) {
		if(e.getType()==ServiceEvent.UNREGISTERING){
			Activator.bc.removeServiceListener(this);
		}
	}
}
| apache-2.0 |
snahelou/awx | awx/ui/client/src/organizations/linkout/controllers/organizations-inventories.controller.js | 11145 | /*************************************************
* Copyright (c) 2016 Ansible, Inc.
*
* All Rights Reserved
*************************************************/
export default ['$scope', '$rootScope', '$location',
'$stateParams', '$compile', '$filter', 'Rest', 'InventoryList',
'OrgInventoryDataset', 'OrgInventoryList',
'ProcessErrors', 'GetBasePath', 'Wait', 'Find', 'Empty', '$state', 'i18n',
function($scope, $rootScope, $location,
$stateParams, $compile, $filter, Rest, InventoryList,
Dataset, OrgInventoryList,
ProcessErrors, GetBasePath, Wait,
Find, Empty, $state, i18n) {
var list = OrgInventoryList,
orgBase = GetBasePath('organizations');
init();
function init() {
// search init
$scope.list = list;
$scope[`${list.iterator}_dataset`] = Dataset.data;
$scope[list.name] = $scope[`${list.iterator}_dataset`].results;
$rootScope.flashMessage = null;
Rest.setUrl(orgBase + $stateParams.organization_id);
Rest.get()
.success(function(data) {
$scope.organization_name = data.name;
$scope.name = data.name;
$scope.org_id = data.id;
$scope.orgRelatedUrls = data.related;
});
$scope.$watch('inventories', ()=>{
_.forEach($scope.inventories, processInventoryRow);
});
}
function processInventoryRow(item) {
if (item.has_inventory_sources) {
if (item.inventory_sources_with_failures > 0) {
item.syncStatus = 'error';
item.syncTip = item.inventory_sources_with_failures + ' groups with sync failures. Click for details';
} else {
item.syncStatus = 'successful';
item.syncTip = 'No inventory sync failures. Click for details.';
}
} else {
item.syncStatus = 'na';
item.syncTip = 'Not configured for inventory sync.';
item.launch_class = "btn-disabled";
}
if (item.has_active_failures) {
item.hostsStatus = 'eritemror';
item.hostsTip = item.hosts_with_active_failures + ' hosts with failures. Click for details.';
} else if (item.total_hosts) {
item.hostsStatus = 'successful';
item.hostsTip = 'No hosts with failures. Click for details.';
} else {
item.hostsStatus = 'none';
item.hostsTip = 'Inventory contains 0 hosts.';
}
item.kind_label = item.kind === '' ? 'Inventory' : (item.kind === 'smart' ? i18n._('Smart Inventory'): i18n._('Inventory'));
return item;
}
function ellipsis(a) {
if (a.length > 20) {
return a.substr(0, 20) + '...';
}
return a;
}
function attachElem(event, html, title) {
var elem = $(event.target).parent();
try {
elem.tooltip('hide');
elem.popover('destroy');
} catch (err) {
//ignore
}
$('.popover').each(function() {
// remove lingering popover <div>. Seems to be a bug in TB3 RC1
$(this).remove();
});
$('.tooltip').each(function() {
// close any lingering tool tipss
$(this).hide();
});
elem.attr({
"aw-pop-over": html,
"data-popover-title": title,
"data-placement": "right"
});
$compile(elem)($scope);
elem.on('shown.bs.popover', function() {
$('.popover').each(function() {
$compile($(this))($scope); //make nested directives work!
});
$('.popover-content, .popover-title').click(function() {
elem.popover('hide');
});
});
elem.popover('show');
}
if ($scope.removeHostSummaryReady) {
$scope.removeHostSummaryReady();
}
$scope.removeHostSummaryReady = $scope.$on('HostSummaryReady', function(e, event, data) {
var html, title = "Recent Jobs";
Wait('stop');
if (data.count > 0) {
html = "<table class=\"table table-condensed flyout\" style=\"width: 100%\">\n";
html += "<thead>\n";
html += "<tr>";
html += "<th>Status</th>";
html += "<th>Finished</th>";
html += "<th>Name</th>";
html += "</tr>\n";
html += "</thead>\n";
html += "<tbody>\n";
data.results.forEach(function(row) {
html += "<tr>\n";
html += "<td><a href=\"#/jobs/" + row.id + "\" " + "aw-tool-tip=\"" + row.status.charAt(0).toUpperCase() + row.status.slice(1) +
". Click for details\" aw-tip-placement=\"top\"><i class=\"fa icon-job-" + row.status + "\"></i></a></td>\n";
html += "<td>" + ($filter('longDate')(row.finished)).replace(/ /, '<br />') + "</td>";
html += "<td><a href=\"#/jobs/" + row.id + "\" " + "aw-tool-tip=\"" + row.status.charAt(0).toUpperCase() + row.status.slice(1) +
". Click for details\" aw-tip-placement=\"top\">" + ellipsis(row.name) + "</a></td>";
html += "</tr>\n";
});
html += "</tbody>\n";
html += "</table>\n";
} else {
html = "<p>No recent job data available for this inventory.</p>\n";
}
attachElem(event, html, title);
});
if ($scope.removeGroupSummaryReady) {
$scope.removeGroupSummaryReady();
}
$scope.removeGroupSummaryReady = $scope.$on('GroupSummaryReady', function(e, event, inventory, data) {
var html, title;
Wait('stop');
// Build the html for our popover
html = "<table class=\"table table-condensed flyout\" style=\"width: 100%\">\n";
html += "<thead>\n";
html += "<tr>";
html += "<th>Status</th>";
html += "<th>Last Sync</th>";
html += "<th>Group</th>";
html += "</tr>";
html += "</thead>\n";
html += "<tbody>\n";
data.results.forEach(function(row) {
if (row.related.last_update) {
html += "<tr>";
html += "<td><a href=\"\" ng-click=\"viewJob('" + row.related.last_update + "')\" aw-tool-tip=\"" + row.status.charAt(0).toUpperCase() + row.status.slice(1) + ". Click for details\" aw-tip-placement=\"top\"><i class=\"fa icon-job-" + row.status + "\"></i></a></td>";
html += "<td>" + ($filter('longDate')(row.last_updated)).replace(/ /, '<br />') + "</td>";
html += "<td><a href=\"\" ng-click=\"viewJob('" + row.related.last_update + "')\">" + ellipsis(row.summary_fields.group.name) + "</a></td>";
html += "</tr>\n";
} else {
html += "<tr>";
html += "<td><a href=\"\" aw-tool-tip=\"No sync data\" aw-tip-placement=\"top\"><i class=\"fa icon-job-none\"></i></a></td>";
html += "<td>NA</td>";
html += "<td><a href=\"\">" + ellipsis(row.summary_fields.group.name) + "</a></td>";
html += "</tr>\n";
}
});
html += "</tbody>\n";
html += "</table>\n";
title = "Sync Status";
attachElem(event, html, title);
});
$scope.showGroupSummary = function(event, id) {
var inventory;
if (!Empty(id)) {
inventory = Find({ list: $scope.inventories, key: 'id', val: id });
if (inventory.syncStatus !== 'na') {
Wait('start');
Rest.setUrl(inventory.related.inventory_sources + '?or__source=ec2&or__source=rax&order_by=-last_job_run&page_size=5');
Rest.get()
.success(function(data) {
$scope.$emit('GroupSummaryReady', event, inventory, data);
})
.error(function(data, status) {
ProcessErrors($scope, data, status, null, {
hdr: 'Error!',
msg: 'Call to ' + inventory.related.inventory_sources + ' failed. GET returned status: ' + status
});
});
}
}
};
$scope.showHostSummary = function(event, id) {
var url, inventory;
if (!Empty(id)) {
inventory = Find({ list: $scope.inventories, key: 'id', val: id });
if (inventory.total_hosts > 0) {
Wait('start');
url = GetBasePath('jobs') + "?type=job&inventory=" + id + "&failed=";
url += (inventory.has_active_failures) ? 'true' : "false";
url += "&order_by=-finished&page_size=5";
Rest.setUrl(url);
Rest.get()
.success(function(data) {
$scope.$emit('HostSummaryReady', event, data);
})
.error(function(data, status) {
ProcessErrors($scope, data, status, null, {
hdr: 'Error!',
msg: 'Call to ' + url + ' failed. GET returned: ' + status
});
});
}
}
};
        // Open the stdout view for an inventory sync; the job id is the 4th
        // path segment of the related API URL.
        $scope.viewJob = function(url) {
            // Pull the id out of the URL
            var id = url.replace(/^\//, '').split('/')[3];
            $state.go('inventorySyncStdout', { id: id });
        };

        // Route to the smart- or standard-inventory edit form as appropriate.
        $scope.editInventory = function (inventory) {
            if(inventory.kind && inventory.kind === 'smart') {
                $state.go('inventories.editSmartInventory', {smartinventory_id: inventory.id});
            }
            else {
                $state.go('inventories.edit', {inventory_id: inventory.id});
            }
        };

        // Failed jobs link. Go to the jobs tabs, find all jobs for the inventory and sort by status
        $scope.viewJobs = function(id) {
            $location.url('/jobs/?inventory__int=' + id);
        };

        // Same as viewJobs, but pre-filtered to failed jobs only.
        $scope.viewFailedJobs = function(id) {
            $location.url('/jobs/?inventory__int=' + id + '&status=failed');
        };

        // Leave the linkout view and return to the organizations list.
        $scope.formCancel = function() {
            $state.go('organizations');
        };
    }
];
| apache-2.0 |
JuKu/test-rpg-game-server | game-database/src/main/java/com/jukusoft/libgdx/rpg/database/json/JSONSerializable.java | 243 | package com.jukusoft.libgdx.rpg.database.json;
import org.json.JSONObject;
/**
 * Implemented by objects that can be converted to and restored from a
 * {@link org.json.JSONObject}.
 *
 * Created by Justin on 24.03.2017.
 */
public interface JSONSerializable {
    /** @return a JSON representation of this object's state */
    public JSONObject toJSON ();
    /** Restores this object's state from the given JSON object. */
    public void loadFromJSON (JSONObject json);
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-servicecatalog/src/main/java/com/amazonaws/services/servicecatalog/AbstractAWSServiceCatalog.java | 18650 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.servicecatalog;
import javax.annotation.Generated;
import com.amazonaws.services.servicecatalog.model.*;
import com.amazonaws.*;
/**
* Abstract implementation of {@code AWSServiceCatalog}. Convenient method forms pass through to the corresponding
* overload that takes a request object, which throws an {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAWSServiceCatalog implements AWSServiceCatalog {
    // NOTE: code-generated boilerplate (see @Generated on the class) — every
    // operation deliberately throws UnsupportedOperationException so concrete
    // subclasses only override the calls they actually support.
    protected AbstractAWSServiceCatalog() {
    }

    @Override
    public void setEndpoint(String endpoint) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public void setRegion(com.amazonaws.regions.Region region) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AcceptPortfolioShareResult acceptPortfolioShare(AcceptPortfolioShareRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AssociateBudgetWithResourceResult associateBudgetWithResource(AssociateBudgetWithResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AssociatePrincipalWithPortfolioResult associatePrincipalWithPortfolio(AssociatePrincipalWithPortfolioRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AssociateProductWithPortfolioResult associateProductWithPortfolio(AssociateProductWithPortfolioRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AssociateServiceActionWithProvisioningArtifactResult associateServiceActionWithProvisioningArtifact(
            AssociateServiceActionWithProvisioningArtifactRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public AssociateTagOptionWithResourceResult associateTagOptionWithResource(AssociateTagOptionWithResourceRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public BatchAssociateServiceActionWithProvisioningArtifactResult batchAssociateServiceActionWithProvisioningArtifact(
            BatchAssociateServiceActionWithProvisioningArtifactRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public BatchDisassociateServiceActionFromProvisioningArtifactResult batchDisassociateServiceActionFromProvisioningArtifact(
            BatchDisassociateServiceActionFromProvisioningArtifactRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CopyProductResult copyProduct(CopyProductRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateConstraintResult createConstraint(CreateConstraintRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreatePortfolioResult createPortfolio(CreatePortfolioRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreatePortfolioShareResult createPortfolioShare(CreatePortfolioShareRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateProductResult createProduct(CreateProductRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateProvisionedProductPlanResult createProvisionedProductPlan(CreateProvisionedProductPlanRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateProvisioningArtifactResult createProvisioningArtifact(CreateProvisioningArtifactRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public CreateServiceActionResult createServiceAction(CreateServiceActionRequest request) {
        throw new java.lang.UnsupportedOperationException();
    }
@Override
public CreateTagOptionResult createTagOption(CreateTagOptionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteConstraintResult deleteConstraint(DeleteConstraintRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeletePortfolioResult deletePortfolio(DeletePortfolioRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeletePortfolioShareResult deletePortfolioShare(DeletePortfolioShareRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteProductResult deleteProduct(DeleteProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteProvisionedProductPlanResult deleteProvisionedProductPlan(DeleteProvisionedProductPlanRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteProvisioningArtifactResult deleteProvisioningArtifact(DeleteProvisioningArtifactRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteServiceActionResult deleteServiceAction(DeleteServiceActionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteTagOptionResult deleteTagOption(DeleteTagOptionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeConstraintResult describeConstraint(DescribeConstraintRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeCopyProductStatusResult describeCopyProductStatus(DescribeCopyProductStatusRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribePortfolioResult describePortfolio(DescribePortfolioRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribePortfolioShareStatusResult describePortfolioShareStatus(DescribePortfolioShareStatusRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribePortfolioSharesResult describePortfolioShares(DescribePortfolioSharesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeProductResult describeProduct(DescribeProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeProductAsAdminResult describeProductAsAdmin(DescribeProductAsAdminRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeProductViewResult describeProductView(DescribeProductViewRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeProvisionedProductResult describeProvisionedProduct(DescribeProvisionedProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeProvisionedProductPlanResult describeProvisionedProductPlan(DescribeProvisionedProductPlanRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeProvisioningArtifactResult describeProvisioningArtifact(DescribeProvisioningArtifactRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeProvisioningParametersResult describeProvisioningParameters(DescribeProvisioningParametersRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeRecordResult describeRecord(DescribeRecordRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeServiceActionResult describeServiceAction(DescribeServiceActionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeServiceActionExecutionParametersResult describeServiceActionExecutionParameters(DescribeServiceActionExecutionParametersRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeTagOptionResult describeTagOption(DescribeTagOptionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DisableAWSOrganizationsAccessResult disableAWSOrganizationsAccess(DisableAWSOrganizationsAccessRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DisassociateBudgetFromResourceResult disassociateBudgetFromResource(DisassociateBudgetFromResourceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DisassociatePrincipalFromPortfolioResult disassociatePrincipalFromPortfolio(DisassociatePrincipalFromPortfolioRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DisassociateProductFromPortfolioResult disassociateProductFromPortfolio(DisassociateProductFromPortfolioRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DisassociateServiceActionFromProvisioningArtifactResult disassociateServiceActionFromProvisioningArtifact(
DisassociateServiceActionFromProvisioningArtifactRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DisassociateTagOptionFromResourceResult disassociateTagOptionFromResource(DisassociateTagOptionFromResourceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public EnableAWSOrganizationsAccessResult enableAWSOrganizationsAccess(EnableAWSOrganizationsAccessRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ExecuteProvisionedProductPlanResult executeProvisionedProductPlan(ExecuteProvisionedProductPlanRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ExecuteProvisionedProductServiceActionResult executeProvisionedProductServiceAction(ExecuteProvisionedProductServiceActionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GetAWSOrganizationsAccessStatusResult getAWSOrganizationsAccessStatus(GetAWSOrganizationsAccessStatusRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GetProvisionedProductOutputsResult getProvisionedProductOutputs(GetProvisionedProductOutputsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ImportAsProvisionedProductResult importAsProvisionedProduct(ImportAsProvisionedProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListAcceptedPortfolioSharesResult listAcceptedPortfolioShares(ListAcceptedPortfolioSharesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListBudgetsForResourceResult listBudgetsForResource(ListBudgetsForResourceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListConstraintsForPortfolioResult listConstraintsForPortfolio(ListConstraintsForPortfolioRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListLaunchPathsResult listLaunchPaths(ListLaunchPathsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListOrganizationPortfolioAccessResult listOrganizationPortfolioAccess(ListOrganizationPortfolioAccessRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListPortfolioAccessResult listPortfolioAccess(ListPortfolioAccessRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListPortfoliosResult listPortfolios(ListPortfoliosRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListPortfoliosForProductResult listPortfoliosForProduct(ListPortfoliosForProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListPrincipalsForPortfolioResult listPrincipalsForPortfolio(ListPrincipalsForPortfolioRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListProvisionedProductPlansResult listProvisionedProductPlans(ListProvisionedProductPlansRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListProvisioningArtifactsResult listProvisioningArtifacts(ListProvisioningArtifactsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListProvisioningArtifactsForServiceActionResult listProvisioningArtifactsForServiceAction(ListProvisioningArtifactsForServiceActionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListRecordHistoryResult listRecordHistory(ListRecordHistoryRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListResourcesForTagOptionResult listResourcesForTagOption(ListResourcesForTagOptionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListServiceActionsResult listServiceActions(ListServiceActionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListServiceActionsForProvisioningArtifactResult listServiceActionsForProvisioningArtifact(ListServiceActionsForProvisioningArtifactRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListStackInstancesForProvisionedProductResult listStackInstancesForProvisionedProduct(ListStackInstancesForProvisionedProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListTagOptionsResult listTagOptions(ListTagOptionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ProvisionProductResult provisionProduct(ProvisionProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RejectPortfolioShareResult rejectPortfolioShare(RejectPortfolioShareRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ScanProvisionedProductsResult scanProvisionedProducts(ScanProvisionedProductsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public SearchProductsResult searchProducts(SearchProductsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public SearchProductsAsAdminResult searchProductsAsAdmin(SearchProductsAsAdminRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public SearchProvisionedProductsResult searchProvisionedProducts(SearchProvisionedProductsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public TerminateProvisionedProductResult terminateProvisionedProduct(TerminateProvisionedProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateConstraintResult updateConstraint(UpdateConstraintRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdatePortfolioResult updatePortfolio(UpdatePortfolioRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdatePortfolioShareResult updatePortfolioShare(UpdatePortfolioShareRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateProductResult updateProduct(UpdateProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateProvisionedProductResult updateProvisionedProduct(UpdateProvisionedProductRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateProvisionedProductPropertiesResult updateProvisionedProductProperties(UpdateProvisionedProductPropertiesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateProvisioningArtifactResult updateProvisioningArtifact(UpdateProvisioningArtifactRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateServiceActionResult updateServiceAction(UpdateServiceActionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateTagOptionResult updateTagOption(UpdateTagOptionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void shutdown() {
throw new java.lang.UnsupportedOperationException();
}
@Override
public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
}
| apache-2.0 |
TieWei/nova | nova/tests/test_quota.py | 97298 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo.config import cfg
from nova import compute
from nova.compute import flavors
from nova import context
from nova import db
from nova.db.sqlalchemy import api as sqa_api
from nova.db.sqlalchemy import models as sqa_models
from nova import exception
from nova.openstack.common import timeutils
from nova import quota
from nova import test
import nova.tests.image.fake
CONF = cfg.CONF
CONF.import_opt('compute_driver', 'nova.virt.driver')
class QuotaIntegrationTestCase(test.TestCase):
    """Integration-style checks that quota limits are enforced end-to-end.

    Drives the real compute/network APIs against a fake virt driver, with
    deliberately tight limits set in setUp so each test can exceed its
    quota with minimal work.
    """
    def setUp(self):
        super(QuotaIntegrationTestCase, self).setUp()
        # Tight limits: 2 instances / 4 cores / 1 floating IP.
        self.flags(compute_driver='nova.virt.fake.FakeDriver',
                   quota_instances=2,
                   quota_cores=4,
                   quota_floating_ips=1,
                   network_manager='nova.network.manager.FlatDHCPManager')
        # Apparently needed by the RPC tests...
        self.network = self.start_service('network')
        self.user_id = 'admin'
        self.project_id = 'admin'
        self.context = context.RequestContext(self.user_id,
                                              self.project_id,
                                              is_admin=True)
        # Stub the image service so no real Glance is needed.
        nova.tests.image.fake.stub_out_image_service(self.stubs)
    def tearDown(self):
        super(QuotaIntegrationTestCase, self).tearDown()
        nova.tests.image.fake.FakeImageService_reset()
    def _create_instance(self, cores=2):
        """Create a test instance row directly in the DB (bypasses quota)."""
        inst = {}
        inst['image_id'] = 'cedef40a-ed67-4d10-800e-17455edce175'
        inst['reservation_id'] = 'r-fakeres'
        inst['user_id'] = self.user_id
        inst['project_id'] = self.project_id
        inst['instance_type_id'] = '3' # m1.large
        inst['vcpus'] = cores
        return db.instance_create(self.context, inst)
    def test_too_many_instances(self):
        # Fill the instance quota, then expect the next boot to fail,
        # reporting both 'cores' and 'instances' as over quota.
        instance_uuids = []
        for i in range(CONF.quota_instances):
            instance = self._create_instance()
            instance_uuids.append(instance['uuid'])
        inst_type = flavors.get_flavor_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        try:
            compute.API().create(self.context, min_count=1, max_count=1,
                                 instance_type=inst_type, image_href=image_uuid)
        except exception.QuotaError, e:
            expected_kwargs = {'code': 413, 'resource': 'cores', 'req': 1,
                               'used': 4, 'allowed': 4, 'overs': 'cores,instances'}
            self.assertEqual(e.kwargs, expected_kwargs)
        else:
            self.fail('Expected QuotaError exception')
        for instance_uuid in instance_uuids:
            db.instance_destroy(self.context, instance_uuid)
    def test_too_many_cores(self):
        # A single 4-core instance consumes the whole cores quota; the
        # next boot must fail on 'cores' only.
        instance = self._create_instance(cores=4)
        inst_type = flavors.get_flavor_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        try:
            compute.API().create(self.context, min_count=1, max_count=1,
                                 instance_type=inst_type, image_href=image_uuid)
        except exception.QuotaError, e:
            expected_kwargs = {'code': 413, 'resource': 'cores', 'req': 1,
                               'used': 4, 'allowed': 4, 'overs': 'cores'}
            self.assertEqual(e.kwargs, expected_kwargs)
        else:
            self.fail('Expected QuotaError exception')
        db.instance_destroy(self.context, instance['uuid'])
    def test_many_cores_with_unlimited_quota(self):
        # Setting cores quota to unlimited:
        self.flags(quota_cores=-1)
        instance = self._create_instance(cores=4)
        inst_type = flavors.get_flavor_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        db.instance_destroy(self.context, instance['uuid'])
    def test_too_many_addresses(self):
        # One floating IP already allocated to the project exhausts the
        # quota (quota_floating_ips=1), so allocation must raise.
        address = '192.168.0.100'
        db.floating_ip_create(context.get_admin_context(),
                              {'address': address,
                               'project_id': self.project_id})
        self.assertRaises(exception.QuotaError,
                          self.network.allocate_floating_ip,
                          self.context,
                          self.project_id)
        db.floating_ip_destroy(context.get_admin_context(), address)
    def test_auto_assigned(self):
        address = '192.168.0.100'
        db.floating_ip_create(context.get_admin_context(),
                              {'address': address,
                               'project_id': self.project_id})
        # auto allocated addresses should not be counted
        self.assertRaises(exception.NoMoreFloatingIps,
                          self.network.allocate_floating_ip,
                          self.context,
                          self.project_id,
                          True)
        db.floating_ip_destroy(context.get_admin_context(), address)
    def test_too_many_metadata_items(self):
        # One item past quota_metadata_items must be rejected at create().
        metadata = {}
        for i in range(CONF.quota_metadata_items + 1):
            metadata['key%s' % i] = 'value%s' % i
        inst_type = flavors.get_flavor_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        self.assertRaises(exception.QuotaError, compute.API().create,
                          self.context,
                          min_count=1,
                          max_count=1,
                          instance_type=inst_type,
                          image_href=image_uuid,
                          metadata=metadata)
    def _create_with_injected_files(self, files):
        """Boot one instance with the given (path, contents) file list."""
        api = compute.API()
        inst_type = flavors.get_flavor_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        api.create(self.context, min_count=1, max_count=1,
                   instance_type=inst_type, image_href=image_uuid,
                   injected_files=files)
    def test_no_injected_files(self):
        api = compute.API()
        inst_type = flavors.get_flavor_by_name('m1.small')
        image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
        api.create(self.context,
                   instance_type=inst_type,
                   image_href=image_uuid)
    def test_max_injected_files(self):
        # Exactly at the limit is allowed.
        files = []
        for i in xrange(CONF.quota_injected_files):
            files.append(('/my/path%d' % i, 'config = test\n'))
        self._create_with_injected_files(files) # no QuotaError
    def test_too_many_injected_files(self):
        files = []
        for i in xrange(CONF.quota_injected_files + 1):
            files.append(('/my/path%d' % i, 'my\ncontent%d\n' % i))
        self.assertRaises(exception.QuotaError,
                          self._create_with_injected_files, files)
    def test_max_injected_file_content_bytes(self):
        # NOTE: `max` shadows the builtin within this method.
        max = CONF.quota_injected_file_content_bytes
        content = ''.join(['a' for i in xrange(max)])
        files = [('/test/path', content)]
        self._create_with_injected_files(files) # no QuotaError
    def test_too_many_injected_file_content_bytes(self):
        max = CONF.quota_injected_file_content_bytes
        content = ''.join(['a' for i in xrange(max + 1)])
        files = [('/test/path', content)]
        self.assertRaises(exception.QuotaError,
                          self._create_with_injected_files, files)
    def test_max_injected_file_path_bytes(self):
        max = CONF.quota_injected_file_path_bytes
        path = ''.join(['a' for i in xrange(max)])
        files = [(path, 'config = quotatest')]
        self._create_with_injected_files(files) # no QuotaError
    def test_too_many_injected_file_path_bytes(self):
        max = CONF.quota_injected_file_path_bytes
        path = ''.join(['a' for i in xrange(max + 1)])
        files = [(path, 'config = quotatest')]
        self.assertRaises(exception.QuotaError,
                          self._create_with_injected_files, files)
    def test_reservation_expire(self):
        # Reserve 2 instances with a 60s expiry, advance fake time past
        # it, and verify expire() releases the reservation.
        self.useFixture(test.TimeOverride())
        def assertInstancesReserved(reserved):
            # Helper: assert the project's current 'instances' reservation.
            result = quota.QUOTAS.get_project_quotas(self.context,
                                                     self.context.project_id)
            self.assertEqual(result['instances']['reserved'], reserved)
        quota.QUOTAS.reserve(self.context,
                             expire=60,
                             instances=2)
        assertInstancesReserved(2)
        timeutils.advance_time_seconds(80)
        quota.QUOTAS.expire(self.context)
        assertInstancesReserved(0)
class FakeContext(object):
    """Minimal stand-in for a request context used by the quota tests.

    Carries a fixed fake user id; only the project id and quota class
    vary per test.
    """

    def __init__(self, project_id, quota_class):
        self.project_id = project_id
        self.quota_class = quota_class
        self.user_id = 'fake_user'
        self.is_admin = False
        self.read_deleted = 'no'

    def elevated(self):
        """Return an admin-privileged copy of this context."""
        admin_ctx = type(self)(self.project_id, self.quota_class)
        admin_ctx.is_admin = True
        return admin_ctx
class FakeDriver(object):
    """Recording test double for a quota driver.

    Every public method logs its name plus positional arguments as a
    tuple in ``self.called`` so tests can assert on the exact call
    sequence.  Quota lookups are answered from the dictionaries supplied
    at construction time; missing entries raise the same exceptions as
    the real driver.
    """

    def __init__(self, by_project=None, by_user=None, by_class=None,
                 reservations=None):
        self.called = []
        # `or {}` (not an `is None` test) deliberately replaces any falsy
        # argument with a fresh container.
        self.by_project = by_project or {}
        self.by_user = by_user or {}
        self.by_class = by_class or {}
        self.reservations = reservations or []

    def _record(self, method, *args):
        # Append (method, arg1, arg2, ...) to the call log.
        self.called.append((method,) + args)

    def get_by_project_and_user(self, context, project_id, user_id, resource):
        self._record('get_by_project_and_user',
                     context, project_id, user_id, resource)
        user_quotas = self.by_user.get(user_id, {})
        if resource not in user_quotas:
            raise exception.ProjectUserQuotaNotFound(project_id=project_id,
                                                     user_id=user_id)
        return user_quotas[resource]

    def get_by_project(self, context, project_id, resource):
        self._record('get_by_project', context, project_id, resource)
        project_quotas = self.by_project.get(project_id, {})
        if resource not in project_quotas:
            raise exception.ProjectQuotaNotFound(project_id=project_id)
        return project_quotas[resource]

    def get_by_class(self, context, quota_class, resource):
        self._record('get_by_class', context, quota_class, resource)
        class_quotas = self.by_class.get(quota_class, {})
        if resource not in class_quotas:
            raise exception.QuotaClassNotFound(class_name=quota_class)
        return class_quotas[resource]

    def get_defaults(self, context, resources):
        self._record('get_defaults', context, resources)
        return resources

    def get_class_quotas(self, context, resources, quota_class,
                         defaults=True):
        self._record('get_class_quotas', context, resources, quota_class,
                     defaults)
        return resources

    def get_user_quotas(self, context, resources, project_id, user_id,
                        quota_class=None, defaults=True, usages=True):
        self._record('get_user_quotas', context, resources, project_id,
                     user_id, quota_class, defaults, usages)
        return resources

    def get_project_quotas(self, context, resources, project_id,
                           quota_class=None, defaults=True, usages=True,
                           remains=False):
        self._record('get_project_quotas', context, resources, project_id,
                     quota_class, defaults, usages, remains)
        return resources

    def limit_check(self, context, resources, values, project_id=None,
                    user_id=None):
        self._record('limit_check', context, resources, values, project_id,
                     user_id)

    def reserve(self, context, resources, deltas, expire=None,
                project_id=None, user_id=None):
        self._record('reserve', context, resources, deltas, expire,
                     project_id, user_id)
        return self.reservations

    def commit(self, context, reservations, project_id=None, user_id=None):
        self._record('commit', context, reservations, project_id, user_id)

    def rollback(self, context, reservations, project_id=None, user_id=None):
        self._record('rollback', context, reservations, project_id, user_id)

    def usage_reset(self, context, resources):
        self._record('usage_reset', context, resources)

    def destroy_all_by_project_and_user(self, context, project_id, user_id):
        self._record('destroy_all_by_project_and_user', context, project_id,
                     user_id)

    def destroy_all_by_project(self, context, project_id):
        self._record('destroy_all_by_project', context, project_id)

    def expire(self, context):
        self._record('expire', context)
class BaseResourceTestCase(test.TestCase):
    """Exercise quota.BaseResource: flag-backed defaults and the
    project/class override resolution performed by quota()."""

    def test_no_flag(self):
        # Without a backing config flag the default is unlimited (-1).
        res = quota.BaseResource('test_resource')
        self.assertEqual(res.name, 'test_resource')
        self.assertEqual(res.flag, None)
        self.assertEqual(res.default, -1)

    def test_with_flag(self):
        # A known flag supplies the default value.
        self.flags(quota_instances=10)
        res = quota.BaseResource('test_resource', 'quota_instances')
        self.assertEqual(res.name, 'test_resource')
        self.assertEqual(res.flag, 'quota_instances')
        self.assertEqual(res.default, 10)

    def test_with_flag_no_quota(self):
        # A flag explicitly set to -1 means "unlimited".
        self.flags(quota_instances=-1)
        res = quota.BaseResource('test_resource', 'quota_instances')
        self.assertEqual(res.name, 'test_resource')
        self.assertEqual(res.flag, 'quota_instances')
        self.assertEqual(res.default, -1)

    def test_quota_no_project_no_class(self):
        # No per-project or per-class override: fall back to the flag.
        self.flags(quota_instances=10)
        res = quota.BaseResource('test_resource', 'quota_instances')
        ctx = FakeContext(None, None)
        self.assertEqual(res.quota(FakeDriver(), ctx), 10)

    def test_quota_with_project_no_class(self):
        # A per-project override beats the flag default.
        self.flags(quota_instances=10)
        res = quota.BaseResource('test_resource', 'quota_instances')
        drv = FakeDriver(by_project={'test_project': {'test_resource': 15}})
        ctx = FakeContext('test_project', None)
        self.assertEqual(res.quota(drv, ctx), 15)

    def test_quota_no_project_with_class(self):
        # A per-class override beats the flag default.
        self.flags(quota_instances=10)
        res = quota.BaseResource('test_resource', 'quota_instances')
        drv = FakeDriver(by_class={'test_class': {'test_resource': 20}})
        ctx = FakeContext(None, 'test_class')
        self.assertEqual(res.quota(drv, ctx), 20)

    def test_quota_with_project_with_class(self):
        # When both overrides exist, the project override wins.
        self.flags(quota_instances=10)
        res = quota.BaseResource('test_resource', 'quota_instances')
        drv = FakeDriver(
            by_project={'test_project': {'test_resource': 15}},
            by_class={'test_class': {'test_resource': 20}})
        ctx = FakeContext('test_project', 'test_class')
        self.assertEqual(res.quota(drv, ctx), 15)

    def test_quota_override_project_with_class(self):
        # An explicit project_id argument overrides the context's project.
        self.flags(quota_instances=10)
        res = quota.BaseResource('test_resource', 'quota_instances')
        drv = FakeDriver(
            by_project={'test_project': {'test_resource': 15},
                        'override_project': {'test_resource': 20}})
        ctx = FakeContext('test_project', 'test_class')
        self.assertEqual(res.quota(drv, ctx, project_id='override_project'),
                         20)

    def test_quota_with_project_override_class(self):
        # An explicit quota_class argument overrides the context's class.
        self.flags(quota_instances=10)
        res = quota.BaseResource('test_resource', 'quota_instances')
        drv = FakeDriver(
            by_class={'test_class': {'test_resource': 15},
                      'override_class': {'test_resource': 20}})
        ctx = FakeContext('test_project', 'test_class')
        self.assertEqual(res.quota(drv, ctx, quota_class='override_class'),
                         20)
class QuotaEngineTestCase(test.TestCase):
def test_init(self):
quota_obj = quota.QuotaEngine()
self.assertEqual(quota_obj._resources, {})
self.assertTrue(isinstance(quota_obj._driver, quota.DbQuotaDriver))
def test_init_override_string(self):
quota_obj = quota.QuotaEngine(
quota_driver_class='nova.tests.test_quota.FakeDriver')
self.assertEqual(quota_obj._resources, {})
self.assertTrue(isinstance(quota_obj._driver, FakeDriver))
def test_init_override_obj(self):
quota_obj = quota.QuotaEngine(quota_driver_class=FakeDriver)
self.assertEqual(quota_obj._resources, {})
self.assertEqual(quota_obj._driver, FakeDriver)
def test_register_resource(self):
quota_obj = quota.QuotaEngine()
resource = quota.AbsoluteResource('test_resource')
quota_obj.register_resource(resource)
self.assertEqual(quota_obj._resources, dict(test_resource=resource))
def test_register_resources(self):
quota_obj = quota.QuotaEngine()
resources = [
quota.AbsoluteResource('test_resource1'),
quota.AbsoluteResource('test_resource2'),
quota.AbsoluteResource('test_resource3'),
]
quota_obj.register_resources(resources)
self.assertEqual(quota_obj._resources, dict(
test_resource1=resources[0],
test_resource2=resources[1],
test_resource3=resources[2],
))
def test_get_by_project_and_user(self):
context = FakeContext('test_project', 'test_class')
driver = FakeDriver(by_user=dict(
fake_user=dict(test_resource=42)))
quota_obj = quota.QuotaEngine(quota_driver_class=driver)
result = quota_obj.get_by_project_and_user(context, 'test_project',
'fake_user', 'test_resource')
self.assertEqual(driver.called, [
('get_by_project_and_user', context, 'test_project',
'fake_user', 'test_resource'),
])
self.assertEqual(result, 42)
def test_get_by_project(self):
context = FakeContext('test_project', 'test_class')
driver = FakeDriver(by_project=dict(
test_project=dict(test_resource=42)))
quota_obj = quota.QuotaEngine(quota_driver_class=driver)
result = quota_obj.get_by_project(context, 'test_project',
'test_resource')
self.assertEqual(driver.called, [
('get_by_project', context, 'test_project', 'test_resource'),
])
self.assertEqual(result, 42)
def test_get_by_class(self):
context = FakeContext('test_project', 'test_class')
driver = FakeDriver(by_class=dict(
test_class=dict(test_resource=42)))
quota_obj = quota.QuotaEngine(quota_driver_class=driver)
result = quota_obj.get_by_class(context, 'test_class', 'test_resource')
self.assertEqual(driver.called, [
('get_by_class', context, 'test_class', 'test_resource'),
])
self.assertEqual(result, 42)
def _make_quota_obj(self, driver):
quota_obj = quota.QuotaEngine(quota_driver_class=driver)
resources = [
quota.AbsoluteResource('test_resource4'),
quota.AbsoluteResource('test_resource3'),
quota.AbsoluteResource('test_resource2'),
quota.AbsoluteResource('test_resource1'),
]
quota_obj.register_resources(resources)
return quota_obj
    def test_get_defaults(self):
        # Engine passes its registered resource dict through to the
        # driver; FakeDriver echoes the resources back as the result.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        result = quota_obj.get_defaults(context)
        self.assertEqual(driver.called, [
                ('get_defaults', context, quota_obj._resources),
                ])
        self.assertEqual(result, quota_obj._resources)
    def test_get_class_quotas(self):
        # `defaults` flag defaults to True and is forwarded explicitly.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        result1 = quota_obj.get_class_quotas(context, 'test_class')
        result2 = quota_obj.get_class_quotas(context, 'test_class', False)
        self.assertEqual(driver.called, [
                ('get_class_quotas', context, quota_obj._resources,
                 'test_class', True),
                ('get_class_quotas', context, quota_obj._resources,
                 'test_class', False),
                ])
        self.assertEqual(result1, quota_obj._resources)
        self.assertEqual(result2, quota_obj._resources)
    def test_get_user_quotas(self):
        # Keyword arguments (quota_class/defaults/usages) are forwarded;
        # omitted kwargs reach the driver as (None, True, True).
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        result1 = quota_obj.get_user_quotas(context, 'test_project',
                                            'fake_user')
        result2 = quota_obj.get_user_quotas(context, 'test_project',
                                            'fake_user',
                                            quota_class='test_class',
                                            defaults=False,
                                            usages=False)
        self.assertEqual(driver.called, [
                ('get_user_quotas', context, quota_obj._resources,
                 'test_project', 'fake_user', None, True, True),
                ('get_user_quotas', context, quota_obj._resources,
                 'test_project', 'fake_user', 'test_class', False, False),
                ])
        self.assertEqual(result1, quota_obj._resources)
        self.assertEqual(result2, quota_obj._resources)
    def test_get_project_quotas(self):
        # Same forwarding contract as test_get_user_quotas but without a
        # user id; trailing False is the driver's `remains`-style flag.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        result1 = quota_obj.get_project_quotas(context, 'test_project')
        result2 = quota_obj.get_project_quotas(context, 'test_project',
                                               quota_class='test_class',
                                               defaults=False,
                                               usages=False)
        self.assertEqual(driver.called, [
                ('get_project_quotas', context, quota_obj._resources,
                 'test_project', None, True, True, False),
                ('get_project_quotas', context, quota_obj._resources,
                 'test_project', 'test_class', False, False, False),
                ])
        self.assertEqual(result1, quota_obj._resources)
        self.assertEqual(result2, quota_obj._resources)
    def test_count_no_resource(self):
        # Counting a resource that was never registered raises
        # QuotaResourceUnknown.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        self.assertRaises(exception.QuotaResourceUnknown,
                          quota_obj.count, context, 'test_resource5',
                          True, foo='bar')
    def test_count_wrong_resource(self):
        # test_resource1 is registered, but as an AbsoluteResource (not
        # countable), so count() must also reject it.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        self.assertRaises(exception.QuotaResourceUnknown,
                          quota_obj.count, context, 'test_resource1',
                          True, foo='bar')
    def test_count(self):
        # A CountableResource's count function receives the extra
        # positional/keyword args and its return value is passed through.
        def fake_count(context, *args, **kwargs):
            self.assertEqual(args, (True,))
            self.assertEqual(kwargs, dict(foo='bar'))
            return 5
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.register_resource(quota.CountableResource('test_resource5',
                                                            fake_count))
        result = quota_obj.count(context, 'test_resource5', True, foo='bar')
        self.assertEqual(result, 5)
    def test_limit_check(self):
        # limit_check() forwards the per-resource values dict plus two
        # trailing None placeholders (expire/project) to the driver.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.limit_check(context, test_resource1=4, test_resource2=3,
                              test_resource3=2, test_resource4=1)
        self.assertEqual(driver.called, [
                ('limit_check', context, quota_obj._resources, dict(
                        test_resource1=4,
                        test_resource2=3,
                        test_resource3=2,
                        test_resource4=1,
                        ), None, None),
                ])
    def test_reserve(self):
        # reserve() forwards deltas plus optional expire/project_id and
        # returns the driver-provided reservation id list each time.
        context = FakeContext(None, None)
        driver = FakeDriver(reservations=[
                'resv-01', 'resv-02', 'resv-03', 'resv-04',
                ])
        quota_obj = self._make_quota_obj(driver)
        result1 = quota_obj.reserve(context, test_resource1=4,
                                    test_resource2=3, test_resource3=2,
                                    test_resource4=1)
        result2 = quota_obj.reserve(context, expire=3600,
                                    test_resource1=1, test_resource2=2,
                                    test_resource3=3, test_resource4=4)
        result3 = quota_obj.reserve(context, project_id='fake_project',
                                    test_resource1=1, test_resource2=2,
                                    test_resource3=3, test_resource4=4)
        self.assertEqual(driver.called, [
                ('reserve', context, quota_obj._resources, dict(
                        test_resource1=4,
                        test_resource2=3,
                        test_resource3=2,
                        test_resource4=1,
                        ), None, None, None),
                ('reserve', context, quota_obj._resources, dict(
                        test_resource1=1,
                        test_resource2=2,
                        test_resource3=3,
                        test_resource4=4,
                        ), 3600, None, None),
                ('reserve', context, quota_obj._resources, dict(
                        test_resource1=1,
                        test_resource2=2,
                        test_resource3=3,
                        test_resource4=4,
                        ), None, 'fake_project', None),
                ])
        self.assertEqual(result1, [
                'resv-01', 'resv-02', 'resv-03', 'resv-04',
                ])
        self.assertEqual(result2, [
                'resv-01', 'resv-02', 'resv-03', 'resv-04',
                ])
        self.assertEqual(result3, [
                'resv-01', 'resv-02', 'resv-03', 'resv-04',
                ])
    def test_commit(self):
        # commit() passes the reservation list through with two trailing
        # None placeholders (project_id/user_id).
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.commit(context, ['resv-01', 'resv-02', 'resv-03'])
        self.assertEqual(driver.called, [
                ('commit', context, ['resv-01', 'resv-02', 'resv-03'], None,
                 None),
                ])
    def test_rollback(self):
        # rollback() mirrors commit()'s delegation shape.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.rollback(context, ['resv-01', 'resv-02', 'resv-03'])
        self.assertEqual(driver.called, [
                ('rollback', context, ['resv-01', 'resv-02', 'resv-03'], None,
                 None),
                ])
    def test_usage_reset(self):
        # usage_reset() forwards the resource-name list unchanged.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.usage_reset(context, ['res1', 'res2', 'res3'])
        self.assertEqual(driver.called, [
                ('usage_reset', context, ['res1', 'res2', 'res3']),
                ])
    def test_destroy_all_by_project_and_user(self):
        # Delegation check for the project+user destroy path.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.destroy_all_by_project_and_user(context,
                                                  'test_project', 'fake_user')
        self.assertEqual(driver.called, [
                ('destroy_all_by_project_and_user', context, 'test_project',
                 'fake_user'),
                ])
    def test_destroy_all_by_project(self):
        # Delegation check for the project-wide destroy path.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.destroy_all_by_project(context, 'test_project')
        self.assertEqual(driver.called, [
                ('destroy_all_by_project', context, 'test_project'),
                ])
    def test_expire(self):
        # expire() takes only the context.
        context = FakeContext(None, None)
        driver = FakeDriver()
        quota_obj = self._make_quota_obj(driver)
        quota_obj.expire(context)
        self.assertEqual(driver.called, [
                ('expire', context),
                ])
    def test_resources(self):
        # .resources reports registered resource names sorted, regardless
        # of the reverse order they were registered in _make_quota_obj().
        quota_obj = self._make_quota_obj(None)
        self.assertEqual(quota_obj.resources,
                         ['test_resource1', 'test_resource2',
                          'test_resource3', 'test_resource4'])
class DbQuotaDriverTestCase(test.TestCase):
    # Exercises the real DbQuotaDriver against stubbed-out db-layer
    # functions; self.calls records which db functions each driver
    # method actually invoked.
    def setUp(self):
        super(DbQuotaDriverTestCase, self).setUp()
        # Pin every quota-related config flag so expected limits in the
        # assertions below are deterministic.
        self.flags(quota_instances=10,
                   quota_cores=20,
                   quota_ram=50 * 1024,
                   quota_floating_ips=10,
                   quota_fixed_ips=10,
                   quota_metadata_items=128,
                   quota_injected_files=5,
                   quota_injected_file_content_bytes=10 * 1024,
                   quota_injected_file_path_bytes=255,
                   quota_security_groups=10,
                   quota_security_group_rules=20,
                   reservation_expire=86400,
                   until_refresh=0,
                   max_age=0,
                   )
        self.driver = quota.DbQuotaDriver()
        self.calls = []
        # Freeze time so expire-timestamp comparisons are exact.
        self.useFixture(test.TimeOverride())
    def test_get_defaults(self):
        # Use our pre-defined resources
        self._stub_quota_class_get_default()
        result = self.driver.get_defaults(None, quota.QUOTAS._resources)
        # Values overridden by the stubbed default quota class (instances,
        # ram, metadata_items, injected_file_content_bytes) win; the rest
        # fall back to the flag values set in setUp().
        self.assertEqual(result, dict(
                instances=5,
                cores=20,
                ram=25 * 1024,
                floating_ips=10,
                fixed_ips=10,
                metadata_items=64,
                injected_files=5,
                injected_file_content_bytes=5 * 1024,
                injected_file_path_bytes=255,
                security_groups=10,
                security_group_rules=20,
                key_pairs=100,
                ))
    def _stub_quota_class_get_default(self):
        # Stub out quota_class_get_default
        def fake_qcgd(context):
            self.calls.append('quota_class_get_default')
            return dict(
                instances=5,
                ram=25 * 1024,
                metadata_items=64,
                injected_file_content_bytes=5 * 1024,
                )
        self.stubs.Set(db, 'quota_class_get_default', fake_qcgd)
    def _stub_quota_class_get_all_by_name(self):
        # Stub out quota_class_get_all_by_name
        def fake_qcgabn(context, quota_class):
            self.calls.append('quota_class_get_all_by_name')
            # The driver is only ever expected to look up 'test_class'.
            self.assertEqual(quota_class, 'test_class')
            return dict(
                instances=5,
                ram=25 * 1024,
                metadata_items=64,
                injected_file_content_bytes=5 * 1024,
                )
        self.stubs.Set(db, 'quota_class_get_all_by_name', fake_qcgabn)
    def test_get_class_quotas(self):
        # With defaults enabled (implicit True) class overrides merge
        # over the flag-derived defaults.
        self._stub_quota_class_get_all_by_name()
        result = self.driver.get_class_quotas(None, quota.QUOTAS._resources,
                                              'test_class')
        self.assertEqual(self.calls, ['quota_class_get_all_by_name'])
        self.assertEqual(result, dict(
                instances=5,
                cores=20,
                ram=25 * 1024,
                floating_ips=10,
                fixed_ips=10,
                metadata_items=64,
                injected_files=5,
                injected_file_content_bytes=5 * 1024,
                injected_file_path_bytes=255,
                security_groups=10,
                security_group_rules=20,
                key_pairs=100,
                ))
    def test_get_class_quotas_no_defaults(self):
        # With defaults=False only the class's explicit overrides are
        # returned; unset resources are omitted entirely.
        self._stub_quota_class_get_all_by_name()
        result = self.driver.get_class_quotas(None, quota.QUOTAS._resources,
                                              'test_class', False)
        self.assertEqual(self.calls, ['quota_class_get_all_by_name'])
        self.assertEqual(result, dict(
                instances=5,
                ram=25 * 1024,
                metadata_items=64,
                injected_file_content_bytes=5 * 1024,
                ))
    def _stub_get_by_project_and_user(self):
        # Stub the three db lookups used by get_user_quotas(): per-user
        # overrides, per-project overrides, and per-user usage records —
        # plus the quota-class lookup from the other stub helper.
        def fake_qgabpau(context, project_id, user_id):
            self.calls.append('quota_get_all_by_project_and_user')
            self.assertEqual(project_id, 'test_project')
            self.assertEqual(user_id, 'fake_user')
            return dict(
                cores=10,
                injected_files=2,
                injected_file_path_bytes=127,
                )
        def fake_qgabp(context, project_id):
            self.calls.append('quota_get_all_by_project')
            self.assertEqual(project_id, 'test_project')
            return {
                'cores': 10,
                'injected_files': 2,
                'injected_file_path_bytes': 127,
                }
        def fake_qugabpau(context, project_id, user_id):
            self.calls.append('quota_usage_get_all_by_project_and_user')
            self.assertEqual(project_id, 'test_project')
            self.assertEqual(user_id, 'fake_user')
            return dict(
                instances=dict(in_use=2, reserved=2),
                cores=dict(in_use=4, reserved=4),
                ram=dict(in_use=10 * 1024, reserved=0),
                floating_ips=dict(in_use=2, reserved=0),
                metadata_items=dict(in_use=0, reserved=0),
                injected_files=dict(in_use=0, reserved=0),
                injected_file_content_bytes=dict(in_use=0, reserved=0),
                injected_file_path_bytes=dict(in_use=0, reserved=0),
                )
        self.stubs.Set(db, 'quota_get_all_by_project_and_user', fake_qgabpau)
        self.stubs.Set(db, 'quota_get_all_by_project', fake_qgabp)
        self.stubs.Set(db, 'quota_usage_get_all_by_project_and_user',
                       fake_qugabpau)
        self._stub_quota_class_get_all_by_name()
    def test_get_user_quotas(self):
        # Happy path: context carries a quota class, so limits come from
        # per-user overrides > class overrides > flag defaults, and each
        # entry includes the stubbed usage figures.
        self.maxDiff = None
        self._stub_get_by_project_and_user()
        result = self.driver.get_user_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project', 'fake_user')
        self.assertEqual(self.calls, [
                'quota_get_all_by_project_and_user',
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project_and_user',
                'quota_class_get_all_by_name',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=5,
                    in_use=2,
                    reserved=2,
                    ),
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                ram=dict(
                    limit=25 * 1024,
                    in_use=10 * 1024,
                    reserved=0,
                    ),
                floating_ips=dict(
                    limit=10,
                    in_use=2,
                    reserved=0,
                    ),
                fixed_ips=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                metadata_items=dict(
                    limit=64,
                    in_use=0,
                    reserved=0,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_content_bytes=dict(
                    limit=5 * 1024,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                security_groups=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                security_group_rules=dict(
                    limit=20,
                    in_use=0,
                    reserved=0,
                    ),
                key_pairs=dict(
                    limit=100,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def _stub_get_by_project(self):
        # Stub the db lookups used by get_project_quotas(): per-project
        # overrides and project usage records, plus both quota-class
        # stub helpers (named class and default class).
        def fake_qgabp(context, project_id):
            self.calls.append('quota_get_all_by_project')
            self.assertEqual(project_id, 'test_project')
            return dict(
                cores=10,
                injected_files=2,
                injected_file_path_bytes=127,
                )
        def fake_qugabp(context, project_id):
            self.calls.append('quota_usage_get_all_by_project')
            self.assertEqual(project_id, 'test_project')
            return dict(
                instances=dict(in_use=2, reserved=2),
                cores=dict(in_use=4, reserved=4),
                ram=dict(in_use=10 * 1024, reserved=0),
                floating_ips=dict(in_use=2, reserved=0),
                metadata_items=dict(in_use=0, reserved=0),
                injected_files=dict(in_use=0, reserved=0),
                injected_file_content_bytes=dict(in_use=0, reserved=0),
                injected_file_path_bytes=dict(in_use=0, reserved=0),
                )
        self.stubs.Set(db, 'quota_get_all_by_project', fake_qgabp)
        self.stubs.Set(db, 'quota_usage_get_all_by_project', fake_qugabp)
        self._stub_quota_class_get_all_by_name()
        self._stub_quota_class_get_default()
    def test_get_project_quotas(self):
        # Project-level analogue of test_get_user_quotas; also consults
        # the default quota class (see call list below).
        self.maxDiff = None
        self._stub_get_by_project()
        result = self.driver.get_project_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project')
        self.assertEqual(self.calls, [
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project',
                'quota_class_get_all_by_name',
                'quota_class_get_default',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=5,
                    in_use=2,
                    reserved=2,
                    ),
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                ram=dict(
                    limit=25 * 1024,
                    in_use=10 * 1024,
                    reserved=0,
                    ),
                floating_ips=dict(
                    limit=10,
                    in_use=2,
                    reserved=0,
                    ),
                fixed_ips=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                metadata_items=dict(
                    limit=64,
                    in_use=0,
                    reserved=0,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_content_bytes=dict(
                    limit=5 * 1024,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                security_groups=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                security_group_rules=dict(
                    limit=20,
                    in_use=0,
                    reserved=0,
                    ),
                key_pairs=dict(
                    limit=100,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def test_get_user_quotas_alt_context_no_class(self):
        # Context has no quota class, so quota_class_get_all_by_name is
        # never consulted and limits without explicit overrides fall
        # back to the raw flag defaults (e.g. instances=10, ram=50G).
        self.maxDiff = None
        self._stub_get_by_project_and_user()
        result = self.driver.get_user_quotas(
            FakeContext('test_project', None),
            quota.QUOTAS._resources, 'test_project', 'fake_user')
        self.assertEqual(self.calls, [
                'quota_get_all_by_project_and_user',
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project_and_user',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=10,
                    in_use=2,
                    reserved=2,
                    ),
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                ram=dict(
                    limit=50 * 1024,
                    in_use=10 * 1024,
                    reserved=0,
                    ),
                floating_ips=dict(
                    limit=10,
                    in_use=2,
                    reserved=0,
                    ),
                fixed_ips=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                metadata_items=dict(
                    limit=128,
                    in_use=0,
                    reserved=0,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_content_bytes=dict(
                    limit=10 * 1024,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                security_groups=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                security_group_rules=dict(
                    limit=20,
                    in_use=0,
                    reserved=0,
                    ),
                key_pairs=dict(
                    limit=100,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def test_get_project_quotas_alt_context_no_class(self):
        # Context belongs to a different project/class; the project path
        # still consults the default quota class (hence limits like
        # instances=5), but not the named-class lookup.
        self.maxDiff = None
        self._stub_get_by_project()
        result = self.driver.get_project_quotas(
            FakeContext('other_project', 'other_class'),
            quota.QUOTAS._resources, 'test_project')
        self.assertEqual(self.calls, [
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project',
                'quota_class_get_default',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=5,
                    in_use=2,
                    reserved=2,
                    ),
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                ram=dict(
                    limit=25 * 1024,
                    in_use=10 * 1024,
                    reserved=0,
                    ),
                floating_ips=dict(
                    limit=10,
                    in_use=2,
                    reserved=0,
                    ),
                fixed_ips=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                metadata_items=dict(
                    limit=64,
                    in_use=0,
                    reserved=0,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_content_bytes=dict(
                    limit=5 * 1024,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                security_groups=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                security_group_rules=dict(
                    limit=20,
                    in_use=0,
                    reserved=0,
                    ),
                key_pairs=dict(
                    limit=100,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def test_get_user_quotas_alt_context_with_class(self):
        # Explicit quota_class kwarg triggers the named-class lookup even
        # though the context already carries a class; results match the
        # plain test_get_user_quotas expectations.
        self.maxDiff = None
        self._stub_get_by_project_and_user()
        result = self.driver.get_user_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project', 'fake_user',
            quota_class='test_class')
        self.assertEqual(self.calls, [
                'quota_get_all_by_project_and_user',
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project_and_user',
                'quota_class_get_all_by_name',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=5,
                    in_use=2,
                    reserved=2,
                    ),
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                ram=dict(
                    limit=25 * 1024,
                    in_use=10 * 1024,
                    reserved=0,
                    ),
                floating_ips=dict(
                    limit=10,
                    in_use=2,
                    reserved=0,
                    ),
                fixed_ips=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                metadata_items=dict(
                    limit=64,
                    in_use=0,
                    reserved=0,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_content_bytes=dict(
                    limit=5 * 1024,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                security_groups=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                security_group_rules=dict(
                    limit=20,
                    in_use=0,
                    reserved=0,
                    ),
                key_pairs=dict(
                    limit=100,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def test_get_project_quotas_alt_context_with_class(self):
        # Foreign context plus explicit quota_class: both the named-class
        # and default-class lookups run.
        self.maxDiff = None
        self._stub_get_by_project()
        result = self.driver.get_project_quotas(
            FakeContext('other_project', 'other_class'),
            quota.QUOTAS._resources, 'test_project', quota_class='test_class')
        self.assertEqual(self.calls, [
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project',
                'quota_class_get_all_by_name',
                'quota_class_get_default',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=5,
                    in_use=2,
                    reserved=2,
                    ),
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                ram=dict(
                    limit=25 * 1024,
                    in_use=10 * 1024,
                    reserved=0,
                    ),
                floating_ips=dict(
                    limit=10,
                    in_use=2,
                    reserved=0,
                    ),
                fixed_ips=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                metadata_items=dict(
                    limit=64,
                    in_use=0,
                    reserved=0,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_content_bytes=dict(
                    limit=5 * 1024,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                security_groups=dict(
                    limit=10,
                    in_use=0,
                    reserved=0,
                    ),
                security_group_rules=dict(
                    limit=20,
                    in_use=0,
                    reserved=0,
                    ),
                key_pairs=dict(
                    limit=100,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def test_get_user_quotas_no_defaults(self):
        # defaults=False: only resources with explicit per-user/project
        # overrides appear in the result.
        self._stub_get_by_project_and_user()
        result = self.driver.get_user_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project', 'fake_user',
            defaults=False)
        self.assertEqual(self.calls, [
                'quota_get_all_by_project_and_user',
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project_and_user',
                'quota_class_get_all_by_name',
                ])
        self.assertEqual(result, dict(
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def test_get_project_quotas_no_defaults(self):
        # Project-level analogue of the defaults=False behavior above.
        self._stub_get_by_project()
        result = self.driver.get_project_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project', defaults=False)
        self.assertEqual(self.calls, [
                'quota_get_all_by_project',
                'quota_usage_get_all_by_project',
                'quota_class_get_all_by_name',
                'quota_class_get_default',
                ])
        self.assertEqual(result, dict(
                cores=dict(
                    limit=10,
                    in_use=4,
                    reserved=4,
                    ),
                injected_files=dict(
                    limit=2,
                    in_use=0,
                    reserved=0,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    in_use=0,
                    reserved=0,
                    ),
                ))
    def test_get_user_quotas_no_usages(self):
        # usages=False: the usage db lookup is skipped entirely and each
        # entry contains only a limit.
        self._stub_get_by_project_and_user()
        result = self.driver.get_user_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project', 'fake_user', usages=False)
        self.assertEqual(self.calls, [
                'quota_get_all_by_project_and_user',
                'quota_get_all_by_project',
                'quota_class_get_all_by_name',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=5,
                    ),
                cores=dict(
                    limit=10,
                    ),
                ram=dict(
                    limit=25 * 1024,
                    ),
                floating_ips=dict(
                    limit=10,
                    ),
                fixed_ips=dict(
                    limit=10,
                    ),
                metadata_items=dict(
                    limit=64,
                    ),
                injected_files=dict(
                    limit=2,
                    ),
                injected_file_content_bytes=dict(
                    limit=5 * 1024,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    ),
                security_groups=dict(
                    limit=10,
                    ),
                security_group_rules=dict(
                    limit=20,
                    ),
                key_pairs=dict(
                    limit=100,
                    ),
                ))
    def test_get_project_quotas_no_usages(self):
        # Project-level analogue of the usages=False behavior above.
        self._stub_get_by_project()
        result = self.driver.get_project_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project', usages=False)
        self.assertEqual(self.calls, [
                'quota_get_all_by_project',
                'quota_class_get_all_by_name',
                'quota_class_get_default',
                ])
        self.assertEqual(result, dict(
                instances=dict(
                    limit=5,
                    ),
                cores=dict(
                    limit=10,
                    ),
                ram=dict(
                    limit=25 * 1024,
                    ),
                floating_ips=dict(
                    limit=10,
                    ),
                fixed_ips=dict(
                    limit=10,
                    ),
                metadata_items=dict(
                    limit=64,
                    ),
                injected_files=dict(
                    limit=2,
                    ),
                injected_file_content_bytes=dict(
                    limit=5 * 1024,
                    ),
                injected_file_path_bytes=dict(
                    limit=127,
                    ),
                security_groups=dict(
                    limit=10,
                    ),
                security_group_rules=dict(
                    limit=20,
                    ),
                key_pairs=dict(
                    limit=100,
                    ),
                ))
    def _stub_get_settable_quotas(self):
        # Stub the driver's own quota getters plus the per-user override
        # lookup that get_settable_quotas() builds min/max bounds from.
        def fake_get_project_quotas(context, resources, project_id,
                                    quota_class=None, defaults=True,
                                    usages=True, remains=False):
            self.calls.append('get_project_quotas')
            result = {}
            for k, v in resources.items():
                # 'instances' is given non-trivial usage/remains so the
                # tests can distinguish it from the other resources.
                # NOTE: the loop variable deliberately shadows the
                # `remains` parameter here.
                if k == 'instances':
                    remains = v.default - 5
                    in_use = 1
                else:
                    remains = v.default
                    in_use = 0
                result[k] = {'limit': v.default, 'in_use': in_use,
                             'reserved': 0, 'remains': remains}
            return result
        def fake_get_user_quotas(context, resources, project_id, user_id,
                                 quota_class=None, defaults=True,
                                 usages=True):
            self.calls.append('get_user_quotas')
            result = {}
            for k, v in resources.items():
                if k == 'instances':
                    in_use = 1
                else:
                    in_use = 0
                result[k] = {'limit': v.default,
                             'in_use': in_use, 'reserved': 0}
            return result
        def fake_qgabpau(context, project_id, user_id):
            self.calls.append('quota_get_all_by_project_and_user')
            return {'instances': 2}
        self.stubs.Set(self.driver, 'get_project_quotas',
                       fake_get_project_quotas)
        self.stubs.Set(self.driver, 'get_user_quotas',
                       fake_get_user_quotas)
        self.stubs.Set(db, 'quota_get_all_by_project_and_user',
                       fake_qgabpau)
    def test_get_settable_quotas_with_user(self):
        # With a user_id, both quota getters and the per-user override
        # lookup run; maxima are bounded (e.g. instances: min 1, max 7).
        self._stub_get_settable_quotas()
        result = self.driver.get_settable_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project', user_id='test_user')
        self.assertEqual(self.calls, [
                'get_project_quotas',
                'get_user_quotas',
                'quota_get_all_by_project_and_user',
                ])
        self.assertEqual(result, {
                'instances': {
                    'minimum': 1,
                    'maximum': 7,
                    },
                'cores': {
                    'minimum': 0,
                    'maximum': 20,
                    },
                'ram': {
                    'minimum': 0,
                    'maximum': 50 * 1024,
                    },
                'floating_ips': {
                    'minimum': 0,
                    'maximum': 10,
                    },
                'fixed_ips': {
                    'minimum': 0,
                    'maximum': 10,
                    },
                'metadata_items': {
                    'minimum': 0,
                    'maximum': 128,
                    },
                'injected_files': {
                    'minimum': 0,
                    'maximum': 5,
                    },
                'injected_file_content_bytes': {
                    'minimum': 0,
                    'maximum': 10 * 1024,
                    },
                'injected_file_path_bytes': {
                    'minimum': 0,
                    'maximum': 255,
                    },
                'security_groups': {
                    'minimum': 0,
                    'maximum': 10,
                    },
                'security_group_rules': {
                    'minimum': 0,
                    'maximum': 20,
                    },
                'key_pairs': {
                    'minimum': 0,
                    'maximum': 100,
                    },
                })
    def test_get_settable_quotas_without_user(self):
        # Project-level settable quotas: only get_project_quotas runs and
        # every maximum is unlimited (-1); 'instances' keeps a non-zero
        # minimum because of its stubbed usage.
        self._stub_get_settable_quotas()
        result = self.driver.get_settable_quotas(
            FakeContext('test_project', 'test_class'),
            quota.QUOTAS._resources, 'test_project')
        self.assertEqual(self.calls, [
                'get_project_quotas',
                ])
        self.assertEqual(result, {
                'instances': {
                    'minimum': 5,
                    'maximum': -1,
                    },
                'cores': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'ram': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'floating_ips': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'fixed_ips': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'metadata_items': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'injected_files': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'injected_file_content_bytes': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'injected_file_path_bytes': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'security_groups': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'security_group_rules': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                'key_pairs': {
                    'minimum': 0,
                    'maximum': -1,
                    },
                })
    def _stub_get_project_quotas(self):
        # Stub get_project_quotas to return just {resource: {limit:
        # default}} for every registered resource, recording the call.
        def fake_get_project_quotas(context, resources, project_id,
                                    quota_class=None, defaults=True,
                                    usages=True, remains=False):
            self.calls.append('get_project_quotas')
            return dict((k, dict(limit=v.default))
                        for k, v in resources.items())
        self.stubs.Set(self.driver, 'get_project_quotas',
                       fake_get_project_quotas)
    def test_get_quotas_has_sync_unknown(self):
        # Unknown resource name with has_sync=True fails before any
        # quota lookup happens.
        self._stub_get_project_quotas()
        self.assertRaises(exception.QuotaResourceUnknown,
                          self.driver._get_quotas,
                          None, quota.QUOTAS._resources,
                          ['unknown'], True)
        self.assertEqual(self.calls, [])
    def test_get_quotas_no_sync_unknown(self):
        # Same as above with has_sync=False.
        self._stub_get_project_quotas()
        self.assertRaises(exception.QuotaResourceUnknown,
                          self.driver._get_quotas,
                          None, quota.QUOTAS._resources,
                          ['unknown'], False)
        self.assertEqual(self.calls, [])
    def test_get_quotas_has_sync_no_sync_resource(self):
        # Requesting a non-reservable resource (metadata_items) in
        # has_sync mode is treated as unknown.
        self._stub_get_project_quotas()
        self.assertRaises(exception.QuotaResourceUnknown,
                          self.driver._get_quotas,
                          None, quota.QUOTAS._resources,
                          ['metadata_items'], True)
        self.assertEqual(self.calls, [])
    def test_get_quotas_no_sync_has_sync_resource(self):
        # Conversely, a reservable resource (instances) is rejected in
        # no-sync mode.
        self._stub_get_project_quotas()
        self.assertRaises(exception.QuotaResourceUnknown,
                          self.driver._get_quotas,
                          None, quota.QUOTAS._resources,
                          ['instances'], False)
        self.assertEqual(self.calls, [])
    def test_get_quotas_has_sync(self):
        # Valid reservable resources: limits flattened to bare numbers.
        self._stub_get_project_quotas()
        result = self.driver._get_quotas(FakeContext('test_project',
                                                     'test_class'),
                                         quota.QUOTAS._resources,
                                         ['instances', 'cores', 'ram',
                                          'floating_ips', 'security_groups'],
                                         True)
        self.assertEqual(self.calls, ['get_project_quotas'])
        self.assertEqual(result, dict(
                instances=10,
                cores=20,
                ram=50 * 1024,
                floating_ips=10,
                security_groups=10,
                ))
    def test_get_quotas_no_sync(self):
        # Valid non-reservable resources in no-sync mode.
        self._stub_get_project_quotas()
        result = self.driver._get_quotas(FakeContext('test_project',
                                                     'test_class'),
                                         quota.QUOTAS._resources,
                                         ['metadata_items', 'injected_files',
                                          'injected_file_content_bytes',
                                          'injected_file_path_bytes',
                                          'security_group_rules'], False)
        self.assertEqual(self.calls, ['get_project_quotas'])
        self.assertEqual(result, dict(
                metadata_items=128,
                injected_files=5,
                injected_file_content_bytes=10 * 1024,
                injected_file_path_bytes=255,
                security_group_rules=20,
                ))
    def test_limit_check_under(self):
        # Negative values are invalid regardless of the limit.
        self._stub_get_project_quotas()
        self.assertRaises(exception.InvalidQuotaValue,
                          self.driver.limit_check,
                          FakeContext('test_project', 'test_class'),
                          quota.QUOTAS._resources,
                          dict(metadata_items=-1))
    def test_limit_check_over(self):
        # 129 > the configured quota_metadata_items=128 -> OverQuota.
        self._stub_get_project_quotas()
        self.assertRaises(exception.OverQuota,
                          self.driver.limit_check,
                          FakeContext('test_project', 'test_class'),
                          quota.QUOTAS._resources,
                          dict(metadata_items=129))
    def test_limit_check_unlimited(self):
        # A -1 limit means unlimited: any value passes.
        self.flags(quota_metadata_items=-1)
        self._stub_get_project_quotas()
        self.driver.limit_check(FakeContext('test_project', 'test_class'),
                                quota.QUOTAS._resources,
                                dict(metadata_items=32767))
    def test_limit_check(self):
        # Exactly at the limit is allowed.
        self._stub_get_project_quotas()
        self.driver.limit_check(FakeContext('test_project', 'test_class'),
                                quota.QUOTAS._resources,
                                dict(metadata_items=128))
    def _stub_quota_reserve(self):
        # Stub db.quota_reserve to record (expire, until_refresh,
        # max_age) and hand back fixed reservation ids.
        def fake_quota_reserve(context, resources, quotas, user_quotas, deltas,
                               expire, until_refresh, max_age, project_id=None,
                               user_id=None):
            self.calls.append(('quota_reserve', expire, until_refresh,
                               max_age))
            return ['resv-1', 'resv-2', 'resv-3']
        self.stubs.Set(db, 'quota_reserve', fake_quota_reserve)
    def test_reserve_bad_expire(self):
        # A non-numeric/non-datetime expire value is rejected before any
        # db call is made.
        self._stub_get_project_quotas()
        self._stub_quota_reserve()
        self.assertRaises(exception.InvalidReservationExpiration,
                          self.driver.reserve,
                          FakeContext('test_project', 'test_class'),
                          quota.QUOTAS._resources,
                          dict(instances=2), expire='invalid')
        self.assertEqual(self.calls, [])
    def test_reserve_default_expire(self):
        # No expire given: reservation_expire flag (86400s) is used.
        # TimeOverride from setUp keeps utcnow() stable for comparison.
        self._stub_get_project_quotas()
        self._stub_quota_reserve()
        result = self.driver.reserve(FakeContext('test_project', 'test_class'),
                                     quota.QUOTAS._resources,
                                     dict(instances=2))
        expire = timeutils.utcnow() + datetime.timedelta(seconds=86400)
        self.assertEqual(self.calls, [
                'get_project_quotas',
                ('quota_reserve', expire, 0, 0),
                ])
        self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
    def test_reserve_int_expire(self):
        # Integer expire is interpreted as seconds from now.
        self._stub_get_project_quotas()
        self._stub_quota_reserve()
        result = self.driver.reserve(FakeContext('test_project', 'test_class'),
                                     quota.QUOTAS._resources,
                                     dict(instances=2), expire=3600)
        expire = timeutils.utcnow() + datetime.timedelta(seconds=3600)
        self.assertEqual(self.calls, [
                'get_project_quotas',
                ('quota_reserve', expire, 0, 0),
                ])
        self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
    def test_reserve_timedelta_expire(self):
        # timedelta expire is added to now.
        self._stub_get_project_quotas()
        self._stub_quota_reserve()
        expire_delta = datetime.timedelta(seconds=60)
        result = self.driver.reserve(FakeContext('test_project', 'test_class'),
                                     quota.QUOTAS._resources,
                                     dict(instances=2), expire=expire_delta)
        expire = timeutils.utcnow() + expire_delta
        self.assertEqual(self.calls, [
                'get_project_quotas',
                ('quota_reserve', expire, 0, 0),
                ])
        self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
    def test_reserve_datetime_expire(self):
        # An absolute datetime expire is passed through unchanged.
        self._stub_get_project_quotas()
        self._stub_quota_reserve()
        expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
        result = self.driver.reserve(FakeContext('test_project', 'test_class'),
                                     quota.QUOTAS._resources,
                                     dict(instances=2), expire=expire)
        self.assertEqual(self.calls, [
                'get_project_quotas',
                ('quota_reserve', expire, 0, 0),
                ])
        self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
    def test_reserve_until_refresh(self):
        # until_refresh flag is threaded through to db.quota_reserve.
        self._stub_get_project_quotas()
        self._stub_quota_reserve()
        self.flags(until_refresh=500)
        expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
        result = self.driver.reserve(FakeContext('test_project', 'test_class'),
                                     quota.QUOTAS._resources,
                                     dict(instances=2), expire=expire)
        self.assertEqual(self.calls, [
                'get_project_quotas',
                ('quota_reserve', expire, 500, 0),
                ])
        self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
    def test_reserve_max_age(self):
        # max_age flag is threaded through to db.quota_reserve.
        self._stub_get_project_quotas()
        self._stub_quota_reserve()
        self.flags(max_age=86400)
        expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
        result = self.driver.reserve(FakeContext('test_project', 'test_class'),
                                     quota.QUOTAS._resources,
                                     dict(instances=2), expire=expire)
        self.assertEqual(self.calls, [
                'get_project_quotas',
                ('quota_reserve', expire, 0, 86400),
                ])
        self.assertEqual(result, ['resv-1', 'resv-2', 'resv-3'])
    def test_usage_reset(self):
        # usage_reset() should elevate the context, call
        # db.quota_usage_update(in_use=-1) once per resource, and swallow
        # QuotaUsageNotFound for resources with no usage row.
        calls = []
        def fake_quota_usage_update(context, project_id, user_id, resource,
                                    **kwargs):
            calls.append(('quota_usage_update', context, project_id, user_id,
                          resource, kwargs))
            # Simulate a missing usage record for one resource.
            if resource == 'nonexist':
                raise exception.QuotaUsageNotFound(project_id=project_id)
        self.stubs.Set(db, 'quota_usage_update', fake_quota_usage_update)
        ctx = FakeContext('test_project', 'test_class')
        resources = ['res1', 'res2', 'nonexist', 'res4']
        self.driver.usage_reset(ctx, resources)
        # Make sure we had some calls
        self.assertEqual(len(calls), len(resources))
        # Extract the elevated context that was used and do some
        # sanity checks
        elevated = calls[0][1]
        self.assertEqual(elevated.project_id, ctx.project_id)
        self.assertEqual(elevated.quota_class, ctx.quota_class)
        self.assertEqual(elevated.is_admin, True)
        # Now check that all the expected calls were made
        exemplar = [('quota_usage_update', elevated, 'test_project',
                     'fake_user', res, dict(in_use=-1)) for res in resources]
        self.assertEqual(calls, exemplar)
class FakeSession(object):
    """Minimal stand-in for a database session object.

    Implements just enough of the session protocol for the tests:
    ``begin()`` hands back the session itself, ``add()`` discards the
    instance, and the context-manager hooks make ``with session`` a
    no-op that never suppresses exceptions.
    """

    def __enter__(self):
        # Entering the context yields the session unchanged.
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Returning False lets any exception from the body propagate.
        return False

    def begin(self):
        # A "transaction" is simply this same object.
        return self

    def add(self, instance):
        # Persisting an instance is a no-op in the fake.
        return None
class FakeUsage(sqa_models.QuotaUsage):
    """QuotaUsage model variant whose save() is a no-op (no DB I/O)."""
    def save(self, *args, **kwargs):
        # Swallow persistence so tests can build usage rows in memory.
        pass
class QuotaReserveSqlAlchemyTestCase(test.TestCase):
    """Exercises ``nova.db.sqlalchemy.api.quota_reserve`` with all DB access
    stubbed out: session creation, usage lookups, and usage/reservation row
    creation are replaced with in-memory fakes recorded on the test case.
    """
    # nova.db.sqlalchemy.api.quota_reserve is so complex it needs its
    # own test case, and since it's a quota manipulator, this is the
    # best place to put it...
    def setUp(self):
        """Install fake sync functions and stub out sqa_api's DB helpers."""
        super(QuotaReserveSqlAlchemyTestCase, self).setUp()
        # Records which resources quota_reserve asked to refresh.
        self.sync_called = set()
        # Per-resource quota limits used by every test.
        self.quotas = dict(
            instances=5,
            cores=10,
            ram=10 * 1024,
            fixed_ips=5,
            )
        # Requested deltas; individual tests override entries as needed.
        self.deltas = dict(
            instances=2,
            cores=4,
            ram=2 * 1024,
            fixed_ips=2,
            )
        def make_sync(res_name):
            # Build a fake quota-sync function for one resource.  It logs
            # the call and returns a canned in_use value: 2 when the
            # recorded usage is negative, otherwise recorded in_use - 1,
            # or 0 when there is no recorded usage at all.
            def sync(context, project_id, user_id, session):
                self.sync_called.add(res_name)
                if res_name in self.usages:
                    if self.usages[res_name].in_use < 0:
                        return {res_name: 2}
                    else:
                        return {res_name: self.usages[res_name].in_use - 1}
                return {res_name: 0}
            return sync
        # Register the fake sync functions and build ReservableResources
        # that reference them by name.
        self.resources = {}
        for res_name in ('instances', 'cores', 'ram', 'fixed_ips'):
            method_name = '_sync_%s' % res_name
            sqa_api.QUOTA_SYNC_FUNCTIONS[method_name] = make_sync(res_name)
            res = quota.ReservableResource(res_name, '_sync_%s' % res_name)
            self.resources[res_name] = res
        self.expire = timeutils.utcnow() + datetime.timedelta(seconds=3600)
        # In-memory stand-ins for DB state, populated by the fakes below.
        self.usages = {}
        self.usages_created = {}
        self.reservations_created = {}
        # Baseline expected usage rows; tests tweak individual fields
        # before passing this list to compare_usage().  Note fixed_ips is
        # a per-project (user_id=None) resource.
        self.usages_list = [
            dict(resource='instances',
                 project_id='test_project',
                 user_id='fake_user',
                 in_use=2,
                 reserved=2,
                 until_refresh=None),
            dict(resource='cores',
                 project_id='test_project',
                 user_id='fake_user',
                 in_use=2,
                 reserved=4,
                 until_refresh=None),
            dict(resource='ram',
                 project_id='test_project',
                 user_id='fake_user',
                 in_use=2,
                 reserved=2 * 1024,
                 until_refresh=None),
            dict(resource='fixed_ips',
                 project_id='test_project',
                 user_id=None,
                 in_use=2,
                 reserved=2,
                 until_refresh=None),
        ]
        def fake_get_session():
            # Hand quota_reserve a FakeSession instead of a real one.
            return FakeSession()
        def fake_get_project_quota_usages(context, session, project_id):
            return self.usages.copy()
        def fake_get_user_quota_usages(context, session, project_id, user_id):
            return self.usages.copy()
        def fake_quota_usage_create(context, project_id, user_id, resource,
                                    in_use, reserved, until_refresh,
                                    session=None, save=True):
            # Record created usage rows in self.usages_created for later
            # comparison instead of writing to a database.
            quota_usage_ref = self._make_quota_usage(
                project_id, user_id, resource, in_use, reserved, until_refresh,
                timeutils.utcnow(), timeutils.utcnow())
            self.usages_created[resource] = quota_usage_ref
            return quota_usage_ref
        def fake_reservation_create(context, uuid, usage_id, project_id,
                                    user_id, resource, delta, expire,
                                    session=None):
            # Record created reservations in self.reservations_created.
            reservation_ref = self._make_reservation(
                uuid, usage_id, project_id, user_id, resource, delta, expire,
                timeutils.utcnow(), timeutils.utcnow())
            self.reservations_created[resource] = reservation_ref
            return reservation_ref
        self.stubs.Set(sqa_api, 'get_session', fake_get_session)
        self.stubs.Set(sqa_api, '_get_project_quota_usages',
                       fake_get_project_quota_usages)
        self.stubs.Set(sqa_api, '_get_user_quota_usages',
                       fake_get_user_quota_usages)
        self.stubs.Set(sqa_api, '_quota_usage_create', fake_quota_usage_create)
        self.stubs.Set(sqa_api, '_reservation_create', fake_reservation_create)
        # Freeze time so created_at/updated_at comparisons are stable.
        self.useFixture(test.TimeOverride())
    def _make_quota_usage(self, project_id, user_id, resource, in_use,
                          reserved, until_refresh, created_at, updated_at):
        """Build a FakeUsage row populated with the given fields."""
        quota_usage_ref = FakeUsage()
        # Synthetic id: unique across both pre-seeded and created usages.
        quota_usage_ref.id = len(self.usages) + len(self.usages_created)
        quota_usage_ref.project_id = project_id
        quota_usage_ref.user_id = user_id
        quota_usage_ref.resource = resource
        quota_usage_ref.in_use = in_use
        quota_usage_ref.reserved = reserved
        quota_usage_ref.until_refresh = until_refresh
        quota_usage_ref.created_at = created_at
        quota_usage_ref.updated_at = updated_at
        quota_usage_ref.deleted_at = None
        quota_usage_ref.deleted = False
        return quota_usage_ref
    def init_usage(self, project_id, user_id, resource, in_use, reserved=0,
                   until_refresh=None, created_at=None, updated_at=None):
        """Seed self.usages with one pre-existing usage row."""
        if created_at is None:
            created_at = timeutils.utcnow()
        if updated_at is None:
            updated_at = timeutils.utcnow()
        # fixed_ips is tracked per-project, not per-user.
        if resource == 'fixed_ips':
            user_id = None
        quota_usage_ref = self._make_quota_usage(project_id, user_id, resource,
                                                 in_use, reserved,
                                                 until_refresh,
                                                 created_at, updated_at)
        self.usages[resource] = quota_usage_ref
    def compare_usage(self, usage_dict, expected):
        """Assert each expected dict's fields match the usage row for its
        resource in usage_dict."""
        for usage in expected:
            resource = usage['resource']
            for key, value in usage.items():
                actual = getattr(usage_dict[resource], key)
                self.assertEqual(actual, value,
                                 "%s != %s on usage for resource %s" %
                                 (actual, value, resource))
    def _make_reservation(self, uuid, usage_id, project_id, user_id, resource,
                          delta, expire, created_at, updated_at):
        """Build a Reservation model row populated with the given fields."""
        reservation_ref = sqa_models.Reservation()
        reservation_ref.id = len(self.reservations_created)
        reservation_ref.uuid = uuid
        reservation_ref.usage_id = usage_id
        reservation_ref.project_id = project_id
        reservation_ref.user_id = user_id
        reservation_ref.resource = resource
        reservation_ref.delta = delta
        reservation_ref.expire = expire
        reservation_ref.created_at = created_at
        reservation_ref.updated_at = updated_at
        reservation_ref.deleted_at = None
        reservation_ref.deleted = False
        return reservation_ref
    def compare_reservation(self, reservations, expected):
        """Assert the returned reservation UUIDs exactly cover the expected
        reservations, and each created reservation has the expected fields."""
        reservations = set(reservations)
        for resv in expected:
            resource = resv['resource']
            resv_obj = self.reservations_created[resource]
            self.assertIn(resv_obj.uuid, reservations)
            reservations.discard(resv_obj.uuid)
            for key, value in resv.items():
                actual = getattr(resv_obj, key)
                self.assertEqual(actual, value,
                                 "%s != %s on reservation for resource %s" %
                                 (actual, value, resource))
        # No unexpected reservations were returned.
        self.assertEqual(len(reservations), 0)
    def _update_reservations_list(self, usage_id_change=False,
                                  delta_change=False):
        """Build the expected reservation dicts.

        usage_id_change selects whether reservations should point at rows in
        usages_created (True) or the pre-seeded usages (False); delta_change
        negates all deltas for the "release"/reduction scenarios.
        """
        reservations_list = [
            dict(resource='instances',
                 project_id='test_project',
                 delta=2),
            dict(resource='cores',
                 project_id='test_project',
                 delta=4),
            dict(resource='ram',
                 delta=2 * 1024),
            dict(resource='fixed_ips',
                 project_id='test_project',
                 delta=2),
        ]
        if usage_id_change:
            reservations_list[0]["usage_id"] = self.usages_created['instances']
            reservations_list[1]["usage_id"] = self.usages_created['cores']
            reservations_list[2]["usage_id"] = self.usages_created['ram']
            reservations_list[3]["usage_id"] = self.usages_created['fixed_ips']
        else:
            reservations_list[0]["usage_id"] = self.usages['instances']
            reservations_list[1]["usage_id"] = self.usages['cores']
            reservations_list[2]["usage_id"] = self.usages['ram']
            reservations_list[3]["usage_id"] = self.usages['fixed_ips']
        if delta_change:
            reservations_list[0]["delta"] = -2
            reservations_list[1]["delta"] = -4
            reservations_list[2]["delta"] = -2 * 1024
            reservations_list[3]["delta"] = -2
        return reservations_list
    def _init_usages(self, *in_use, **kwargs):
        """Seed one usage row per resource (positional in_use values in the
        order instances, cores, ram, fixed_ips) and return a fake context."""
        for i, option in enumerate(('instances', 'cores', 'ram', 'fixed_ips')):
            self.init_usage('test_project', 'fake_user',
                            option, in_use[i], **kwargs)
        return FakeContext('test_project', 'test_class')
    def test_quota_reserve_create_usages(self):
        """No pre-existing usages: rows are synced (to 0) and created."""
        context = FakeContext('test_project', 'test_class')
        result = sqa_api.quota_reserve(context, self.resources, self.quotas,
                                       self.quotas, self.deltas, self.expire,
                                       0, 0)
        self.assertEqual(self.sync_called, set(['instances', 'cores',
                                                'ram', 'fixed_ips']))
        self.usages_list[0]["in_use"] = 0
        self.usages_list[1]["in_use"] = 0
        self.usages_list[2]["in_use"] = 0
        self.usages_list[3]["in_use"] = 0
        self.compare_usage(self.usages_created, self.usages_list)
        reservations_list = self._update_reservations_list(True)
        self.compare_reservation(result, reservations_list)
    def test_quota_reserve_negative_in_use(self):
        """Negative recorded usage forces a refresh (fake sync returns 2)."""
        context = self._init_usages(-1, -1, -1, -1, until_refresh=1)
        result = sqa_api.quota_reserve(context, self.resources, self.quotas,
                                       self.quotas, self.deltas, self.expire,
                                       5, 0)
        self.assertEqual(self.sync_called, set(['instances', 'cores',
                                                'ram', 'fixed_ips']))
        self.usages_list[0]["until_refresh"] = 5
        self.usages_list[1]["until_refresh"] = 5
        self.usages_list[2]["until_refresh"] = 5
        self.usages_list[3]["until_refresh"] = 5
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, self._update_reservations_list())
    def test_quota_reserve_until_refresh(self):
        """until_refresh countdown reaching zero triggers a refresh."""
        context = self._init_usages(3, 3, 3, 3, until_refresh=1)
        result = sqa_api.quota_reserve(context, self.resources, self.quotas,
                                       self.quotas, self.deltas, self.expire,
                                       5, 0)
        self.assertEqual(self.sync_called, set(['instances', 'cores',
                                                'ram', 'fixed_ips']))
        self.usages_list[0]["until_refresh"] = 5
        self.usages_list[1]["until_refresh"] = 5
        self.usages_list[2]["until_refresh"] = 5
        self.usages_list[3]["until_refresh"] = 5
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, self._update_reservations_list())
    def test_quota_reserve_max_age(self):
        """Usage rows older than max_age are refreshed via sync."""
        max_age = 3600
        record_created = (timeutils.utcnow() -
                          datetime.timedelta(seconds=max_age))
        context = self._init_usages(3, 3, 3, 3, created_at=record_created,
                                    updated_at=record_created)
        result = sqa_api.quota_reserve(context, self.resources, self.quotas,
                                       self.quotas, self.deltas, self.expire,
                                       0, max_age)
        self.assertEqual(self.sync_called, set(['instances', 'cores',
                                                'ram', 'fixed_ips']))
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, self._update_reservations_list())
    def test_quota_reserve_no_refresh(self):
        """Fresh usages with no refresh triggers: sync is never called."""
        context = self._init_usages(3, 3, 3, 3)
        result = sqa_api.quota_reserve(context, self.resources, self.quotas,
                                       self.quotas, self.deltas, self.expire,
                                       0, 0)
        self.assertEqual(self.sync_called, set([]))
        self.usages_list[0]["in_use"] = 3
        self.usages_list[1]["in_use"] = 3
        self.usages_list[2]["in_use"] = 3
        self.usages_list[3]["in_use"] = 3
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        self.compare_reservation(result, self._update_reservations_list())
    def test_quota_reserve_unders(self):
        """Negative deltas larger than in_use still reserve (no clamping of
        the reserved column; it stays 0)."""
        context = self._init_usages(1, 3, 1 * 1024, 1)
        self.deltas["instances"] = -2
        self.deltas["cores"] = -4
        self.deltas["ram"] = -2 * 1024
        self.deltas["fixed_ips"] = -2
        result = sqa_api.quota_reserve(context, self.resources, self.quotas,
                                       self.quotas, self.deltas, self.expire,
                                       0, 0)
        self.assertEqual(self.sync_called, set([]))
        self.usages_list[0]["in_use"] = 1
        self.usages_list[0]["reserved"] = 0
        self.usages_list[1]["in_use"] = 3
        self.usages_list[1]["reserved"] = 0
        self.usages_list[2]["in_use"] = 1 * 1024
        self.usages_list[2]["reserved"] = 0
        self.usages_list[3]["in_use"] = 1
        self.usages_list[3]["reserved"] = 0
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        reservations_list = self._update_reservations_list(False, True)
        self.compare_reservation(result, reservations_list)
    def test_quota_reserve_overs(self):
        """Deltas exceeding headroom raise OverQuota with full details and
        leave usages untouched (nothing created, nothing reserved)."""
        context = self._init_usages(4, 8, 10 * 1024, 4)
        try:
            sqa_api.quota_reserve(context, self.resources, self.quotas,
                                  self.quotas, self.deltas, self.expire, 0, 0)
        except exception.OverQuota, e:
            expected_kwargs = {'code': 500,
                'usages': {'instances': {'reserved': 0, 'in_use': 4},
                           'ram': {'reserved': 0, 'in_use': 10240},
                           'fixed_ips': {'reserved': 0, 'in_use': 4},
                           'cores': {'reserved': 0, 'in_use': 8}},
                'headroom': {'cores': 2, 'ram': 0, 'fixed_ips': 1,
                             'instances': 1},
                'overs': ['cores', 'fixed_ips', 'instances', 'ram'],
                'quotas': {'cores': 10, 'ram': 10240,
                           'fixed_ips': 5, 'instances': 5}}
            self.assertEqual(e.kwargs, expected_kwargs)
        else:
            self.fail('Expected OverQuota failure')
        self.assertEqual(self.sync_called, set([]))
        self.usages_list[0]["in_use"] = 4
        self.usages_list[0]["reserved"] = 0
        self.usages_list[1]["in_use"] = 8
        self.usages_list[1]["reserved"] = 0
        self.usages_list[2]["in_use"] = 10 * 1024
        self.usages_list[2]["reserved"] = 0
        self.usages_list[3]["in_use"] = 4
        self.usages_list[3]["reserved"] = 0
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        self.assertEqual(self.reservations_created, {})
    def test_quota_reserve_cores_unlimited(self):
        # Requesting 8 cores, quota_cores set to unlimited:
        # NOTE(review): this test never actually calls sqa_api.quota_reserve,
        # so the assertions below only verify the untouched fixture state.
        # Looks like a missing quota_reserve invocation — TODO confirm
        # against upstream history.
        self.flags(quota_cores=-1)
        context = self._init_usages(1, 8, 1 * 1024, 1)
        self.assertEqual(self.sync_called, set([]))
        self.usages_list[0]["in_use"] = 1
        self.usages_list[0]["reserved"] = 0
        self.usages_list[1]["in_use"] = 8
        self.usages_list[1]["reserved"] = 0
        self.usages_list[2]["in_use"] = 1 * 1024
        self.usages_list[2]["reserved"] = 0
        self.usages_list[3]["in_use"] = 1
        self.usages_list[3]["reserved"] = 0
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        self.assertEqual(self.reservations_created, {})
    def test_quota_reserve_ram_unlimited(self):
        # Requesting 10*1024 ram, quota_ram set to unlimited:
        # NOTE(review): as above, quota_reserve is never invoked here —
        # TODO confirm whether the call was accidentally dropped.
        self.flags(quota_ram=-1)
        context = self._init_usages(1, 1, 10 * 1024, 1)
        self.assertEqual(self.sync_called, set([]))
        self.usages_list[0]["in_use"] = 1
        self.usages_list[0]["reserved"] = 0
        self.usages_list[1]["in_use"] = 1
        self.usages_list[1]["reserved"] = 0
        self.usages_list[2]["in_use"] = 10 * 1024
        self.usages_list[2]["reserved"] = 0
        self.usages_list[3]["in_use"] = 1
        self.usages_list[3]["reserved"] = 0
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        self.assertEqual(self.reservations_created, {})
    def test_quota_reserve_reduction(self):
        """Pure reductions succeed and produce negative-delta reservations."""
        context = self._init_usages(10, 20, 20 * 1024, 10)
        self.deltas["instances"] = -2
        self.deltas["cores"] = -4
        self.deltas["ram"] = -2 * 1024
        self.deltas["fixed_ips"] = -2
        result = sqa_api.quota_reserve(context, self.resources, self.quotas,
                                       self.quotas, self.deltas, self.expire,
                                       0, 0)
        self.assertEqual(self.sync_called, set([]))
        self.usages_list[0]["in_use"] = 10
        self.usages_list[0]["reserved"] = 0
        self.usages_list[1]["in_use"] = 20
        self.usages_list[1]["reserved"] = 0
        self.usages_list[2]["in_use"] = 20 * 1024
        self.usages_list[2]["reserved"] = 0
        self.usages_list[3]["in_use"] = 10
        self.usages_list[3]["reserved"] = 0
        self.compare_usage(self.usages, self.usages_list)
        self.assertEqual(self.usages_created, {})
        reservations_list = self._update_reservations_list(False, True)
        self.compare_reservation(result, reservations_list)
class NoopQuotaDriverTestCase(test.TestCase):
    """Tests for quota.NoopQuotaDriver: every lookup should report -1
    (unlimited) for limits and, where applicable, usages."""
    def setUp(self):
        super(NoopQuotaDriverTestCase, self).setUp()
        self.flags(quota_instances=10,
                   quota_cores=20,
                   quota_ram=50 * 1024,
                   quota_floating_ips=10,
                   quota_metadata_items=128,
                   quota_injected_files=5,
                   quota_injected_file_content_bytes=10 * 1024,
                   quota_injected_file_path_bytes=255,
                   quota_security_groups=10,
                   quota_security_group_rules=20,
                   reservation_expire=86400,
                   until_refresh=0,
                   max_age=0,
                   )
        # Precompute the three expected result shapes: with usage info,
        # limit-only dicts, and plain limit values.  The noop driver
        # reports -1 everywhere regardless of the flags set above.
        self.expected_with_usages = {}
        self.expected_without_usages = {}
        self.expected_without_dict = {}
        for r in quota.QUOTAS._resources:
            self.expected_with_usages[r] = dict(limit=-1,
                                                in_use=-1,
                                                reserved=-1)
            self.expected_without_usages[r] = dict(limit=-1)
            self.expected_without_dict[r] = -1
        self.driver = quota.NoopQuotaDriver()
    def test_get_defaults(self):
        # Use our pre-defined resources
        result = self.driver.get_defaults(None, quota.QUOTAS._resources)
        self.assertEqual(self.expected_without_dict, result)
    def test_get_class_quotas(self):
        result = self.driver.get_class_quotas(None,
                                              quota.QUOTAS._resources,
                                              'test_class')
        self.assertEqual(self.expected_without_dict, result)
    def test_get_class_quotas_no_defaults(self):
        # defaults=False should not change the noop driver's answer.
        result = self.driver.get_class_quotas(None,
                                              quota.QUOTAS._resources,
                                              'test_class',
                                              False)
        self.assertEqual(self.expected_without_dict, result)
    def test_get_project_quotas(self):
        result = self.driver.get_project_quotas(None,
                                                quota.QUOTAS._resources,
                                                'test_project')
        self.assertEqual(self.expected_with_usages, result)
    def test_get_user_quotas(self):
        result = self.driver.get_user_quotas(None,
                                             quota.QUOTAS._resources,
                                             'test_project',
                                             'fake_user')
        self.assertEqual(self.expected_with_usages, result)
    def test_get_project_quotas_no_defaults(self):
        result = self.driver.get_project_quotas(None,
                                                quota.QUOTAS._resources,
                                                'test_project',
                                                defaults=False)
        self.assertEqual(self.expected_with_usages, result)
    def test_get_user_quotas_no_defaults(self):
        result = self.driver.get_user_quotas(None,
                                             quota.QUOTAS._resources,
                                             'test_project',
                                             'fake_user',
                                             defaults=False)
        self.assertEqual(self.expected_with_usages, result)
    def test_get_project_quotas_no_usages(self):
        # usages=False drops the in_use/reserved keys from each entry.
        result = self.driver.get_project_quotas(None,
                                                quota.QUOTAS._resources,
                                                'test_project',
                                                usages=False)
        self.assertEqual(self.expected_without_usages, result)
    def test_get_user_quotas_no_usages(self):
        result = self.driver.get_user_quotas(None,
                                             quota.QUOTAS._resources,
                                             'test_project',
                                             'fake_user',
                                             usages=False)
        self.assertEqual(self.expected_without_usages, result)
| apache-2.0 |
aws/aws-sdk-js | test/services/sts.spec.js | 12227 | // Generated by CoffeeScript 1.12.3
(function() {
var AWS, helpers;
helpers = require('../helpers');
AWS = helpers.AWS;
describe('AWS.STS', function() {
var sts;
sts = null;
beforeEach(function() {
return sts = new AWS.STS();
});
describe('credentialsFrom', function() {
it('returns null if no data is provided', function() {
return expect(sts.credentialsFrom(null)).to.equal(null);
});
it('creates a TemporaryCredentials object with hydrated data', function() {
var creds;
creds = sts.credentialsFrom({
Credentials: {
AccessKeyId: 'KEY',
SecretAccessKey: 'SECRET',
SessionToken: 'TOKEN',
Expiration: new Date(0)
}
});
expect(creds instanceof AWS.TemporaryCredentials);
expect(creds.accessKeyId).to.equal('KEY');
expect(creds.secretAccessKey).to.equal('SECRET');
expect(creds.sessionToken).to.equal('TOKEN');
expect(creds.expireTime).to.eql(new Date(0));
return expect(creds.expired).to.equal(false);
});
it('updates an existing Credentials object with hydrated data', function() {
var creds, data;
data = {
Credentials: {
AccessKeyId: 'KEY',
SecretAccessKey: 'SECRET',
SessionToken: 'TOKEN',
Expiration: new Date(0)
}
};
creds = new AWS.Credentials;
sts.credentialsFrom(data, creds);
expect(creds).to.be.instanceOf(AWS.Credentials);
expect(creds.accessKeyId).to.equal('KEY');
expect(creds.secretAccessKey).to.equal('SECRET');
expect(creds.sessionToken).to.equal('TOKEN');
expect(creds.expireTime).to.eql(new Date(0));
return expect(creds.expired).to.equal(false);
});
});
describe('assumeRoleWithWebIdentity', function() {
var service;
service = new AWS.STS;
it('sends an unsigned POST request', function() {
var params;
helpers.mockHttpResponse(200, {}, '{}');
params = {
RoleArn: 'ARN',
RoleSessionName: 'NAME',
WebIdentityToken: 'TOK'
};
return service.assumeRoleWithWebIdentity(params, function() {
var hr;
hr = this.request.httpRequest;
expect(hr.method).to.equal('POST');
expect(hr.body).to.equal('Action=AssumeRoleWithWebIdentity&' + 'RoleArn=ARN&RoleSessionName=NAME&Version=' + service.api.apiVersion + '&WebIdentityToken=TOK');
expect(hr.headers['Authorization']).to.equal(void 0);
expect(hr.headers['Content-Type']).to.equal('application/x-www-form-urlencoded; charset=utf-8');
return expect(hr.path).to.equal('/');
});
});
it('can build a post request on a mounted path (custom endpoint)', function() {
var params;
helpers.mockHttpResponse(200, {}, '{}');
service = new AWS.STS({
endpoint: 'http://localhost/foo/bar'
});
params = {
RoleArn: 'ARN',
RoleSessionName: 'NAME',
WebIdentityToken: 'TOK'
};
return service.assumeRoleWithWebIdentity(params, function() {
var hr;
hr = this.request.httpRequest;
expect(hr.path).to.equal('/foo/bar');
return expect(hr.body).to.equal('Action=AssumeRoleWithWebIdentity&' + 'RoleArn=ARN&RoleSessionName=NAME&Version=' + service.api.apiVersion + '&WebIdentityToken=TOK');
});
});
});
describe('assumeRoleWithSAML', function() {
var service;
service = new AWS.STS;
return it('sends an unsigned POST request', function() {
var params;
helpers.mockHttpResponse(200, {}, '{}');
params = {
RoleArn: 'ARN',
PrincipalArn: 'PARN',
SAMLAssertion: 'OK'
};
return service.assumeRoleWithSAML(params, function() {
var hr;
hr = this.request.httpRequest;
expect(hr.method).to.equal('POST');
expect(hr.body).to.equal('Action=AssumeRoleWithSAML&' + 'PrincipalArn=PARN&RoleArn=ARN&SAMLAssertion=OK&' + 'Version=' + service.api.apiVersion);
expect(hr.headers['Authorization']).to.equal(void 0);
expect(hr.headers['Content-Type']).to.equal('application/x-www-form-urlencoded; charset=utf-8');
return expect(hr.path).to.equal('/');
});
});
});
describe('regional endpoints', function() {
describe('stsRegionalConfig client config', function() {
it ('should set the service client stsRegionalConfig config', function() {
helpers.mockHttpResponse(200, {}, '{}');
var values = ['regional', 'RegionaL', 'legacy', 'LegacY'];
for (var i = 0; i < values.length; i++) {
var sts = new AWS.STS({stsRegionalEndpoints: values[i]});
var request = sts.getCallerIdentity().build(function() {});
expect(['regional', 'legacy'].indexOf(request.service.config.stsRegionalEndpoints) >= 0).to.equal(true);
}
});
it('should throw if the config is set to invalid values', function() {
helpers.mockHttpResponse(200, {}, '{}');
var values = ['foo', 'bar', 'region'];
var errors = [];
for (var i = 0; i < values.length; i++) {
var sts = new AWS.STS({stsRegionalEndpoints: values[i]});
sts.getCallerIdentity().build(function(err) {
errors.push(err);
});
}
expect(errors.length).to.equal(values.length);
for (var i = 0; i < errors.length; i++) {
expect(errors[i].code).to.equal('InvalidConfiguration');
}
});
});
if (AWS.util.isNode()) {
describe('AWS_STS_REGIONAL_ENDPOINTS environmental variable', function() {
var originalEnv;
beforeEach(function() {
originalEnv = process.env;
process.env = {};
});
afterEach(function() {
process.env = originalEnv;
});
it('should be used if client config is not set', function() {
process.env.AWS_STS_REGIONAL_ENDPOINTS = 'Regional';
var sts = new AWS.STS();
sts.getCallerIdentity().build(function(err) {});
expect(sts.config.stsRegionalEndpoints).to.equal('regional');
process.env.AWS_STS_REGIONAL_ENDPOINTS = 'LegacY';
sts = new AWS.STS();
sts.getCallerIdentity().build(function(err) {});
expect(sts.config.stsRegionalEndpoints).to.equal('legacy');
});
it('should throw if the config is set to invalid values', function() {
var values = ['foo', 'bar', 'region'];
var errors = [];
for (var i = 0; i < values.length; i++) {
process.env.AWS_STS_REGIONAL_ENDPOINTS = values[i];
sts = new AWS.STS();
sts.getCallerIdentity().build(function(err) {
errors.push(err);
});
}
expect(errors.length).to.equal(values.length);
for (var i = 0; i < errors.length; i++) {
expect(errors[i].code).to.equal('InvalidEnvironmentalVariable');
}
});
});
describe('sts_regional_endpoints config file entry', function() {
it('should be used if environmental variable is not set', function() {
helpers.spyOn(AWS.util, 'getProfilesFromSharedConfig').andReturn({
default: {
sts_regional_endpoints: 'RegionaL'
}
});
var sts = new AWS.STS();
sts.getCallerIdentity().build(function() {});
expect(sts.config.stsRegionalEndpoints).to.equal('regional');
helpers.spyOn(AWS.util, 'getProfilesFromSharedConfig').andReturn({
default: {
sts_regional_endpoints: 'LegaCy'
}
});
var sts = new AWS.STS();
sts.getCallerIdentity().build(function() {});
expect(sts.config.stsRegionalEndpoints).to.equal('legacy');
});
it('should throw if the config is set to invalid values', function() {
var values = ['foo', 'bar', 'region'];
var errors = [];
for (var i = 0; i < values.length; i++) {
helpers.spyOn(AWS.util, 'getProfilesFromSharedConfig').andReturn({
default: {
sts_regional_endpoints: values[i]
}
});
sts = new AWS.STS();
sts.getCallerIdentity().build(function(err) {
errors.push(err);
});
}
expect(errors.length).to.equal(values.length);
for (var i = 0; i < errors.length; i++) {
expect(errors[i].code).to.equal('InvalidConfiguration');
}
});
});
}
describe('service client stsRegionalConfig config', function() {
var originalRegion;
var originalEnv;
beforeEach(function() {
originalRegion = AWS.config.region;
AWS.config.region = undefined;
//fix CodeBuild test because it comes with AWS_REGION in environment
if (AWS.util.isNode()) {
originalEnv = process.env;
process.env = originalEnv;
}
});
afterEach(function() {
AWS.config.region = originalRegion;
if (AWS.util.isNode()) {
process.env = {};
}
});
it('should use global endpoints for when config is undefined', function() {
var regions = ['us-west-2', 'ap-east-1'];
for (var i = 0; i < regions.length; i++) {
var sts = new AWS.STS({region: regions[i]});
var request = sts.getCallerIdentity().build(function() {});
expect(request.httpRequest.endpoint.hostname).to.equal('sts.amazonaws.com');
}
var sts = new AWS.STS({region: 'cn-north-1'});
request = sts.getCallerIdentity().build(function() {});
expect(request.httpRequest.endpoint.hostname).to.equal('sts.cn-north-1.amazonaws.com.cn');
});
it('should use global endpoints for when config is set to legacy', function() {
var regions = ['us-west-2', 'ap-east-1'];
for (var i = 0; i < regions.length; i++) {
var sts = new AWS.STS({region: regions[i], stsRegionalEndpoints: 'legacy'});
var request = sts.getCallerIdentity().build(function() {});
expect(request.httpRequest.endpoint.hostname).to.equal('sts.amazonaws.com');
}
var sts = new AWS.STS({region: 'cn-north-1', stsRegionalEndpoints: 'legacy'});
request = sts.getCallerIdentity().build(function() {});
expect(request.httpRequest.endpoint.hostname).to.equal('sts.cn-north-1.amazonaws.com.cn');
});
it('should use regional endpoints for when config is set to regional', function() {
var regions = ['us-west-2', 'ap-east-1'];
for (var i = 0; i < regions.length; i++) {
var sts = new AWS.STS({region: regions[i], stsRegionalEndpoints: 'regional'});
var request = sts.getCallerIdentity().build(function() {});
expect(request.httpRequest.endpoint.hostname).to.equal('sts.' + regions[i] + '.amazonaws.com');
}
var sts = new AWS.STS({region: 'cn-north-1', stsRegionalEndpoints: 'regional'});
request = sts.getCallerIdentity().build(function() {});
expect(request.httpRequest.endpoint.hostname).to.equal('sts.cn-north-1.amazonaws.com.cn');
});
it('should ask for region if stsRegionalEndpoints is set', function() {
var error;
sts = new AWS.STS({stsRegionalEndpoints: 'regional'});
sts.getCallerIdentity().build(function(err) {
error = err;
});
expect(error.code).to.equal('ConfigError');
expect(error.message).to.equal('Missing region in config');
});
});
});
});
}).call(this);
| apache-2.0 |
p4scu41/sail | include/clases/IndicadorDiagnosticoOportuno.php | 6226 | <?php
/**
 * "Diagnóstico Oportuno" (timely diagnosis) indicator.
 *
 * resultado = (new cases diagnosed without any disability /
 *              total new diagnosed cases) * 100
 * indice    = resultado * ponderacion / 100
 *
 * Scope is selected by idCatEstado / idCatJurisdiccion:
 *   idCatEstado == 0        -> national
 *   idCatJurisdiccion == 0  -> whole state
 *   otherwise               -> a single jurisdiction within the state
 */
class IndicadorDiagnosticoOportuno {
	// Input values
	public $idCatEstado;       // state catalogue id; 0 = national
	public $idCatJurisdiccion; // jurisdiction catalogue id; 0 = whole state
	public $fechaInicio;       // period start date
	public $fechaFin;          // period end date
	// Fixed values
	public $estandar = 100;    // target value (%)
	public $ponderacion = 30;  // weight of this indicator in the overall index
	public $nombre = "Diagnóstico Oportuno";
	// Computed values
	public $resultado;         // % of new cases without disability, or "-" when no cases
	public $indice;            // weighted contribution, or "No Aplica" when no cases
	public $CasosNuevosSinDiscapacidad;
	public $totalCasosNuevosDiagnosticados;
	// Error reporting
	public $error = false;     // true when inputs are missing or a query fails
	public $msgError;

	/**
	 * Computes the indicator for the configured scope and period.
	 *
	 * Sets $error/$msgError instead of throwing on missing inputs or query
	 * failure. When no new cases were diagnosed in the period, $resultado
	 * is "-" and $indice is "No Aplica".
	 */
	public function calcular() {
		if (is_null($this->idCatEstado) || is_null($this->idCatJurisdiccion) || is_null($this->fechaInicio) || is_null($this->fechaFin)) {
			$this->error = true;
			$this->msgError = "El indicador requiere del identificador de estado y jurisdiccion, asi como de una fecha de inicio y fin.";
		} else {
			// Numerator: new cases in the period with no disability flag set.
			// Default query: state-wide scope.
			$sql = "SELECT count(DISTINCT d.idPaciente) AS CasosNuevosSinDiscapacidad " .
				"FROM diagnostico d, pacientes p " .
				"WHERE d.idPaciente = p.idPaciente " .
				"AND d.discOjoIzq = 0 " .
				"AND d.discOjoDer = 0 " .
				"AND d.discManoIzq = 0 " .
				"AND d.discManoDer = 0 " .
				"AND d.discPieIzq = 0 " .
				"AND d.discPieDer = 0 " .
				"AND p.fechaDiagnostico BETWEEN '" . formatFechaObj($this->fechaInicio, 'Y-m-d') . "' AND '" . formatFechaObj($this->fechaFin, 'Y-m-d') . "'" .
				"AND p.idCatEstado = " . $this->idCatEstado . ";";
			// Jurisdiction scope: restrict via the municipality catalogue.
			if ($this->idCatJurisdiccion != 0)
				$sql = "SELECT count(DISTINCT d.idPaciente) AS CasosNuevosSinDiscapacidad " .
					"FROM diagnostico d, pacientes p, catMunicipio m " .
					"WHERE d.idPaciente = p.idPaciente " .
					"AND m.idCatEstado = p.idCatEstado " .
					"AND p.idCatMunicipio = m.idCatMunicipio " .
					"AND m.idCatJurisdiccion = " . $this->idCatJurisdiccion . " " .
					"AND d.discOjoIzq = 0 " .
					"AND d.discOjoDer = 0 " .
					"AND d.discManoIzq = 0 " .
					"AND d.discManoDer = 0 " .
					"AND d.discPieIzq = 0 " .
					"AND d.discPieDer = 0 " .
					"AND p.fechaDiagnostico BETWEEN '" . formatFechaObj($this->fechaInicio, 'Y-m-d') . "' AND '" . formatFechaObj($this->fechaFin, 'Y-m-d') . "'" .
					"AND p.idCatEstado = " . $this->idCatEstado . ";";
			// National scope: no state filter at all.
			if ($this->idCatEstado == 0)
				$sql = "SELECT count(DISTINCT d.idPaciente) AS CasosNuevosSinDiscapacidad " .
					"FROM diagnostico d, pacientes p " .
					"WHERE d.idPaciente = p.idPaciente " .
					"AND d.discOjoIzq = 0 " .
					"AND d.discOjoDer = 0 " .
					"AND d.discManoIzq = 0 " .
					"AND d.discManoDer = 0 " .
					"AND d.discPieIzq = 0 " .
					"AND d.discPieDer = 0 " .
					"AND p.fechaDiagnostico BETWEEN '" . formatFechaObj($this->fechaInicio, 'Y-m-d') . "' AND '" . formatFechaObj($this->fechaFin, 'Y-m-d') . "';";
			$consulta = ejecutaQueryClases($sql);
			// ejecutaQueryClases() returns a string describing the error on failure.
			if (is_string($consulta)) {
				$this->error = true;
				$this->msgError = $consulta . " SQL:" . $sql;
			} else {
				$tabla = devuelveRowAssoc($consulta);
				$this->CasosNuevosSinDiscapacidad = $tabla["CasosNuevosSinDiscapacidad"];
				// Denominator: all new cases diagnosed in the period, same scoping rules.
				$sql = "SELECT count(DISTINCT d.idPaciente) AS totalCasosNuevosDiagnosticados " .
					"FROM diagnostico d, pacientes p " .
					"WHERE d.idPaciente = p.idPaciente " .
					"AND p.fechaDiagnostico BETWEEN '" . formatFechaObj($this->fechaInicio, 'Y-m-d') . "' AND '" . formatFechaObj($this->fechaFin, 'Y-m-d') . "'" .
					"AND p.idCatEstado = " . $this->idCatEstado . ";";
				if ($this->idCatJurisdiccion != 0)
					$sql = "SELECT count(DISTINCT d.idPaciente) AS totalCasosNuevosDiagnosticados " .
						"FROM diagnostico d, pacientes p, catMunicipio m " .
						"WHERE d.idPaciente = p.idPaciente " .
						"AND m.idCatEstado = p.idCatEstado " .
						"AND p.idCatMunicipio = m.idCatMunicipio " .
						"AND m.idCatJurisdiccion = " . $this->idCatJurisdiccion . " " .
						"AND p.fechaDiagnostico BETWEEN '" . formatFechaObj($this->fechaInicio, 'Y-m-d') . "' AND '" . formatFechaObj($this->fechaFin, 'Y-m-d') . "'" .
						"AND p.idCatEstado = " . $this->idCatEstado . ";";
				if ($this->idCatEstado == 0)
					$sql = "SELECT count(DISTINCT d.idPaciente) AS totalCasosNuevosDiagnosticados " .
						"FROM diagnostico d, pacientes p " .
						"WHERE d.idPaciente = p.idPaciente " .
						"AND p.fechaDiagnostico BETWEEN '" . formatFechaObj($this->fechaInicio, 'Y-m-d') . "' AND '" . formatFechaObj($this->fechaFin, 'Y-m-d') . "'";
				$consulta = ejecutaQueryClases($sql);
				if (is_string($consulta)) {
					$this->error = true;
					$this->msgError = $consulta . " SQL:" . $sql;
				} else {
					$tabla = devuelveRowAssoc($consulta);
					$this->totalCasosNuevosDiagnosticados = $tabla["totalCasosNuevosDiagnosticados"];
					// Guard against division by zero when no cases were diagnosed.
					if ($this->totalCasosNuevosDiagnosticados != 0) {
						$this->resultado = ($this->CasosNuevosSinDiscapacidad / $this->totalCasosNuevosDiagnosticados) * 100;
						$this->indice = ($this->resultado * $this->ponderacion) / 100;
					} else {
						$this->resultado = "-";
						$this->indice = "No Aplica";
					}
				}
			}
		}
	}

	/**
	 * Echoes an HTML table with the indicator's scope, standard, result,
	 * weight, and weighted index. Call calcular() first.
	 */
	function imprimir() {
		// Resolve the human-readable state / jurisdiction names for the header.
		$sql = "SELECT e.nombre AS estado, j.nombre AS jurisdiccion FROM catJurisdiccion j, catEstado e WHERE j.idCatEstado = e.idCatEstado AND e.idCatEstado = " . $this->idCatEstado . " AND j.idCatJurisdiccion = " . $this->idCatJurisdiccion . ";";
		if ($this->idCatJurisdiccion == 0) $sql = "SELECT e.nombre AS estado FROM catEstado e WHERE e.idCatEstado = " . $this->idCatEstado . ";";
		$jurisdiccion = "";
		$estado = "";
		$consulta = ejecutaQueryClases($sql);
		if (!is_string($consulta)) {
			$tabla = devuelveRowAssoc($consulta);
			$estado = $tabla["estado"];
			$jurisdiccion = "Estatal";
			// Fixed: label previously contained a mojibake character ("Jurisdicci�n").
			if ($this->idCatJurisdiccion != 0) $jurisdiccion = "Jurisdicción #" . $this->idCatJurisdiccion . " " . $tabla["jurisdiccion"];
		}
		echo '<DIV CLASS="datagrid"><TABLE><THEAD><TR><TH COLSPAN="5">' . $estado . "<BR>" . $jurisdiccion . '</TH></TR>' .
			'<TR><TH>Indicador</TH><TH>Estándar</TH><TH>Resultado</TH><TH>Ponderación</TH><TH>Índice</TH></TR></THEAD>' .
			'<TR><TD>' . $this->nombre .
			'</TD><TD>' . $this->estandar . "%" .
			'</TD><TD>' . $this->resultado .
			'</TD><TD>' . $this->ponderacion .
			'</TD><TD>' . $this->indice .
			'</TD></TR></TABLE></DIV>';
	}
}
?>
| apache-2.0 |
stserp/erp1 | source/src/com/baosight/sts/st/rp/service/ServiceSTRP0403.java | 3825 | package com.baosight.sts.st.rp.service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.tools.ant.taskdefs.XSLTProcess.Param;
import com.baosight.iplat4j.core.ei.EiBlock;
import com.baosight.iplat4j.core.ei.EiBlockMeta;
import com.baosight.iplat4j.core.ei.EiColumn;
import com.baosight.iplat4j.core.ei.EiInfo;
import com.baosight.iplat4j.core.threadlocal.UserSession;
import com.baosight.iplat4j.ef.ui.cascade.CascadeSelect;
import com.baosight.iplat4j.ep.util.MethodParamConstants;
import com.baosight.sts.st.rp.domain.STRP0403;
import com.baosight.sts.util.BizConstants;
/**
 * Service class for screen STRP0403. Provides page initialization, the
 * paged result query, opening/period amount totals, and the cascading
 * bank-code tree lookup. All methods follow the iPlat4j EiInfo in/out
 * convention.
 */
public class ServiceSTRP0403 extends CascadeSelect {
	// Initial page load: delegate to the framework, then clear any rows
	// so the result grid starts empty.
	public EiInfo initLoad(EiInfo inInfo){
		EiInfo outInfo = super.initLoad(inInfo);
		outInfo.getBlock("result").setRows(null);
		return outInfo;
	}
	// Paged query of the "result" block, scoped to the session's org (segNo).
	public EiInfo query(EiInfo inInfo){
		STRP0403 strp0403 = new STRP0403();
		// Force the org segment from the session so callers cannot query
		// across organizations.
		inInfo.setCell("inqu_status", 0,"segNo",UserSession.getInSessionProperty(BizConstants.ORG_ID));
		inInfo.setMethodParam(MethodParamConstants.sqlName, "STRP0403.select");
		inInfo.setMethodParam(MethodParamConstants.daoEPBaseBean, strp0403);
		inInfo.setMethodParam(MethodParamConstants.inDataBlock, "result");
		inInfo.setMethodParam(MethodParamConstants.outDataBlock, "result");
		EiInfo outInfo = super.query(inInfo,true);
		return outInfo;
	}
	// Computes the opening-balance ("qichu") and in-period ("qijian")
	// amounts for the query criteria and stores them on the EiInfo.
	public EiInfo queryAmount(EiInfo inInfo){
		String segNo = (String) UserSession.getInSessionProperty(BizConstants.ORG_ID);
		String startDate = inInfo.getCellStr("inqu_status", 0, "startDate");
		EiBlock inquStatus;
		inquStatus = inInfo.getBlock("inqu_status");
		Map pMap = new HashMap();
		// NOTE(review): the fresh HashMap above is immediately discarded —
		// pMap is reassigned to the query-criteria row.
		pMap = inquStatus.getRow(0);
		pMap.put("segNo", segNo);
		// Default the start date when blank so the opening-balance query
		// covers everything from the beginning.
		if(startDate.equals("") || startDate.equals(" ")){
			startDate = "1900-01-01";
			pMap.put("startDate", startDate);
		}
		// Opening balance ("qichu")
		List qichuList = dao.query("STRP0403.qichu",pMap);
		if(qichuList != null && qichuList.size() > 0){
			String qichuString = String.valueOf(((Map)qichuList.get(0)).get("amount"));
			inInfo.set("qichu", qichuString);
		}
		// In-period amount ("qijian")
		List qijianList = dao.query("STRP0403.qijian",pMap);
		if(qijianList != null && qijianList.size() > 0){
			String qijianString = String.valueOf(((Map)qijianList.get(0)).get("amount"));
			inInfo.set("qijian", qijianString);
		}
		inInfo.setMsg("查询成功.");
		return inInfo;
	}
	// Tree-node list query: builds the bank-code dropdown tree for the
	// cascading select. The last entry of 'list' names the target block.
	public EiInfo queryEiInfo(List list, Map map) {
		String blockName = list.get(list.size() - 1).toString();
		String segNo = (String) UserSession.getInSessionProperty(BizConstants.ORG_ID);
		int index = list.size() - 1;
		EiInfo info = new EiInfo();
		EiBlockMeta meta = new EiBlockMeta();
		EiColumn eiColumn = null;
		List aa = null;
		map.put("segNo", segNo);
		map.put("codeType", "BANK_SUBJECTID");
		// Bank-code tree list for the inquiry area.
		// NOTE(review): the original comment said "supplier code tree list"
		// but the columns below are bank code/name — likely a stale
		// copy-paste; confirm against the source screen.
		if (blockName.equals("inqu_status-0-bankId")) {
			aa = dao.query("STRP0403.getList", map);
			switch (index) {
				case 0 :
					// Two display columns: bank code and bank name.
					eiColumn = new EiColumn("codeValue");
					eiColumn.setDescName("银行代码");
					meta.addMeta(eiColumn);
					eiColumn = new EiColumn("codeDesc");
					eiColumn.setDescName("银行名称");
					meta.addMeta(eiColumn);
					break;
			}
		}
		info.addBlock(blockName);
		EiBlock block = info.getBlock(blockName);
		block.setBlockMeta(meta);
		List querylist = queryList(aa, meta);
		// If any path segment is blank, return an empty node list.
		boolean flag = true;
		for (int i = 0; i < list.size(); i++) {
			if (list.get(i).toString().trim().equals("")) {
				flag = false;
			}
		}
		if (!flag) {
			querylist.clear();
			block.setRows(querylist);
		} else {
			block.setRows(querylist);
		}
		info.set("count", new Integer(querylist.size()));
		info.setMsg("查询成功.");
		return info;
	}
}
| apache-2.0 |
jy01649210/ambrose | hive/src/main/java/com/twitter/ambrose/hive/AmbroseHivePreJob.java | 3268 | package com.twitter.ambrose.hive;
import static com.twitter.ambrose.hive.reporter.AmbroseHiveReporterFactory.getEmbeddedProgressReporter;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.PreJobHook;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.mapred.JobConf;
import com.twitter.ambrose.hive.reporter.EmbeddedAmbroseHiveProgressReporter;
import com.twitter.ambrose.model.DAGNode;
import com.twitter.ambrose.model.Event;
import com.twitter.ambrose.model.Job;
import com.twitter.ambrose.model.Event.WorkflowProgressField;
public class AmbroseHivePreJob implements PreJobHook {
private static final Log LOG = LogFactory.getLog(AmbroseHiveStatPublisher.class);
private final Map<WorkflowProgressField, String> eventData =
new HashMap<WorkflowProgressField, String>(1);
public AmbroseHivePreJob() throws IOException {
Configuration conf = SessionState.get().getConf();
}
@Override
public void run(SessionState session, QueryPlan queryPlan, JobConf job,
Integer taskId) throws Exception {
Map<String, Double> counterValue = new HashMap<String, Double>();
// send job statistics to the Ambrose server
send(job, counterValue);
}
private void send(JobConf jobConf, Map<String, Double> counterValues) {
EmbeddedAmbroseHiveProgressReporter reporter = getEmbeddedProgressReporter();
Configuration conf = SessionState.get().getConf();
String queryId = AmbroseHiveUtil.getHiveQueryId(conf);
Map<String, DAGNode<Job>> nodeIdToDAGNode = reporter.getNodeIdToDAGNode();
String jobName = jobConf.getJobName();
String prefix = "";
// AmbroseHiveUtil.getNodeIdFromNodeName(jobConf, jobConf.getJobName())
// jobConf.getJobName() didn't equal to runningJob.getJobName, it contains prefix of the nodeName
for(String nodeKey : nodeIdToDAGNode.keySet()) {
prefix = nodeKey.split("_")[0];
if(jobName.contains(prefix)) break;
}
String nodeId = prefix + "_" + AmbroseHiveUtil.getHiveQueryId(jobConf);
DAGNode<Job> dagNode = nodeIdToDAGNode.get(nodeId);
if (dagNode == null) {
LOG.warn("jobStartedNotification - unrecorgnized operator name found for " + "jobId "
+ jobConf);
return;
}
HiveJob job = (HiveJob) dagNode.getJob();
// a job has been started
if (job.getId() == null) {
// job identifier on GUI
reporter.pushEvent(queryId, new Event.JobStartedEvent(dagNode));
Event<DAGNode<? extends Job>> event = new Event.JobProgressEvent(dagNode);
pushWorkflowProgress(queryId, reporter);
reporter.pushEvent(queryId, event);
}
}
private void pushWorkflowProgress(String queryId, EmbeddedAmbroseHiveProgressReporter reporter) {
eventData.put(WorkflowProgressField.workflowProgress,
Integer.toString(reporter.getOverallProgress()));
reporter.pushEvent(queryId, new Event.WorkflowProgressEvent(eventData));
}
} | apache-2.0 |
jlz27/gs-collections | collections/src/main/java/com/gs/collections/impl/lazy/iterator/ZipWithIndexIterator.java | 1456 | /*
* Copyright 2011 Goldman Sachs.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gs.collections.impl.lazy.iterator;
import java.util.Iterator;
import com.gs.collections.api.tuple.Pair;
import com.gs.collections.impl.tuple.Tuples;
/**
 * Decorates an iterator so that each element is returned together with its
 * zero-based position as a {@code Pair<T, Integer>}.
 */
public final class ZipWithIndexIterator<T>
        implements Iterator<Pair<T, Integer>>
{
    private final Iterator<T> delegate;
    private int position = 0;

    public ZipWithIndexIterator(Iterable<T> iterable)
    {
        this.delegate = iterable.iterator();
    }

    public void remove()
    {
        throw new UnsupportedOperationException("Cannot remove from a collect iterator");
    }

    public boolean hasNext()
    {
        return this.delegate.hasNext();
    }

    public Pair<T, Integer> next()
    {
        // The position advances even when the delegate throws, matching the
        // original try/finally behaviour.
        try
        {
            return Tuples.pair(this.delegate.next(), this.position);
        }
        finally
        {
            ++this.position;
        }
    }
}
| apache-2.0 |
1974kpkpkp/twitter-cldr-rb | spec/utils/code_points_spec.rb | 1818 | # encoding: UTF-8
# Copyright 2012 Twitter, Inc
# http://www.apache.org/licenses/LICENSE-2.0
require 'spec_helper'
# Specs for TwitterCldr::Utils::CodePoints: round-trip conversions between
# Unicode code points (integers) and characters / strings.
describe TwitterCldr::Utils::CodePoints do
  describe '#to_char' do
    it 'converts unicode code points to the actual character' do
      # U+221E is the infinity sign.
      TwitterCldr::Utils::CodePoints.to_char(0x221E).should == '∞'
    end
  end
  describe '#from_char' do
    it 'converts a character to a unicode code point' do
      TwitterCldr::Utils::CodePoints.from_char('∞').should == 0x221E
    end
  end
  describe '#to_chars' do
    it 'should handle an empty array' do
      TwitterCldr::Utils::CodePoints.to_chars([]).should == []
    end
    it 'converts an array of unicode code points to an array of chars' do
      TwitterCldr::Utils::CodePoints.to_chars([0x65, 0x73, 0x70]).should == %w[e s p]
    end
  end
  describe '#from_chars' do
    it 'should handle an empty array' do
      TwitterCldr::Utils::CodePoints.from_chars([]).should == []
    end
    it 'converts an array of chars to an array of unicode code points' do
      TwitterCldr::Utils::CodePoints.from_chars(%w[e s p]).should == [0x65, 0x73, 0x70]
    end
  end
  describe '#to_string' do
    it 'should handle an empty array' do
      TwitterCldr::Utils::CodePoints.to_string([]).should == ''
    end
    it 'converts an array of unicode code points to a string' do
      # 0xF1 is U+00F1 LATIN SMALL LETTER N WITH TILDE ("ñ").
      TwitterCldr::Utils::CodePoints.to_string([0x65, 0x73, 0x70, 0x61, 0xF1, 0x6F, 0x6C]).should == 'español'
    end
  end
  describe '#from_string' do
    it 'should handle an empty string' do
      TwitterCldr::Utils::CodePoints.from_string('').should == []
    end
    it 'converts a string into an array of unicode code points' do
      TwitterCldr::Utils::CodePoints.from_string('español').should == [0x65, 0x73, 0x70, 0x61, 0xF1, 0x6F, 0x6C]
    end
  end
end
kw217/omim | routing/osrm_router.cpp | 19229 | #include "cross_mwm_router.hpp"
#include "online_cross_fetcher.hpp"
#include "osrm2feature_map.hpp"
#include "osrm_helpers.hpp"
#include "osrm_router.hpp"
#include "turns_generator.hpp"
#include "platform/country_file.hpp"
#include "platform/platform.hpp"
#include "geometry/angles.hpp"
#include "geometry/distance.hpp"
#include "geometry/distance_on_sphere.hpp"
#include "indexer/ftypes_matcher.hpp"
#include "indexer/mercator.hpp"
#include "indexer/index.hpp"
#include "indexer/scales.hpp"
#include "coding/reader_wrapper.hpp"
#include "base/logging.hpp"
#include "base/math.hpp"
#include "base/scope_guard.hpp"
#include "base/timer.hpp"
#include "std/algorithm.hpp"
#include "std/limits.hpp"
#include "std/string.hpp"
#include "3party/osrm/osrm-backend/data_structures/query_edge.hpp"
#include "3party/osrm/osrm-backend/data_structures/internal_route_result.hpp"
#include "3party/osrm/osrm-backend/descriptors/description_factory.hpp"
#define INTERRUPT_WHEN_CANCELLED(DELEGATE) \
do \
{ \
if (DELEGATE.IsCancelled()) \
return Cancelled; \
} while (false)
namespace routing
{
namespace
{
size_t constexpr kMaxNodeCandidatesCount = 10;
double constexpr kFeatureFindingRectSideRadiusMeters = 1000.0;
double constexpr kMwmLoadedProgress = 10.0f;
double constexpr kPointsFoundProgress = 15.0f;
double constexpr kCrossPathFoundProgress = 50.0f;
double constexpr kPathFoundProgress = 70.0f;
// Osrm multiples seconds to 10, so we need to divide it back.
double constexpr kOSRMWeightToSecondsMultiplier = 1./10.;
} // namespace
// TODO (ldragunov) Switch all RawRouteData and incapsulate to own omim types.
using RawRouteData = InternalRouteResult;
// static
bool OsrmRouter::CheckRoutingAbility(m2::PointD const & startPoint, m2::PointD const & finalPoint,
                                     TCountryFileFn const & countryFileFn, Index * index)
{
  // Routing is possible only when both endpoints resolve to valid routing
  // mappings. Short-circuits: the finish is looked up only if the start is ok.
  RoutingIndexManager manager(countryFileFn, *index);
  bool const startValid = manager.GetMappingByPoint(startPoint)->IsValid();
  return startValid && manager.GetMappingByPoint(finalPoint)->IsValid();
}
// Constructs the router over the feature index; the mapping manager resolves
// mwm files per country via countryFileFn.
OsrmRouter::OsrmRouter(Index * index, TCountryFileFn const & countryFileFn)
    : m_pIndex(index), m_indexManager(countryFileFn, *index)
{
}

// Router identifier reported to the routing framework.
string OsrmRouter::GetName() const
{
  return "vehicle";
}

// Drops the cached finish-point candidates and all loaded mwm mappings.
void OsrmRouter::ClearState()
{
  m_cachedTargets.clear();
  m_cachedTargetPoint = m2::PointD::Zero();
  m_indexManager.Clear();
}
// Tries every (source, target) candidate pair and returns the first route
// found; rawRoutingResult holds the successful route on return.
bool OsrmRouter::FindRouteFromCases(TFeatureGraphNodeVec const & source,
                                    TFeatureGraphNodeVec const & target, TDataFacade & facade,
                                    RawRoutingResult & rawRoutingResult)
{
  /// @todo (ldargunov) make more complex nearest edge turnaround
  for (auto const & finishCandidate : target)
  {
    for (auto const & startCandidate : source)
    {
      if (FindSingleRoute(startCandidate, finishCandidate, facade, rawRoutingResult))
        return true;
    }
  }
  return false;
}
// Resolves the feature segment of an OSRM node that is nearest to |point| and
// stores it into |graphNode|. Iterates over all feature segments mapped to
// nodeId, projects |point| on each, keeps the closest intersecting candidate.
// CHECKs (fails hard) if no valid candidate is found.
void FindGraphNodeOffsets(uint32_t const nodeId, m2::PointD const & point,
                          Index const * pIndex, TRoutingMappingPtr & mapping,
                          FeatureGraphNode & graphNode)
{
  graphNode.segmentPoint = point;
  helpers::Point2PhantomNode::Candidate best;
  auto range = mapping->m_segMapping.GetSegmentsRange(nodeId);
  for (size_t i = range.first; i < range.second; ++i)
  {
    OsrmMappingTypes::FtSeg s;
    mapping->m_segMapping.GetSegmentByIndex(i, s);
    if (!s.IsValid())
      continue;
    FeatureType ft;
    Index::FeaturesLoaderGuard loader(*pIndex, mapping->GetMwmId());
    loader.GetFeatureByIndex(s.m_fid, ft);
    helpers::Point2PhantomNode::Candidate mappedSeg;
    helpers::Point2PhantomNode::FindNearestSegment(ft, point, mappedSeg);
    // Build a one-segment FtSeg around the projection to test intersection
    // with the node's own segment.
    OsrmMappingTypes::FtSeg seg;
    seg.m_fid = mappedSeg.m_fid;
    seg.m_pointStart = mappedSeg.m_segIdx;
    seg.m_pointEnd = mappedSeg.m_segIdx + 1;
    if (!s.IsIntersect(seg))
      continue;
    // Keep the candidate with the smallest projection distance.
    if (mappedSeg.m_dist < best.m_dist)
      best = mappedSeg;
  }
  CHECK_NOT_EQUAL(best.m_fid, kInvalidFid, ());
  graphNode.segment.m_fid = best.m_fid;
  graphNode.segment.m_pointStart = best.m_segIdx;
  graphNode.segment.m_pointEnd = best.m_segIdx + 1;
}
// Fills in the feature segment of a cross-mwm phantom node when it is not
// already set; |forward| selects which OSRM node id (forward/reverse) to use.
void CalculatePhantomNodeForCross(TRoutingMappingPtr & mapping, FeatureGraphNode & graphNode,
                                  Index const * pIndex, bool forward)
{
  // Nothing to do when the node already carries a valid segment.
  if (graphNode.segment.IsValid())
    return;
  uint32_t const nodeId =
      forward ? graphNode.node.forward_node_id : graphNode.node.reverse_node_id;
  CHECK_NOT_EQUAL(nodeId, INVALID_NODE_ID, ());
  FindGraphNodeOffsets(nodeId, graphNode.segmentPoint, pIndex, mapping, graphNode);
}
// TODO (ldragunov) move this function to cross mwm router
// TODO (ldragunov) process case when the start and the finish points are placed on the same edge.
// Stitches the per-mwm legs of a cross-mwm path into one Route: routes each
// leg inside its mwm, annotates it, then concatenates geometry, turns and
// time marks with index/time offsets applied.
OsrmRouter::ResultCode OsrmRouter::MakeRouteFromCrossesPath(TCheckedPath const & path,
                                                            RouterDelegate const & delegate,
                                                            Route & route)
{
  Route::TTurns TurnsDir;
  Route::TTimes Times;
  vector<m2::PointD> Points;
  for (RoutePathCross cross : path)
  {
    // Each leg starts and ends inside one and the same mwm.
    ASSERT_EQUAL(cross.startNode.mwmId, cross.finalNode.mwmId, ());
    RawRoutingResult routingResult;
    TRoutingMappingPtr mwmMapping = m_indexManager.GetMappingById(cross.startNode.mwmId);
    ASSERT(mwmMapping->IsValid(), ());
    // Guard keeps the mapping loaded for the duration of this leg.
    MappingGuard mwmMappingGuard(mwmMapping);
    UNUSED_VALUE(mwmMappingGuard);
    // Fill in the entry/exit phantom-node segments if they are missing.
    CalculatePhantomNodeForCross(mwmMapping, cross.startNode, m_pIndex, true /* forward */);
    CalculatePhantomNodeForCross(mwmMapping, cross.finalNode, m_pIndex, false /* forward */);
    if (!FindSingleRoute(cross.startNode, cross.finalNode, mwmMapping->m_dataFacade, routingResult))
      return OsrmRouter::RouteNotFound;
    if (!Points.empty())
    {
      // Remove road end point and turn instruction.
      Points.pop_back();
      TurnsDir.pop_back();
      Times.pop_back();
    }
    // Get annotated route.
    Route::TTurns mwmTurnsDir;
    Route::TTimes mwmTimes;
    vector<m2::PointD> mwmPoints;
    MakeTurnAnnotation(routingResult, mwmMapping, delegate, mwmPoints, mwmTurnsDir, mwmTimes);
    // Connect annotated route.
    // Shift this leg's turn/time indices by the number of points already
    // accumulated, and its times by the accumulated travel time.
    auto const pSize = static_cast<uint32_t>(Points.size());
    for (auto turn : mwmTurnsDir)
    {
      if (turn.m_index == 0)
        continue;
      turn.m_index += pSize;
      TurnsDir.push_back(turn);
    }
    double const estimationTime = Times.size() ? Times.back().second : 0.0;
    for (auto time : mwmTimes)
    {
      if (time.first == 0)
        continue;
      time.first += pSize;
      time.second += estimationTime;
      Times.push_back(time);
    }
    Points.insert(Points.end(), mwmPoints.begin(), mwmPoints.end());
  }
  route.SetGeometry(Points.begin(), Points.end());
  route.SetTurnInstructions(TurnsDir);
  route.SetSectionTimes(Times);
  return OsrmRouter::NoError;
}
// Top-level routing entry point. Resolves the mwm mappings for both
// endpoints, finds phantom-node candidates, then routes either inside a
// single mwm or across several, reporting progress through |delegate|.
OsrmRouter::ResultCode OsrmRouter::CalculateRoute(m2::PointD const & startPoint,
                                                  m2::PointD const & startDirection,
                                                  m2::PointD const & finalPoint,
                                                  RouterDelegate const & delegate, Route & route)
{
  my::HighResTimer timer(true);
  m_indexManager.Clear();  // TODO (Dragunov) make proper index manager cleaning
  TRoutingMappingPtr startMapping = m_indexManager.GetMappingByPoint(startPoint);
  TRoutingMappingPtr targetMapping = m_indexManager.GetMappingByPoint(finalPoint);
  // A mapping error takes precedence (e.g. missing mwm reported as absent
  // country); otherwise the point simply is not covered.
  if (!startMapping->IsValid())
  {
    ResultCode const code = startMapping->GetError();
    if (code != NoError)
    {
      route.AddAbsentCountry(startMapping->GetCountryName());
      return code;
    }
    return IRouter::StartPointNotFound;
  }
  if (!targetMapping->IsValid())
  {
    ResultCode const code = targetMapping->GetError();
    if (code != NoError)
    {
      route.AddAbsentCountry(targetMapping->GetCountryName());
      return code;
    }
    return IRouter::EndPointNotFound;
  }
  // Keep both mappings loaded for the whole calculation.
  MappingGuard startMappingGuard(startMapping);
  MappingGuard finalMappingGuard(targetMapping);
  UNUSED_VALUE(startMappingGuard);
  UNUSED_VALUE(finalMappingGuard);
  LOG(LINFO, ("Duration of the MWM loading", timer.ElapsedNano()));
  timer.Reset();
  delegate.OnProgress(kMwmLoadedProgress);
  // 3. Find start/end nodes.
  TFeatureGraphNodeVec startTask;
  {
    ResultCode const code = FindPhantomNodes(startPoint, startDirection,
                                             startTask, kMaxNodeCandidatesCount, startMapping);
    if (code != NoError)
      return code;
  }
  {
    // The finish candidates are cached: recompute only when the finish
    // point changed since the previous request.
    if (finalPoint != m_cachedTargetPoint)
    {
      ResultCode const code =
          FindPhantomNodes(finalPoint, m2::PointD::Zero(),
                           m_cachedTargets, kMaxNodeCandidatesCount, targetMapping);
      if (code != NoError)
        return code;
      m_cachedTargetPoint = finalPoint;
    }
  }
  INTERRUPT_WHEN_CANCELLED(delegate);
  LOG(LINFO, ("Duration of the start/stop points lookup", timer.ElapsedNano()));
  timer.Reset();
  delegate.OnProgress(kPointsFoundProgress);
  // 4. Find route.
  RawRoutingResult routingResult;
  // 4.1 Single mwm case
  if (startMapping->GetMwmId() == targetMapping->GetMwmId())
  {
    LOG(LINFO, ("Single mwm routing case"));
    // Cross-mwm contexts are not needed here; free them to save memory.
    m_indexManager.ForEachMapping([](pair<string, TRoutingMappingPtr> const & indexPair)
                                  {
                                    indexPair.second->FreeCrossContext();
                                  });
    if (!FindRouteFromCases(startTask, m_cachedTargets, startMapping->m_dataFacade,
                            routingResult))
    {
      return RouteNotFound;
    }
    INTERRUPT_WHEN_CANCELLED(delegate);
    delegate.OnProgress(kPathFoundProgress);
    // 5. Restore route.
    Route::TTurns turnsDir;
    Route::TTimes times;
    vector<m2::PointD> points;
    MakeTurnAnnotation(routingResult, startMapping, delegate, points, turnsDir, times);
    route.SetGeometry(points.begin(), points.end());
    route.SetTurnInstructions(turnsDir);
    route.SetSectionTimes(times);
    return NoError;
  }
  else  // 4.2 Multiple mwm case
  {
    LOG(LINFO, ("Multiple mwm routing case"));
    TCheckedPath finalPath;
    ResultCode code = CalculateCrossMwmPath(startTask, m_cachedTargets, m_indexManager, delegate,
                                            finalPath);
    timer.Reset();
    INTERRUPT_WHEN_CANCELLED(delegate);
    delegate.OnProgress(kCrossPathFoundProgress);
    // 5. Make generate answer
    if (code == NoError)
    {
      auto code = MakeRouteFromCrossesPath(finalPath, delegate, route);
      // Manually free all cross context allocations before geometry unpacking.
      m_indexManager.ForEachMapping([](pair<string, TRoutingMappingPtr> const & indexPair)
                                    {
                                      indexPair.second->FreeCrossContext();
                                    });
      LOG(LINFO, ("Make final route", timer.ElapsedNano()));
      timer.Reset();
      return code;
    }
    return OsrmRouter::RouteNotFound;
  }
}
// Collects up to |maxCount| phantom-node candidates near |point| by scanning
// features inside a ~1km rect of the given mwm; |direction| biases the
// candidate ranking. Returns RouteNotFound when nothing is close enough.
IRouter::ResultCode OsrmRouter::FindPhantomNodes(m2::PointD const & point,
                                                 m2::PointD const & direction,
                                                 TFeatureGraphNodeVec & res, size_t maxCount,
                                                 TRoutingMappingPtr const & mapping)
{
  ASSERT(mapping, ());
  helpers::Point2PhantomNode getter(*mapping, *m_pIndex, direction);
  getter.SetPoint(point);
  m_pIndex->ForEachInRectForMWM(getter, MercatorBounds::RectByCenterXYAndSizeInMeters(
                                            point, kFeatureFindingRectSideRadiusMeters),
                                scales::GetUpperScale(), mapping->GetMwmId());
  if (!getter.HasCandidates())
    return RouteNotFound;
  getter.MakeResult(res, maxCount);
  return NoError;
}
// @todo(vbykoianko) This method shall to be refactored. It shall be split into several
// methods. All the functionality shall be moved to the turns_generator unit.
// @todo(vbykoianko) For the time being MakeTurnAnnotation generates the turn annotation
// and the route polyline at the same time. It is better to generate it separately
// to be able to use the route without turn annotation.
// Converts a raw OSRM routing result into the route polyline (|points|),
// turn instructions (|turnsDir|) and cumulative ETA marks (|times|).
OsrmRouter::ResultCode OsrmRouter::MakeTurnAnnotation(
    RawRoutingResult const & routingResult, TRoutingMappingPtr const & mapping,
    RouterDelegate const & delegate, vector<m2::PointD> & points, Route::TTurns & turnsDir,
    Route::TTimes & times)
{
  ASSERT(mapping, ());
  typedef OsrmMappingTypes::FtSeg TSeg;
  // Phantom segments where the user's start/finish points were projected.
  TSeg const & segBegin = routingResult.sourceEdge.segment;
  TSeg const & segEnd = routingResult.targetEdge.segment;
  double estimatedTime = 0;
  LOG(LDEBUG, ("Shortest path length:", routingResult.shortestPathLength));
#ifdef DEBUG
  size_t lastIdx = 0;
#endif
  for (auto const & pathSegments : routingResult.unpackedPathSegments)
  {
    INTERRUPT_WHEN_CANCELLED(delegate);
    // Get all computed route coordinates.
    size_t const numSegments = pathSegments.size();
    for (size_t segmentIndex = 0; segmentIndex < numSegments; ++segmentIndex)
    {
      RawPathData const & pathData = pathSegments[segmentIndex];
      // Turns and ETA marks are emitted at node boundaries (between the
      // previous node and the current one).
      if (segmentIndex > 0 && !points.empty())
      {
        turns::TurnItem turnItem;
        turnItem.m_index = static_cast<uint32_t>(points.size() - 1);
        turns::TurnInfo turnInfo(*mapping, pathSegments[segmentIndex - 1].node, pathSegments[segmentIndex].node);
        turns::GetTurnDirection(*m_pIndex, turnInfo, turnItem);
        // ETA information.
        double const nodeTimeSeconds = pathData.segmentWeight * kOSRMWeightToSecondsMultiplier;
#ifdef DEBUG
        double distMeters = 0.0;
        for (size_t k = lastIdx + 1; k < points.size(); ++k)
          distMeters += MercatorBounds::DistanceOnEarth(points[k - 1], points[k]);
        LOG(LDEBUG, ("Speed:", 3.6 * distMeters / nodeTimeSeconds, "kmph; Dist:", distMeters, "Time:",
                     nodeTimeSeconds, "s", lastIdx, "e", points.size(), "source:", turnItem.m_sourceName,
                     "target:", turnItem.m_targetName));
        lastIdx = points.size();
#endif
        estimatedTime += nodeTimeSeconds;
        times.push_back(Route::TTimeItem(points.size(), estimatedTime));
        // Lane information.
        if (turnItem.m_turn != turns::TurnDirection::NoTurn)
        {
          turnItem.m_lanes = turns::GetLanesInfo(pathSegments[segmentIndex - 1].node,
                                                 *mapping, turns::GetLastSegmentPointIndex, *m_pIndex);
          turnsDir.push_back(move(turnItem));
        }
      }
      // All feature segments mapped to the current OSRM node.
      buffer_vector<TSeg, 8> buffer;
      mapping->m_segMapping.ForEachFtSeg(pathData.node, MakeBackInsertFunctor(buffer));
      // Returns the buffer index of the segment intersecting |seg|; asserts
      // that one exists.
      auto FindIntersectingSeg = [&buffer] (TSeg const & seg) -> size_t
      {
        ASSERT(seg.IsValid(), ());
        auto const it = find_if(buffer.begin(), buffer.end(), [&seg] (OsrmMappingTypes::FtSeg const & s)
                                {
                                  return s.IsIntersect(seg);
                                });
        ASSERT(it != buffer.end(), ());
        return distance(buffer.begin(), it);
      };
      //m_mapping.DumpSegmentByNode(path_data.node);
      bool const isStartSegment = (segmentIndex == 0);
      bool const isEndSegment = (segmentIndex == numSegments - 1);
      // Calculate estimated time for a start and a end node cases.
      if (isStartSegment || isEndSegment)
      {
        double multiplier = 1.;
        double weight = 0.;
        if (isStartSegment)
        {
          // -1 because a whole node weight is already in esimated time, and we need to substruct time
          // form a node start to a user point.
          multiplier = -1.;
          auto const & node = routingResult.sourceEdge.node;
          if (pathSegments[segmentIndex].node == node.forward_node_id)
            weight = node.forward_weight;
          else
            weight = node.reverse_weight;
        }
        if (isEndSegment)
        {
          auto const & node = routingResult.targetEdge.node;
          if (pathSegments[segmentIndex].node == node.forward_node_id)
            weight = node.forward_weight;
          else
            weight = node.reverse_weight;
        }
        estimatedTime += multiplier * kOSRMWeightToSecondsMultiplier * weight;
      }
      // Clip the first/last node's segment list to start/finish at the
      // user's projected points.
      size_t startK = 0, endK = buffer.size();
      if (isStartSegment)
      {
        if (!segBegin.IsValid())
          continue;
        startK = FindIntersectingSeg(segBegin);
      }
      if (isEndSegment)
      {
        if (!segEnd.IsValid())
          continue;
        endK = FindIntersectingSeg(segEnd) + 1;
      }
      for (size_t k = startK; k < endK; ++k)
      {
        TSeg const & seg = buffer[k];
        FeatureType ft;
        Index::FeaturesLoaderGuard loader(*m_pIndex, mapping->GetMwmId());
        loader.GetFeatureByIndex(seg.m_fid, ft);
        ft.ParseGeometry(FeatureType::BEST_GEOMETRY);
        auto startIdx = seg.m_pointStart;
        auto endIdx = seg.m_pointEnd;
        if (isStartSegment && k == startK && segBegin.IsValid())
          startIdx = (seg.m_pointEnd > seg.m_pointStart) ? segBegin.m_pointStart : segBegin.m_pointEnd;
        if (isEndSegment && k == endK - 1 && segEnd.IsValid())
          endIdx = (seg.m_pointEnd > seg.m_pointStart) ? segEnd.m_pointEnd : segEnd.m_pointStart;
        // Emit the feature's points in the direction of travel.
        if (startIdx < endIdx)
        {
          for (auto idx = startIdx; idx <= endIdx; ++idx)
            points.push_back(ft.GetPoint(idx));
        }
        else
        {
          // I use big signed type because endIdx can be 0.
          for (int64_t idx = startIdx; idx >= endIdx; --idx)
            points.push_back(ft.GetPoint(idx));
        }
      }
    }
  }
  if (points.size() < 2)
    return RouteNotFound;
  // Snap the route's first/last points to the user's projected positions.
  if (routingResult.sourceEdge.segment.IsValid())
    points.front() = routingResult.sourceEdge.segmentPoint;
  if (routingResult.targetEdge.segment.IsValid())
    points.back() = routingResult.targetEdge.segmentPoint;
  times.push_back(Route::TTimeItem(points.size() - 1, estimatedTime));
  if (routingResult.targetEdge.segment.IsValid())
  {
    turnsDir.emplace_back(
        turns::TurnItem(static_cast<uint32_t>(points.size()) - 1, turns::TurnDirection::ReachedYourDestination));
  }
  turns::FixupTurns(points, turnsDir);
#ifdef DEBUG
  for (auto t : turnsDir)
  {
    LOG(LDEBUG, (turns::GetTurnString(t.m_turn), ":", t.m_index, t.m_sourceName, "-", t.m_targetName, "exit:", t.m_exitNum));
  }
  size_t last = 0;
  double lastTime = 0;
  for (Route::TTimeItem & t : times)
  {
    double dist = 0;
    for (size_t i = last + 1; i <= t.first; ++i)
      dist += MercatorBounds::DistanceOnEarth(points[i - 1], points[i]);
    double time = t.second - lastTime;
    LOG(LDEBUG, ("distance:", dist, "start:", last, "end:", t.first, "Time:", time, "Speed:", 3.6 * dist / time));
    last = t.first;
    lastTime = t.second;
  }
#endif
  LOG(LDEBUG, ("Estimated time:", estimatedTime, "s"));
  return OsrmRouter::NoError;
}
} // namespace routing
| apache-2.0 |
PMBio/limix | src/limix/modules/CVarianceDecomposition.cpp | 18460 | // Copyright(c) 2014, The LIMIX developers(Christoph Lippert, Paolo Francesco Casale, Oliver Stegle)
//
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
#include "CVarianceDecomposition.h"
#include "limix/utils/matrix_helper.h"
#include "limix/mean/CSumLinear.h"
#include "limix/mean/CKroneckerMean.h"
#include "limix/gp/gp_kronSum.h"
#include "limix/LMM/lmm.h"
namespace limix {
/* AVarianceTerm */
// Base-class state: a term starts without a sample covariance (Knull),
// not fitted and not initialised.
AVarianceTerm::AVarianceTerm() {
	this->Knull=true;
	this->fitted=false;
	this->is_init=false;
}

AVarianceTerm::~AVarianceTerm() {
}

// Number of samples, taken from the column count of K; throws if K was
// never set.
muint_t AVarianceTerm::getNumberIndividuals() const
{
	if (Knull)
		throw CLimixException("CSingleTraitTerm: K needs to be set!");
	return (muint_t)this->K.cols();
}

// Stores the NxN sample covariance and wraps it in a fixed covariance
// function; K must be square.
void AVarianceTerm::setK(const MatrixXd& K)
{
	if(K.rows()!=K.cols())
		throw CLimixException("AVarianceTerm: K needs to be a squared matrix!");
	this->K=K;
	Kcf = PFixedCF(new CFixedCF(this->K));
	this->Knull = false;
}

// Copies K into *out.
void AVarianceTerm::agetK(MatrixXd *out) const
{
	(*out) = this->K;
}
/* CSingleTraitTerm */
// A single-trait variance term: operates directly on the (vectorised)
// phenotype, so K is expected to have dimension N*P.
CSingleTraitTerm::CSingleTraitTerm():AVarianceTerm() {
}

CSingleTraitTerm::CSingleTraitTerm(const MatrixXd& K):AVarianceTerm() {
	this->setK(K);
}

// Sample filtering is not supported for single-trait terms.
void CSingleTraitTerm::setSampleFilter(const MatrixXb& filter)
{
	throw CLimixException("not implementation error: setSampleFilter");
}

CSingleTraitTerm::~CSingleTraitTerm() {
}

// A single-trait term has no trait covariance by construction.
PCovarianceFunction CSingleTraitTerm::getTraitCovar() const
{
	throw CLimixException("CSingleTraitTerm: Not implemented for SingleTraitTerm");
}

// Scales are the parameters of the fixed covariance wrapper around K.
void CSingleTraitTerm::setScales(const VectorXd& scales)
{
	if (Knull)
		throw CLimixException("CSingleTraitTerm: K needs to be set!");
	this->Kcf->setParams(scales);
}

void CSingleTraitTerm::agetScales(VectorXd* out) const
{
	if (Knull)
		throw CLimixException("CSingleTraitTerm: K needs to be set!");
	(this->Kcf)->agetParams(out);
}

muint_t CSingleTraitTerm::getNumberScales() const
{
	if (Knull)
		throw CLimixException("CSingleTraitTerm: K needs to be set!");
	return this->Kcf->getNumberParams();
}

// Initialisation only validates that K is present.
void CSingleTraitTerm::initTerm()
{
	if (Knull)
		throw CLimixException("CSingleTraitTerm: K needs to be set!");
	this->is_init=true;
}

// The term's covariance is the fixed wrapper itself.
PCovarianceFunction CSingleTraitTerm::getCovariance() const
{
	if (!is_init) throw CLimixException("CSingleTraitTerm: the term is not initialised!");
	return this->Kcf;
}
/* CMultiTraitTerm */
// A multi-trait variance term: the full covariance is the Kronecker product
// of a PxP trait covariance and the NxN sample covariance K.
CMultiTraitTerm::CMultiTraitTerm(muint_t P):AVarianceTerm()
{
	this->P=P;
	this->isNull=true;
}

CMultiTraitTerm::CMultiTraitTerm(muint_t P, PCovarianceFunction traitCovar, const MatrixXd& K):AVarianceTerm()
{
	this->P=P;
	this->setTraitCovar(traitCovar);
	this->setK(K);
}

CMultiTraitTerm::~CMultiTraitTerm()
{
}

void CMultiTraitTerm::setTraitCovar(PCovarianceFunction traitCovar)
{
	this->traitCovariance=traitCovar;
	isNull=false;
}

PCovarianceFunction CMultiTraitTerm::getTraitCovar() const
{
	return this->traitCovariance;
}

// Scales are the parameters of the trait covariance (K itself is fixed).
void CMultiTraitTerm::setScales(const VectorXd& scales)
{
	if (isNull)
		throw CLimixException("CMultiTraitTerm: traitCovariance needs to be set!");
	this->traitCovariance->setParams(scales);
}

void CMultiTraitTerm::agetScales(VectorXd* out) const
{
	if (isNull)
		throw CLimixException("CMultiTraitTerm: traitCovariance needs to be set!");
	this->traitCovariance->agetParams(out);
}

muint_t CMultiTraitTerm::getNumberScales() const
{
	if (isNull)
		throw CLimixException("CMultiTraitTerm: traitCovariance needs to be set!");
	return this->traitCovariance->getNumberParams();
}

// Builds the Kronecker covariance traitCovariance (x) K. The K factor gets a
// single fixed (masked) unit parameter so only the trait side is optimised.
void CMultiTraitTerm::initTerm()
{
	if (isNull) throw CLimixException("CMultiTraitTerm: traitCovariance needs to be set!");
	if (Knull) throw CLimixException("CMultiTraitTerm: K needs to be set!");
	Kcf->setParams(VectorXd::Ones(1));
	Kcf->setParamMask(VectorXd::Zero(1));
	// InterTrait Covariance Matrix
	covariance = PKroneckerCF(new CKroneckerCF(traitCovariance,Kcf));
	this->is_init=true;
}

// Restricts the Kronecker covariance to the observed (trait, sample) cells.
// |filter| must be an (N x P)-shaped boolean mask (N*P rows after the
// internal linearisation); requires initTerm() to have been called.
void CMultiTraitTerm::setSampleFilter(const MatrixXb& filter)
{
	if (!is_init)
		throw CLimixException("sample Filter can only be aplied after the term is initialized");
	if (filter.rows()!=this->getNumberIndividuals()*this->P)
		throw CLimixException("filter dimensions do not match sample covariance");
	//linearize filter
	MatrixXb filter_ = filter;
	filter_.resize(filter.rows()*filter.cols(),1);
	//get full kronecker index and subset
	MatrixXi kroneckerindex;
	CKroneckerCF::createKroneckerIndex(&kroneckerindex,this->P,this->getNumberIndividuals());
	//subset
	MatrixXi kroneckerindex_;
	//std::cout << kroneckerindex;
	//std::cout << "\n" << "-----------------" << "\n";
	//kroneckerindex_.resize(kroneckerindex.rows(),kroneckerindex.cols());
	slice(kroneckerindex,filter_,kroneckerindex_);
	//std::cout << kroneckerindex_;
	//std::cout << "\n" << "-----------------" << "\n";
	//set as Kroneckerindex
	this->covariance->setKroneckerIndicator(kroneckerindex_);
}

PCovarianceFunction CMultiTraitTerm::getCovariance() const
{
	if (!is_init) throw CLimixException("CMultiTraitTerm: the term is not initialised!");
	return this->covariance;
}
/* CVarianceDecomposition */
// Holds the N x P phenotype plus the fixed-effect and variance terms that
// together define the variance-decomposition model.
CVarianceDecomposition::CVarianceDecomposition(const MatrixXd& pheno){
	this->setPheno(pheno);
	this->is_init=false;
	this->fast=false;
}

CVarianceDecomposition::~CVarianceDecomposition(){
}

// Removes all fixed effects and variance terms and invalidates the GP.
void CVarianceDecomposition::clear()
{
	this->fixedEffs.clear();
	this->designs.clear();
	this->terms.clear();
	this->is_init=false;
}

// Adds a fixed effect with an explicit trait design matrix.
// |fixed| must have N rows (one per sample); |design| must have P columns
// and at most P rows.
void CVarianceDecomposition::addFixedEffTerm(const MatrixXd& design, const MatrixXd& fixed)
{
	//if ((muint_t)fixed.cols()!=(muint_t)1 || (muint_t)fixed.rows()!=this->N)
	if ((muint_t)fixed.cols()<(muint_t)1 || (muint_t)fixed.rows()!=this->N)
		throw CLimixException("CVarianceDecomposition: the fixed effect must have shape (N,1+)");
	if ((muint_t)design.cols()!=(muint_t)P || (muint_t)design.rows()>(muint_t)P)
		throw CLimixException("CVarianceDecomposition: the design must have P columns and cannot have more than P rows");
	fixedEffs.push_back(fixed);
	designs.push_back(design);
	this->is_init=false;
}

// Convenience overload: identity trait design (effect applied to every trait).
// NOTE(review): the arguments are passed as (fixed, design) to the two-arg
// overload whose signature is (design, fixed) — looks swapped; verify.
void CVarianceDecomposition::addFixedEffTerm(const MatrixXd& fixed)
{
	MatrixXd design = MatrixXd::Identity(P,P);
	addFixedEffTerm(fixed,design);
}

// Copies the i-th fixed effect into *out; throws on out-of-range i.
void CVarianceDecomposition::getFixed(MatrixXd *out, const muint_t i) const
{
	if (i>=this->getNumberFixedEffs())
		throw CLimixException("CVarianceDecomposition: value out of range");
	(*out)=this->fixedEffs[i];
}

// Copies the i-th design matrix into *out; throws on out-of-range i.
void CVarianceDecomposition::getDesign(MatrixXd *out, const muint_t i) const
{
	if (i>=this->getNumberFixedEffs())
		throw CLimixException("CVarianceDecomposition: value out of range");
	(*out)=this->designs[i];
}

void CVarianceDecomposition::clearFixedEffs()
{
	this->fixedEffs.clear();
	this->designs.clear();
	this->is_init=false;
}

muint_t CVarianceDecomposition::getNumberFixedEffs() const
{
	return (muint_t)(this->fixedEffs.size());
}

// Stores the phenotype, caches N (samples) and P (traits) and records
// whether any entry is missing (NaN).
void CVarianceDecomposition::setPheno(const MatrixXd& pheno)
{
	// Set Phenoa and dimensions
	this->pheno = pheno;
	this->N = (muint_t)pheno.rows();
	this->P = (muint_t)pheno.cols();
	//check whether phenotype has NANs?
	phenoNAN = isnan(this->pheno);
	this->phenoNANany = phenoNAN.any();
}

void CVarianceDecomposition::getPheno(MatrixXd *out) const
{
	(*out)=this->pheno;
}
// Registers a variance term after validating its dimensions against the
// phenotype:
//  - a CMultiTraitTerm must cover P traits and N individuals;
//  - a CSingleTraitTerm operates on the vectorised phenotype and must have
//    dimension N*P.
// NOTE(fix): the original had no braces, so the else bound to the wrong if
// (dangling else) and the "N individuals" check ran for EVERY term, wrongly
// rejecting single-trait terms whenever P > 1. The braces below restore the
// structure the original indentation clearly intended.
void CVarianceDecomposition::addTerm(PVarianceTerm term)
{
	if (term->getName()=="CMultiTraitTerm")
	{
		if (term->getNumberTraits()!=this->P)
			throw CLimixException("CVarianceDecomposition: the term has incompatible number of traits");
		if (term->getNumberIndividuals()!=this->N)
			throw CLimixException("CVarianceDecomposition: the term has incompatible number of individual");
	}
	else if (term->getName()=="CSingleTraitTerm")
	{
		if (term->getNumberIndividuals()!=this->N*this->P)
			throw CLimixException("CVarianceDecomposition: the single trait term must have dimensions NP");
	}
	terms.push_back(term);
	// Adding a term invalidates any previously initialised GP.
	this->is_init=false;
}
void CVarianceDecomposition::addTerm(const MatrixXd& K)
{
	//TODO: unimplemented — K is silently ignored. Callers should use the
	// (traitCovar, K) overload or addTerm(PVarianceTerm) instead.
}
void CVarianceDecomposition::addTerm(PCovarianceFunction traitCovar, const MatrixXd& K)
{
	// Wrap the trait covariance and the sample covariance K into a
	// CMultiTraitTerm and register it via addTerm(PVarianceTerm).
	this->addTerm(PMultiTraitTerm(new CMultiTraitTerm(traitCovar->Kdim(),traitCovar,K)));
}
PVarianceTerm CVarianceDecomposition::getTerm(muint_t i) const
{
    // Bounds-checked access to the i-th variance term.
    if (i >= getNumberTerms())
        throw CLimixException("CVarianceDecomposition: value out of range");
    return terms[i];
}
void CVarianceDecomposition::clearTerms()
{
    // Remove all variance terms.
    // Fix: like addTerm() and clearFixedEffs(), mutating the term list
    // invalidates any previously initialised GP; the original omitted the
    // is_init reset, leaving a stale GP marked as initialised.
    this->terms.clear();
    this->is_init = false;
}
muint_t CVarianceDecomposition::getNumberTerms() const
{
    // Number of variance terms registered so far.
    return static_cast<muint_t>(terms.size());
}
void CVarianceDecomposition::setScales(const VectorXd& scales) const
{
	// Set all scale parameters at once on the summed covariance.
	// NOTE(review): `covar` is only assigned on the base path (initGPbase);
	// behaviour after initGPkronSum should be verified before calling this.
	if (this->is_init==0)
		throw CLimixException("CVarianceDecomposition: CVarianceDecomposition needs to be initialised");
	this->covar->setParams(scales);
}
void CVarianceDecomposition::setScales(muint_t i,const VectorXd& scales) const
{
    // Forward the scales to the i-th term; bounds-checked.
    if (i >= getNumberTerms())
        throw CLimixException("CVarianceDecomposition: value out of range");
    terms[i]->setScales(scales);
}
void CVarianceDecomposition::agetScales(muint_t i, VectorXd* out) const
{
    // Retrieve the scale vector of the i-th term; bounds-checked.
    if (i >= getNumberTerms())
        throw CLimixException("CVarianceDecomposition: value out of range");
    terms[i]->agetScales(out);
}
void CVarianceDecomposition::agetScales(VectorXd* out)
{
    // Concatenate the scale vectors of all terms into one column vector.
    out->resize(this->getNumberScales(), 1);
    muint_t offset = 0;
    for (PVarianceTermVec::iterator it = terms.begin(); it != terms.end(); ++it)
    {
        VectorXd termScales;
        (*it)->agetScales(&termScales);
        const muint_t count = (*it)->getNumberScales();
        out->block(offset, 0, count, 1) = termScales;
        offset += count;
    }
}
muint_t CVarianceDecomposition::getNumberScales()
{
    // Total count of free scale parameters across all terms.
    muint_t total = 0;
    for (PVarianceTermVec::iterator it = terms.begin(); it != terms.end(); ++it)
        total += (*it)->getNumberScales();
    return total;
}
void CVarianceDecomposition::initGP(bool fast)
{
	// Build the GP: fast = Kronecker-sum path (requires exactly two terms
	// and a fully observed phenotype), otherwise the generic
	// sum-of-covariances path.
	if (fast) initGPkronSum();
	else initGPbase();
}
void CVarianceDecomposition::initGPparams()
{
	/* Collect the current parameters of the covariance matrices / data term
	 * and push them into the GP object. Must run after
	 * initGPbase/initGPkronSum, which set is_init. */
	if (is_init!=1)
		throw CLimixException("CVarianceDecomposition:: initGP before initGPparams");
	CGPHyperParams params;
	if (fast) {
		// Kronecker-sum GP: four covariance blocks (row/column for each of
		// the two terms) plus the data term parameters.
		params["covarr1"] = static_pointer_cast<CGPkronSum>(gp)->getCovarr1()->getParams();
		params["covarc1"] = static_pointer_cast<CGPkronSum>(gp)->getCovarc1()->getParams();
		params["covarr2"] = static_pointer_cast<CGPkronSum>(gp)->getCovarr2()->getParams();
		params["covarc2"] = static_pointer_cast<CGPkronSum>(gp)->getCovarc2()->getParams();
		params["dataTerm"] = gp->getDataTerm()->getParams();
		gp->setParams(params);
	}
	else {
		// Base GP: single summed covariance; fixed-effect weights start
		// at zero (one column vector sized to the linear mean).
		params["covar"] = gp->getCovar()->getParams();
		muint_t ncols = static_pointer_cast<CLinearMean>(gp->getDataTerm())->getRowsParams();
		params["dataTerm"] = MatrixXd::Zero(ncols,1);
		gp->setParams(params);
	}
}
void CVarianceDecomposition::initGPbase()
{
	// Build the generic (non-Kronecker) GP: a summed covariance over all
	// terms, a vectorised phenotype, and a linear mean for the fixed effects.
	this->covar = PSumCF(new CSumCF());
	// Initialise every term's covariance and add it to the sum covariance.
	for(PVarianceTermVec::iterator iter = this->terms.begin(); iter!=this->terms.end();iter++)
	{
		PVarianceTerm term = iter[0];
		term->initTerm();
		this->covar->addCovariance(iter[0]->getCovariance());
	}
	// Build the fixed-effect design matrix for CLinearMean.
	// First pass: total number of columns = sum over terms of
	// (design rows) x (fixed-effect columns), i.e. the Kronecker width.
	muint_t numberCols = 0;
	MatrixXdVec::const_iterator design_iter = this->designs.begin();
	MatrixXdVec::const_iterator fixed_iter = this->fixedEffs.begin();
	for (; design_iter != this->designs.end(); design_iter++, fixed_iter++)
		numberCols += design_iter[0].rows()*fixed_iter[0].cols();
	// Second pass: fill each horizontal block with kron(design^T, fixedEff).
	MatrixXd fixed(this->N*this->P,numberCols);
	design_iter = this->designs.begin();
	fixed_iter = this->fixedEffs.begin();
	muint_t ncols = 0;
	for (; design_iter != this->designs.end(); design_iter++, fixed_iter++)
	{
		MatrixXd part;
		akron(part,design_iter[0].transpose(),fixed_iter[0]);
		// Block width must include the fixed-effect column count
		// (historical bug: an earlier version used only design rows).
		fixed.block(0,ncols,this->N*this->P,design_iter[0].rows()*fixed_iter[0].cols())=part;
		ncols+=design_iter[0].rows()*fixed_iter[0].cols();
	}
	// Vectorise the phenotype: y = vec(pheno) as an (N*P x 1) column.
	// NOTE(review): relies on Eigen keeping the data buffer when resize()
	// preserves the element count (a reshape) — confirm against the Eigen
	// version in use.
	MatrixXd y = this->pheno;
	y.resize(this->N*this->P,1);
	// Missing phenotype values: slice them out of y and the fixed design,
	// and install the matching sample filter in every term.
	if(this->phenoNANany)
	{
		// 1. vectorise the NaN mask to match y's layout
		MatrixXb Iselect = this->phenoNAN;
		Iselect.resize(this->N*this->P,1);
		// invert the mask: true = observed (keep), false = missing (drop)
		Iselect = Iselect.unaryExpr(std::ptr_fun(negate));
		// 2. keep only observed entries of y
		MatrixXd _y;
		slice(y,Iselect,_y);
		y = _y;
		// 3. keep matching rows of the fixed-effect design
		MatrixXd _fixed;
		slice(fixed,Iselect,_fixed);
		fixed = _fixed;
		// 4. propagate the sample filter to every term's covariance
		for(PVarianceTermVec::iterator iter = this->terms.begin(); iter!=this->terms.end();iter++)
		{
			PVarianceTerm term = iter[0];
			term->setSampleFilter(Iselect);
		}
	}
	// Assemble likelihood, linear mean and the GP itself.
	PLikNormalNULL lik(new CLikNormalNULL());
	PLinearMean mean(new CLinearMean(y,fixed));
	this->gp = PGPbase(new CGPbase(covar, lik, mean));
	this->gp->setY(y);
	this->fast=false;
	this->is_init=1;
	// Seed the GP parameters (requires is_init==1).
	this->initGPparams();
	// Optimiser bound to this GP.
	this->opt = PGPopt(new CGPopt(gp));
}
void CVarianceDecomposition::initGPkronSum()
{
	// Fast path: GP over a sum of two Kronecker-structured covariances.
	// Requires a fully observed phenotype, exactly two terms and >= 2 traits.
	if (this->phenoNANany)
		throw CLimixException("GPKronSum (fast inference) can only be used for full kronecker structured data");
	if (this->getNumberTerms()!=2)
		throw CLimixException("CVarianceDecomposition: fastGP only works for two terms");
	if (this->getNumberTraits()<2)
		throw CLimixException("CVarianceDecomposition: supported only for multiple traits");
	if (this->is_init && this->fast) {
		// Already initialised in fast mode: refresh the phenotype and
		// re-seed the parameters from the current term scales; fixed-effect
		// weights ("dataTerm") are reset to zero.
		this->gp->setY(pheno);
		CGPHyperParams params = this->gp->getParams();
		VectorXd covarParams;
		this->agetScales(0,&covarParams); params["covarc1"]=covarParams;
		this->agetScales(1,&covarParams); params["covarc2"]=covarParams;
		params["dataTerm"] = MatrixXd::Zero(params["dataTerm"].rows(),params["dataTerm"].cols());
		this->gp->setParams(params);
	}
	else
	{
		// Initialise both terms and collect their row (sample) and
		// column (trait) covariances.
		this->terms[0]->initTerm();
		this->terms[1]->initTerm();
		PCovarianceFunction covarr1 = this->terms[0]->getKcf();
		PCovarianceFunction covarr2 = this->terms[1]->getKcf();
		PCovarianceFunction covarc1 = this->terms[0]->getTraitCovar();
		PCovarianceFunction covarc2 = this->terms[1]->getTraitCovar();
		// Data term: one Kronecker mean (W, F, A) per fixed effect, with
		// the weight matrix W initialised to zero.
		MatrixXdVec::const_iterator fixed_iter = this->fixedEffs.begin();
		MatrixXdVec::const_iterator design_iter = this->designs.begin();
		PSumLinear mean(new CSumLinear());
		for (; design_iter != this->designs.end(); design_iter++, fixed_iter++) {
			MatrixXd A = design_iter[0];
			MatrixXd F = fixed_iter[0];
			MatrixXd W = MatrixXd::Zero(F.cols(),A.rows());
			mean->appendTerm(PKroneckerMean(new CKroneckerMean(pheno,W,F,A)));
		}
		// Gaussian likelihood with no free parameters.
		PLikNormalNULL lik(new CLikNormalNULL());
		// Assemble the Kronecker-sum GP.
		this->gp = PGPkronSum(new CGPkronSum(pheno, covarr1, covarc1, covarr2, covarc2, lik, mean));
		this->fast=true;
		this->is_init=1;
		// Seed the GP parameters (requires is_init==1).
		this->initGPparams();
		// Optimiser bound to this GP.
		this->opt = PGPopt(new CGPopt(gp));
	}
}
bool CVarianceDecomposition::trainGP()
{
	// Optimise the GP hyper-parameters; returns true on convergence.
	bool conv = false;
	// Lazily initialise the GP (default flavour) if not done yet.
	if (this->is_init==0) this->initGP();
	conv = this->opt->opt();
	// Additionally require every squared scale to stay below 10 —
	// presumably a heuristic guard against degenerate/diverged fits
	// (TODO confirm the intended bound).
	VectorXd scales;
	this->agetScales(&scales);
	conv &= (scales.unaryExpr(std::bind2nd( std::ptr_fun<double,double,double>(pow), 2) ).maxCoeff()<(mfloat_t)10.0);
	return conv;
}
void CVarianceDecomposition::getFixedEffects(VectorXd* out)
{
	// Fitted fixed-effect weights, read back from the GP's "dataTerm"
	// hyper-parameters. NOTE(review): no is_init guard — calling this before
	// initGP dereferences an unset gp; verify callers.
	(*out)=this->gp->getParams()["dataTerm"];
}
mfloat_t CVarianceDecomposition::getLML()
{
    // Log marginal likelihood; the GP stores the negative LML, so negate it.
    if (!this->is_init)
        throw CLimixException("CVarianceDecomposition: the term is not initialised!");
    return -1.0 * this->gp->LML();
}
mfloat_t CVarianceDecomposition::getLMLgrad()
{
    /* Norm of the LML gradient, dispatched to the active GP flavour.
     * Fix: the accumulator was declared `float` although both helpers and
     * the return type use mfloat_t, silently truncating the result to
     * single precision before returning it. */
    if (!this->is_init)
        throw CLimixException("CVarianceDecomposition: the term is not initialised!");
    mfloat_t out;
    if (this->fast) out = getLMLgradGPkronSum();
    else out = getLMLgradGPbase();
    return out;
}
mfloat_t CVarianceDecomposition::getLMLgradGPbase()
{
	// Euclidean norm of the LML gradient for the base GP, restricted to
	// unmasked covariance parameters plus all dataTerm parameters.
	if (!this->is_init)
		throw CLimixException("CVarianceDecomposition: the term is not initialised!")
;
	mfloat_t out = 0;
	// Squared norm over covariance gradient entries whose mask is 1.
	VectorXd grad = this->gp->LMLgrad()["covar"];
	VectorXd filter = this->gp->getParamMask()["covar"];
	for (muint_t i=0; i<(muint_t)grad.rows(); i++)
		if (filter(i)==1) out +=std::pow(grad(i),2);
	// Squared norm over the dataTerm (fixed-effect) gradient.
	grad = this->gp->LMLgrad()["dataTerm"];
	for (muint_t i=0; i<(muint_t)grad.rows(); i++) out +=std::pow(grad(i),2);
	// Square root gives the Euclidean norm.
	out = std::sqrt(out);
	return out;
}
mfloat_t CVarianceDecomposition::getLMLgradGPkronSum()
{
	// Euclidean norm of the LML gradient over the two trait covariances of
	// the Kronecker-sum GP. No is_init guard here: only reached through
	// getLMLgrad(), which checks initialisation first.
	mfloat_t out = 0;
	VectorXd grad = this->gp->LMLgrad()["covarc1"];
	for (muint_t i=0; i<(muint_t)grad.rows(); i++) out +=std::pow(grad(i),2);
	grad = this->gp->LMLgrad()["covarc2"];
	for (muint_t i=0; i<(muint_t)grad.rows(); i++) out +=std::pow(grad(i),2);
	// Square root gives the Euclidean norm.
	out = std::sqrt(out);
	return out;
}
void CVarianceDecomposition::aestimateHeritability(VectorXd* out, const MatrixXd& Y, const MatrixXd& fixed, const MatrixXd& K)
{
	/*
	 * Quick variance-component estimate via the LMM machinery: fits a
	 * single-kinship mixed model and writes a 2-vector into *out.
	 * An empty `fixed` falls back to an intercept-only covariate.
	 */
	MatrixXd covs;
	if(isnull(fixed))
		covs = MatrixXd::Ones(Y.rows(),1);
	else
		covs = fixed;
	// Mixed-model fit with a dummy all-zero SNP column: only the variance
	// components are of interest here.
	CLMM lmm;
	lmm.setK(K);
	lmm.setSNPs(MatrixXd::Zero(K.rows(),1));
	lmm.setPheno(Y);
	lmm.setCovs(covs);
	lmm.setVarcompApprox0(-20, 20, 1000);
	lmm.process();
	// delta0 = exp(log-delta), presumably the noise-to-genetic variance
	// ratio in the standard LMM parameterisation — TODO confirm; Vtotal is
	// the fitted (genetic) variance scale.
	mfloat_t delta0 = exp(lmm.getLdelta0()(0,0));
	mfloat_t Vtotal = exp(lmm.getLSigma()(0,0));
	// out = [Vtotal, Vtotal*delta0]
	VectorXd rv = VectorXd(2);
	rv(0) = Vtotal;
	rv(1) = Vtotal*delta0;
	(*out) =rv;
}
} //end:: namespace
| apache-2.0 |
endeavourhealth/EDS | src/eds-messaging-core/src/main/java/org/endeavourhealth/core/configuration/Credentials.java | 2099 |
package org.endeavourhealth.core.configuration;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
 * JAXB binding for the {@code Credentials} complex type: a username/password
 * pair, both required by the schema.
 *
 * <p>Corresponding schema fragment:</p>
 *
 * <pre>
 * &lt;complexType name="Credentials"&gt;
 *   &lt;sequence&gt;
 *     &lt;element name="Username" type="xs:string"/&gt;
 *     &lt;element name="Password" type="xs:string"/&gt;
 *   &lt;/sequence&gt;
 * &lt;/complexType&gt;
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Credentials", propOrder = {
    "username",
    "password"
})
public class Credentials {

    /** Mapped to the required {@code Username} element. */
    @XmlElement(name = "Username", required = true)
    protected String username;

    /** Mapped to the required {@code Password} element. */
    @XmlElement(name = "Password", required = true)
    protected String password;

    /**
     * @return the username, or {@code null} if not yet set
     */
    public String getUsername() {
        return this.username;
    }

    /**
     * @param value the username to store
     */
    public void setUsername(String value) {
        this.username = value;
    }

    /**
     * @return the password, or {@code null} if not yet set
     */
    public String getPassword() {
        return this.password;
    }

    /**
     * @param value the password to store
     */
    public void setPassword(String value) {
        this.password = value;
    }
}
| apache-2.0 |
zqh110110/base_http_rxjava_databing_commonutil | android-util-master/src/main/java/com/xjf/repository/utils/InputMethodUtils.java | 3005 | /*
* Copyright (C) 2013 Peng fei Pan <sky@xiaopan.me>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xjf.repository.utils;
import android.app.Activity;
import android.content.Context;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
public class InputMethodUtils {

    /**
     * Opens the soft keyboard for the given editor: requests focus, shows
     * the input method, and moves the cursor to the end of the text.
     *
     * @param context  context used to look up the input-method service
     * @param editText the editor that should receive focus and input
     */
    public static void openSoftKeyboard(Context context, EditText editText) {
        editText.requestFocus();
        InputMethodManager inputMethodManager
                = (InputMethodManager) context.getSystemService(
                Context.INPUT_METHOD_SERVICE);
        inputMethodManager.showSoftInput(editText,
                InputMethodManager.SHOW_IMPLICIT);
        ViewUtils.setEditTextSelectionToEnd(editText);
    }

    /**
     * Hides the soft keyboard if it is currently active for this activity.
     *
     * @param activity activity whose focused view owns the keyboard
     */
    public static void closeSoftKeyboard(Activity activity) {
        InputMethodManager inputMethodManager
                = (InputMethodManager) activity.getSystemService(
                Context.INPUT_METHOD_SERVICE);
        // Only attempt to hide when the input method is active.
        if (inputMethodManager.isActive()) {
            // inputMethodManager.hideSoftInputFromWindow(activity.getCurrentFocus().getWindowToken(),InputMethodManager.HIDE_NOT_ALWAYS);
            // (the line above NPEs when no view has focus — hence the check)
            if (activity.getCurrentFocus() != null) {
                inputMethodManager.hideSoftInputFromWindow(activity.getCurrentFocus().getWindowToken(), InputMethodManager.HIDE_NOT_ALWAYS);
            }
        }
    }

    /**
     * Toggles the soft keyboard between shown and hidden.
     *
     * @param context context used to look up the input-method service
     */
    public static void toggleSoftKeyboardState(Context context) {
        ((InputMethodManager) context.getSystemService(
                Context.INPUT_METHOD_SERVICE)).toggleSoftInput(
                InputMethodManager.SHOW_IMPLICIT,
                InputMethodManager.HIDE_NOT_ALWAYS);
    }

    /**
     * If the window reports an unspecified soft-input mode, forces the
     * keyboard hidden and returns true; otherwise returns false.
     * NOTE(review): softInputMode == SOFT_INPUT_STATE_UNSPECIFIED is a weak
     * proxy for "keyboard is showing" — confirm the intended behaviour.
     *
     * @param mActivity activity whose window attributes are inspected
     * @return true if the hidden soft-input mode was applied
     */
    public boolean keyBoxIsShow(Activity mActivity) {
        if (mActivity.getWindow().getAttributes().softInputMode == WindowManager.LayoutParams.SOFT_INPUT_STATE_UNSPECIFIED) {
            // hide the soft keyboard
            mActivity.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_HIDDEN);
            return true;
        } else {
            return false;
        }
    }
}
| apache-2.0 |
Padepokan79/PUBTeam | webseedJava/Controller/Dyah_Nuraeni/MdTbLainController.java | 208 | package app.controllers.api.masterdata;
import app.models.MdSkpdtbl;
import app.models.MdTbLain;
import core.controllers.CRUDController;
/**
 * REST controller for the {@code MdTbLain} master-data entity; all standard
 * create/read/update/delete endpoints are inherited from CRUDController.
 */
public class MdTbLainController extends CRUDController<MdTbLain>{
}
| apache-2.0 |
youdonghai/intellij-community | platform/testGuiFramework/testSrc/com/intellij/testGuiFramework/tests/samples/AddActionToolbarTest.java | 3193 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testGuiFramework.tests.samples;
import com.intellij.ide.ui.UISettings;
import com.intellij.testGuiFramework.fixtures.ActionButtonFixture;
import com.intellij.testGuiFramework.fixtures.IdeFrameFixture;
import com.intellij.testGuiFramework.fixtures.JDialogFixture;
import com.intellij.testGuiFramework.fixtures.SettingsTreeFixture;
import com.intellij.testGuiFramework.impl.GuiTestCase;
import com.intellij.ui.treeStructure.Tree;
import org.fest.swing.timing.Pause;
import org.junit.Ignore;
import org.junit.Test;
import static com.intellij.testGuiFramework.framework.GuiTestUtil.*;
public class AddActionToolbarTest extends GuiTestCase {

  /**
   * End-to-end GUI scenario (macOS only — relies on the "meta comma"
   * shortcut and the "Preferences" dialog title): adds the "Print" action
   * after "Help" on the main toolbar via Settings, then verifies that the
   * new toolbar button appears and opens the Print dialog.
   * Currently disabled with {@code @Ignore}.
   */
  @Test @Ignore
  //Mac only test
  public void testAddActionToolbar() throws Exception {
    // Import a sample project and let indexing/background tasks settle.
    IdeFrameFixture ideFrameFixture = importSimpleProject();
    ideFrameFixture.waitForBackgroundTasksToFinish();
    // Ensure the main toolbar is visible before customising it.
    if (!UISettings.getInstance().SHOW_MAIN_TOOLBAR) {
      ideFrameFixture.invokeMenuPath("View", "Toolbar");
    }
    // Open Settings (macOS Cmd+,) and locate the Preferences dialog.
    invokeActionViaShortcut(myRobot, "meta comma");
    JDialogFixture preferencesDialog = JDialogFixture.find(myRobot, "Preferences");
    // Navigate: Appearance & Behavior -> Menus and Toolbars.
    SettingsTreeFixture.find(myRobot).select("Appearance & Behavior/Menus and Toolbars");
    Pause.pause(2000L);
    // Select Main Toolbar/Help and insert a new action right after it.
    findJTreeFixtureByClassName(myRobot, preferencesDialog.target(), Tree.class.getName()).clickPath("Main Toolbar/Help");
    findAndClickButton(preferencesDialog, "Add After...");
    JDialogFixture dialogFixture = JDialogFixture.find(myRobot, "Choose Actions To Add");
    // Pick All Actions/Main menu/File/Print... and confirm both dialogs.
    findJTreeFixtureByClassName(myRobot, dialogFixture.target(), Tree.class.getName()).clickPath("All Actions/Main menu/File/Print...");
    findAndClickOkButton(dialogFixture);
    findAndClickOkButton(preferencesDialog);
    // Select a source file so the Print action becomes applicable.
    ideFrameFixture.getProjectView().selectProjectPane().selectByPath(ideFrameFixture.getProject().getName(), "src", "Main.java").click();
    // The new "Print" toolbar button must show up enabled; click it.
    ActionButtonFixture.findByActionId("Print", myRobot, ideFrameFixture.target()).waitUntilEnabledAndShowing().click();
    // Verify the Print dialog opens, then dismiss it.
    JDialogFixture printDialog = JDialogFixture.find(myRobot, "Print");
    findAndClickCancelButton(printDialog);
    Pause.pause(5000L);
  }
}
| apache-2.0 |
edmccard/tvis | tvis/examples/keytest.rs | 1027 | extern crate tvis;
use std::sync::mpsc::channel;
use tvis::term::{self, BoldOrBright, UseTruecolor};
use tvis::input::{InputEvent, Key};
fn main() {
    // Channel over which the terminal delivers input events.
    let (tx, rx) = channel();
    let mut screen =
        term::connect_with_input(tx, UseTruecolor::Auto, BoldOrBright::Bold)
            .unwrap();
    // Key capture only works when both ends are real terminals.
    if !screen.is_tty_input() || !screen.is_tty_output() {
        screen.log("input or output is not a terminal");
        return;
    }
    screen.start_input().unwrap();
    // Log every key/event until the sender side closes the channel.
    while let Ok(raw) = rx.recv() {
        if let Some(input) = raw.as_any().downcast_ref::<InputEvent>() {
            match *input {
                // Backtick quits the demo immediately.
                InputEvent::Key(Key::Char('`', _, _), _) => return,
                InputEvent::Key(key, mods) => {
                    screen.log(&format!("KEY {}{}\r", mods, key));
                }
                _ => {
                    screen.log(&format!("EVENT: {:?}\r", input));
                }
            }
        }
    }
    screen.log("SHUTTING DOWN\r");
    ::std::thread::sleep(::std::time::Duration::from_secs(3));
}
| apache-2.0 |
ZhouweiDev/tinyweather | app/src/main/java/com/tinyweather/android/MainActivity.java | 768 | package com.tinyweather.android;
import android.content.Intent;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import java.util.zip.InflaterInputStream;
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // If cached weather data exists in the default shared preferences,
        // jump straight to the weather screen and remove this activity from
        // the back stack so Back exits the app instead of returning here.
        SharedPreferences prefs= PreferenceManager.getDefaultSharedPreferences(this);
        if (prefs.getString("weather",null)!=null){
            Intent intent=new Intent(this,WeatherActivity.class);
            startActivity(intent);
            finish();
        }
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-ecs/src/main/java/com/amazonaws/services/ecs/model/ListTasksRequest.java | 38960 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ecs.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ecs-2014-11-13/ListTasks" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTasksRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
    /** Cluster filter: short name or full ARN; the default cluster is assumed when unset. */
    private String cluster;
    /** Restrict results to tasks running on this container instance (ID or full ARN). */
    private String containerInstance;
    /** Restrict results to tasks belonging to this task-definition family. */
    private String family;
    /**
     * Opaque pagination token returned by a previous ListTasks call; continues
     * the listing from where the last page ended.
     */
    private String nextToken;
    /**
     * Page size for paginated output, between 1 and 100; ListTasks returns up
     * to 100 results (plus a nextToken when applicable) if unset.
     */
    private Integer maxResults;
    /** Restrict results to tasks started with this startedBy value. */
    private String startedBy;
    /** Restrict results to tasks that belong to this service. */
    private String serviceName;
    /**
     * Desired-status filter; defaults to RUNNING. STOPPED is useful for
     * debugging stopped tasks. Filtering on PENDING never matches because
     * Amazon ECS never sets a task's desired status to PENDING.
     */
    private String desiredStatus;
    /** Launch type filter for the listed tasks. */
    private String launchType;
/**
* <p>
* The short name or full Amazon Resource Name (ARN) of the cluster that hosts the tasks to list. If you do not
* specify a cluster, the default cluster is assumed.
* </p>
*
* @param cluster
* The short name or full Amazon Resource Name (ARN) of the cluster that hosts the tasks to list. If you do
* not specify a cluster, the default cluster is assumed.
*/
public void setCluster(String cluster) {
this.cluster = cluster;
}
    /**
     * @return the cluster filter (short name or full ARN); the default
     *         cluster is assumed when not specified
     */
    public String getCluster() {
        return this.cluster;
    }
    /**
     * Fluent variant of {@link #setCluster(String)}.
     *
     * @param cluster cluster short name or ARN
     * @return this request, for method chaining
     */
    public ListTasksRequest withCluster(String cluster) {
        setCluster(cluster);
        return this;
    }
/**
* <p>
* The container instance ID or full ARN of the container instance with which to filter the <code>ListTasks</code>
* results. Specifying a <code>containerInstance</code> limits the results to tasks that belong to that container
* instance.
* </p>
*
* @param containerInstance
* The container instance ID or full ARN of the container instance with which to filter the
* <code>ListTasks</code> results. Specifying a <code>containerInstance</code> limits the results to tasks
* that belong to that container instance.
*/
public void setContainerInstance(String containerInstance) {
this.containerInstance = containerInstance;
}
    /**
     * @return the container-instance filter (ID or full ARN) limiting results
     *         to tasks on that instance, or {@code null} if unset
     */
    public String getContainerInstance() {
        return this.containerInstance;
    }
    /**
     * Fluent variant of {@link #setContainerInstance(String)}.
     *
     * @param containerInstance container instance ID or ARN
     * @return this request, for method chaining
     */
    public ListTasksRequest withContainerInstance(String containerInstance) {
        setContainerInstance(containerInstance);
        return this;
    }
    /**
     * Sets the family filter; limits results to tasks of that
     * task-definition family.
     *
     * @param family task-definition family name
     */
    public void setFamily(String family) {
        this.family = family;
    }
    /**
     * @return the task-definition family filter, or {@code null} if unset
     */
    public String getFamily() {
        return this.family;
    }
    /**
     * Fluent variant of {@link #setFamily(String)}.
     *
     * @param family task-definition family name
     * @return this request, for method chaining
     */
    public ListTasksRequest withFamily(String family) {
        setFamily(family);
        return this;
    }
    /**
     * Sets the pagination token from a previous paginated ListTasks response;
     * results continue from the end of the previous page. The token is an
     * opaque identifier and must not be used for any other purpose.
     *
     * @param nextToken pagination token from the previous response
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }
    /**
     * @return the opaque pagination token continuing a previous paginated
     *         ListTasks request, or {@code null} if unset
     */
    public String getNextToken() {
        return this.nextToken;
    }
    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken opaque pagination token from the previous response
     * @return this request, for method chaining
     */
    public ListTasksRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }
/**
* <p>
* The maximum number of task results returned by <code>ListTasks</code> in paginated output. When this parameter is
* used, <code>ListTasks</code> only returns <code>maxResults</code> results in a single page along with a
* <code>nextToken</code> response element. The remaining results of the initial request can be seen by sending
* another <code>ListTasks</code> request with the returned <code>nextToken</code> value. This value can be between
* 1 and 100. If this parameter is not used, then <code>ListTasks</code> returns up to 100 results and a
* <code>nextToken</code> value if applicable.
* </p>
*
* @param maxResults
* The maximum number of task results returned by <code>ListTasks</code> in paginated output. When this
* parameter is used, <code>ListTasks</code> only returns <code>maxResults</code> results in a single page
* along with a <code>nextToken</code> response element. The remaining results of the initial request can be
* seen by sending another <code>ListTasks</code> request with the returned <code>nextToken</code> value.
* This value can be between 1 and 100. If this parameter is not used, then <code>ListTasks</code> returns up
* to 100 results and a <code>nextToken</code> value if applicable.
*/
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* The maximum number of task results returned by <code>ListTasks</code> in paginated output. When this parameter is
* used, <code>ListTasks</code> only returns <code>maxResults</code> results in a single page along with a
* <code>nextToken</code> response element. The remaining results of the initial request can be seen by sending
* another <code>ListTasks</code> request with the returned <code>nextToken</code> value. This value can be between
* 1 and 100. If this parameter is not used, then <code>ListTasks</code> returns up to 100 results and a
* <code>nextToken</code> value if applicable.
* </p>
*
* @return The maximum number of task results returned by <code>ListTasks</code> in paginated output. When this
* parameter is used, <code>ListTasks</code> only returns <code>maxResults</code> results in a single page
* along with a <code>nextToken</code> response element. The remaining results of the initial request can be
* seen by sending another <code>ListTasks</code> request with the returned <code>nextToken</code> value.
* This value can be between 1 and 100. If this parameter is not used, then <code>ListTasks</code> returns
* up to 100 results and a <code>nextToken</code> value if applicable.
*/
public Integer getMaxResults() {
return this.maxResults;
}
/**
* <p>
* The maximum number of task results returned by <code>ListTasks</code> in paginated output. When this parameter is
* used, <code>ListTasks</code> only returns <code>maxResults</code> results in a single page along with a
* <code>nextToken</code> response element. The remaining results of the initial request can be seen by sending
* another <code>ListTasks</code> request with the returned <code>nextToken</code> value. This value can be between
* 1 and 100. If this parameter is not used, then <code>ListTasks</code> returns up to 100 results and a
* <code>nextToken</code> value if applicable.
* </p>
*
* @param maxResults
* The maximum number of task results returned by <code>ListTasks</code> in paginated output. When this
* parameter is used, <code>ListTasks</code> only returns <code>maxResults</code> results in a single page
* along with a <code>nextToken</code> response element. The remaining results of the initial request can be
* seen by sending another <code>ListTasks</code> request with the returned <code>nextToken</code> value.
* This value can be between 1 and 100. If this parameter is not used, then <code>ListTasks</code> returns up
* to 100 results and a <code>nextToken</code> value if applicable.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTasksRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
/**
* <p>
* The <code>startedBy</code> value with which to filter the task results. Specifying a <code>startedBy</code> value
* limits the results to tasks that were started with that value.
* </p>
*
* @param startedBy
* The <code>startedBy</code> value with which to filter the task results. Specifying a
* <code>startedBy</code> value limits the results to tasks that were started with that value.
*/
public void setStartedBy(String startedBy) {
this.startedBy = startedBy;
}
/**
* <p>
* The <code>startedBy</code> value with which to filter the task results. Specifying a <code>startedBy</code> value
* limits the results to tasks that were started with that value.
* </p>
*
* @return The <code>startedBy</code> value with which to filter the task results. Specifying a
* <code>startedBy</code> value limits the results to tasks that were started with that value.
*/
public String getStartedBy() {
return this.startedBy;
}
/**
* <p>
* The <code>startedBy</code> value with which to filter the task results. Specifying a <code>startedBy</code> value
* limits the results to tasks that were started with that value.
* </p>
*
* @param startedBy
* The <code>startedBy</code> value with which to filter the task results. Specifying a
* <code>startedBy</code> value limits the results to tasks that were started with that value.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTasksRequest withStartedBy(String startedBy) {
setStartedBy(startedBy);
return this;
}
/**
* <p>
* The name of the service with which to filter the <code>ListTasks</code> results. Specifying a
* <code>serviceName</code> limits the results to tasks that belong to that service.
* </p>
*
* @param serviceName
* The name of the service with which to filter the <code>ListTasks</code> results. Specifying a
* <code>serviceName</code> limits the results to tasks that belong to that service.
*/
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
/**
* <p>
* The name of the service with which to filter the <code>ListTasks</code> results. Specifying a
* <code>serviceName</code> limits the results to tasks that belong to that service.
* </p>
*
* @return The name of the service with which to filter the <code>ListTasks</code> results. Specifying a
* <code>serviceName</code> limits the results to tasks that belong to that service.
*/
public String getServiceName() {
return this.serviceName;
}
/**
* <p>
* The name of the service with which to filter the <code>ListTasks</code> results. Specifying a
* <code>serviceName</code> limits the results to tasks that belong to that service.
* </p>
*
* @param serviceName
* The name of the service with which to filter the <code>ListTasks</code> results. Specifying a
* <code>serviceName</code> limits the results to tasks that belong to that service.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListTasksRequest withServiceName(String serviceName) {
setServiceName(serviceName);
return this;
}
/**
* <p>
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting properly or
* have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks that Amazon ECS has
* set the desired status to <code>RUNNING</code>.
* </p>
* <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return any
* results. Amazon ECS never sets the desired status of a task to that value (only a task's <code>lastStatus</code>
* may have a value of <code>PENDING</code>).
* </p>
* </note>
*
* @param desiredStatus
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting
* properly or have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks
* that Amazon ECS has set the desired status to <code>RUNNING</code>.</p> <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return
* any results. Amazon ECS never sets the desired status of a task to that value (only a task's
* <code>lastStatus</code> may have a value of <code>PENDING</code>).
* </p>
* @see DesiredStatus
*/
public void setDesiredStatus(String desiredStatus) {
this.desiredStatus = desiredStatus;
}
/**
* <p>
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting properly or
* have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks that Amazon ECS has
* set the desired status to <code>RUNNING</code>.
* </p>
* <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return any
* results. Amazon ECS never sets the desired status of a task to that value (only a task's <code>lastStatus</code>
* may have a value of <code>PENDING</code>).
* </p>
* </note>
*
* @return The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set
* the desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting
* properly or have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks
* that Amazon ECS has set the desired status to <code>RUNNING</code>.</p> <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return
* any results. Amazon ECS never sets the desired status of a task to that value (only a task's
* <code>lastStatus</code> may have a value of <code>PENDING</code>).
* </p>
* @see DesiredStatus
*/
public String getDesiredStatus() {
return this.desiredStatus;
}
/**
* <p>
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting properly or
* have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks that Amazon ECS has
* set the desired status to <code>RUNNING</code>.
* </p>
* <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return any
* results. Amazon ECS never sets the desired status of a task to that value (only a task's <code>lastStatus</code>
* may have a value of <code>PENDING</code>).
* </p>
* </note>
*
* @param desiredStatus
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting
* properly or have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks
* that Amazon ECS has set the desired status to <code>RUNNING</code>.</p> <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return
* any results. Amazon ECS never sets the desired status of a task to that value (only a task's
* <code>lastStatus</code> may have a value of <code>PENDING</code>).
* </p>
* @return Returns a reference to this object so that method calls can be chained together.
* @see DesiredStatus
*/
public ListTasksRequest withDesiredStatus(String desiredStatus) {
setDesiredStatus(desiredStatus);
return this;
}
/**
* <p>
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting properly or
* have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks that Amazon ECS has
* set the desired status to <code>RUNNING</code>.
* </p>
* <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return any
* results. Amazon ECS never sets the desired status of a task to that value (only a task's <code>lastStatus</code>
* may have a value of <code>PENDING</code>).
* </p>
* </note>
*
* @param desiredStatus
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting
* properly or have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks
* that Amazon ECS has set the desired status to <code>RUNNING</code>.</p> <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return
* any results. Amazon ECS never sets the desired status of a task to that value (only a task's
* <code>lastStatus</code> may have a value of <code>PENDING</code>).
* </p>
* @see DesiredStatus
*/
public void setDesiredStatus(DesiredStatus desiredStatus) {
withDesiredStatus(desiredStatus);
}
/**
* <p>
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting properly or
* have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks that Amazon ECS has
* set the desired status to <code>RUNNING</code>.
* </p>
* <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return any
* results. Amazon ECS never sets the desired status of a task to that value (only a task's <code>lastStatus</code>
* may have a value of <code>PENDING</code>).
* </p>
* </note>
*
* @param desiredStatus
* The task desired status with which to filter the <code>ListTasks</code> results. Specifying a
* <code>desiredStatus</code> of <code>STOPPED</code> limits the results to tasks that Amazon ECS has set the
* desired status to <code>STOPPED</code>. This can be useful for debugging tasks that are not starting
* properly or have died or finished. The default status filter is <code>RUNNING</code>, which shows tasks
* that Amazon ECS has set the desired status to <code>RUNNING</code>.</p> <note>
* <p>
* Although you can filter results based on a desired status of <code>PENDING</code>, this does not return
* any results. Amazon ECS never sets the desired status of a task to that value (only a task's
* <code>lastStatus</code> may have a value of <code>PENDING</code>).
* </p>
* @return Returns a reference to this object so that method calls can be chained together.
* @see DesiredStatus
*/
public ListTasksRequest withDesiredStatus(DesiredStatus desiredStatus) {
this.desiredStatus = desiredStatus.toString();
return this;
}
/**
* <p>
* The launch type for services to list.
* </p>
*
* @param launchType
* The launch type for services to list.
* @see LaunchType
*/
public void setLaunchType(String launchType) {
this.launchType = launchType;
}
/**
* <p>
* The launch type for services to list.
* </p>
*
* @return The launch type for services to list.
* @see LaunchType
*/
public String getLaunchType() {
return this.launchType;
}
/**
* <p>
* The launch type for services to list.
* </p>
*
* @param launchType
* The launch type for services to list.
* @return Returns a reference to this object so that method calls can be chained together.
* @see LaunchType
*/
public ListTasksRequest withLaunchType(String launchType) {
setLaunchType(launchType);
return this;
}
/**
* <p>
* The launch type for services to list.
* </p>
*
* @param launchType
* The launch type for services to list.
* @see LaunchType
*/
public void setLaunchType(LaunchType launchType) {
withLaunchType(launchType);
}
/**
* <p>
* The launch type for services to list.
* </p>
*
* @param launchType
* The launch type for services to list.
* @return Returns a reference to this object so that method calls can be chained together.
* @see LaunchType
*/
public ListTasksRequest withLaunchType(LaunchType launchType) {
this.launchType = launchType.toString();
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getCluster() != null)
sb.append("Cluster: ").append(getCluster()).append(",");
if (getContainerInstance() != null)
sb.append("ContainerInstance: ").append(getContainerInstance()).append(",");
if (getFamily() != null)
sb.append("Family: ").append(getFamily()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults()).append(",");
if (getStartedBy() != null)
sb.append("StartedBy: ").append(getStartedBy()).append(",");
if (getServiceName() != null)
sb.append("ServiceName: ").append(getServiceName()).append(",");
if (getDesiredStatus() != null)
sb.append("DesiredStatus: ").append(getDesiredStatus()).append(",");
if (getLaunchType() != null)
sb.append("LaunchType: ").append(getLaunchType());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListTasksRequest == false)
return false;
ListTasksRequest other = (ListTasksRequest) obj;
if (other.getCluster() == null ^ this.getCluster() == null)
return false;
if (other.getCluster() != null && other.getCluster().equals(this.getCluster()) == false)
return false;
if (other.getContainerInstance() == null ^ this.getContainerInstance() == null)
return false;
if (other.getContainerInstance() != null && other.getContainerInstance().equals(this.getContainerInstance()) == false)
return false;
if (other.getFamily() == null ^ this.getFamily() == null)
return false;
if (other.getFamily() != null && other.getFamily().equals(this.getFamily()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getStartedBy() == null ^ this.getStartedBy() == null)
return false;
if (other.getStartedBy() != null && other.getStartedBy().equals(this.getStartedBy()) == false)
return false;
if (other.getServiceName() == null ^ this.getServiceName() == null)
return false;
if (other.getServiceName() != null && other.getServiceName().equals(this.getServiceName()) == false)
return false;
if (other.getDesiredStatus() == null ^ this.getDesiredStatus() == null)
return false;
if (other.getDesiredStatus() != null && other.getDesiredStatus().equals(this.getDesiredStatus()) == false)
return false;
if (other.getLaunchType() == null ^ this.getLaunchType() == null)
return false;
if (other.getLaunchType() != null && other.getLaunchType().equals(this.getLaunchType()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getCluster() == null) ? 0 : getCluster().hashCode());
hashCode = prime * hashCode + ((getContainerInstance() == null) ? 0 : getContainerInstance().hashCode());
hashCode = prime * hashCode + ((getFamily() == null) ? 0 : getFamily().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode + ((getStartedBy() == null) ? 0 : getStartedBy().hashCode());
hashCode = prime * hashCode + ((getServiceName() == null) ? 0 : getServiceName().hashCode());
hashCode = prime * hashCode + ((getDesiredStatus() == null) ? 0 : getDesiredStatus().hashCode());
hashCode = prime * hashCode + ((getLaunchType() == null) ? 0 : getLaunchType().hashCode());
return hashCode;
}
    /**
     * Creates a shallow copy of this request via {@link Object#clone()};
     * the covariant return type spares callers a cast.
     */
    @Override
    public ListTasksRequest clone() {
        return (ListTasksRequest) super.clone();
    }
}
| apache-2.0 |
andrewschaaf/closure-library-mirror | closure/goog/net/xpc/crosspagechannel.js | 17866 | // Copyright 2007 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Provides the class CrossPageChannel, the main class in
* goog.net.xpc.
*
* @see ../../demos/xpc/index.html
*/
goog.provide('goog.net.xpc.CrossPageChannel');
goog.provide('goog.net.xpc.CrossPageChannel.Role');
goog.require('goog.Disposable');
goog.require('goog.Uri');
goog.require('goog.dom');
goog.require('goog.events');
goog.require('goog.json');
goog.require('goog.messaging.MessageChannel'); // interface
goog.require('goog.net.xpc');
goog.require('goog.net.xpc.FrameElementMethodTransport');
goog.require('goog.net.xpc.IframePollingTransport');
goog.require('goog.net.xpc.IframeRelayTransport');
goog.require('goog.net.xpc.NativeMessagingTransport');
goog.require('goog.net.xpc.NixTransport');
goog.require('goog.net.xpc.Transport');
goog.require('goog.userAgent');
/**
 * A communication channel between two documents from different domains.
 * Provides asynchronous messaging.
 *
 * @param {Object} cfg Channel configuration object (keyed by
 *     goog.net.xpc.CfgFields).
 * @param {goog.dom.DomHelper=} opt_domHelper The optional dom helper to
 *     use for looking up elements in the dom.
 * @constructor
 * @implements {goog.messaging.MessageChannel}
 * @extends {goog.Disposable}
 */
goog.net.xpc.CrossPageChannel = function(cfg, opt_domHelper) {
  goog.Disposable.call(this);
  /**
   * The configuration for this channel.
   * @type {Object}
   * @private
   */
  this.cfg_ = cfg;
  /**
   * The name of the channel. Taken from the configuration when present;
   * otherwise a random name is generated so unnamed channels don't collide.
   * @type {string}
   * @protected
   */
  this.name = this.cfg_[goog.net.xpc.CfgFields.CHANNEL_NAME] ||
      goog.net.xpc.getRandomString(10);
  /**
   * Object holding the service callbacks, keyed by service name.
   * @type {Object}
   * @private
   */
  this.services_ = {};
  /**
   * The dom helper to use for accessing the dom.
   * @type {goog.dom.DomHelper}
   * @private
   */
  this.domHelper_ = opt_domHelper || goog.dom.getDomHelper();
  // Register in the global channel registry so transports can look the
  // channel up by name, and make sure channels are disposed on page unload.
  goog.net.xpc.channels_[this.name] = this;
  goog.events.listen(window, 'unload',
      goog.net.xpc.CrossPageChannel.disposeAll_);
  goog.net.xpc.logger.info('CrossPageChannel created: ' + this.name);
};
goog.inherits(goog.net.xpc.CrossPageChannel, goog.Disposable);
/**
 * The transport in use for this channel; created lazily by
 * createTransport_() when connect() is called.
 * @type {goog.net.xpc.Transport?}
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.transport_ = null;
/**
 * The channel state (one of goog.net.xpc.ChannelStates). Starts out
 * disconnected until the transport completes its handshake.
 * @type {number}
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.state_ =
    goog.net.xpc.ChannelStates.NOT_CONNECTED;
/**
 * Tells whether the channel has completed its connection handshake and is
 * ready to exchange messages.
 * @return {boolean} Whether the channel is connected.
 */
goog.net.xpc.CrossPageChannel.prototype.isConnected = function() {
  return this.state_ === goog.net.xpc.ChannelStates.CONNECTED;
};
/**
 * Reference to the window-object of the peer page; set via
 * setPeerWindowObject() or discovered during connect().
 * @type {Object}
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.peerWindowObject_ = null;
/**
 * Reference to the iframe-element hosting the peer page, when this side
 * is the container.
 * @type {Object}
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.iframeElement_ = null;
/**
 * Sets the window object the foreign document resides in. Must happen
 * before the transport connects so it knows where to send messages.
 *
 * @param {Object} peerWindowObject The window object of the peer.
 */
goog.net.xpc.CrossPageChannel.prototype.setPeerWindowObject =
    function(peerWindowObject) {
  this.peerWindowObject_ = peerWindowObject;
};
/**
 * Determine which transport type to use for this channel / useragent.
 * The branches are ordered from most to least preferred, so their order
 * is significant: native postMessage first, then browser-specific
 * fallbacks, then iframe polling as the last resort.
 * @return {goog.net.xpc.TransportTypes|undefined} The best transport type,
 *     or undefined when no suitable transport is available.
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.determineTransportType_ = function() {
  var transportType;
  if (goog.isFunction(document.postMessage) ||
      goog.isFunction(window.postMessage) ||
      // IE8 supports window.postMessage, but
      // typeof window.postMessage returns "object"
      (goog.userAgent.IE && window.postMessage)) {
    transportType = goog.net.xpc.TransportTypes.NATIVE_MESSAGING;
  } else if (goog.userAgent.GECKO) {
    transportType = goog.net.xpc.TransportTypes.FRAME_ELEMENT_METHOD;
  } else if (goog.userAgent.IE &&
             this.cfg_[goog.net.xpc.CfgFields.PEER_RELAY_URI]) {
    transportType = goog.net.xpc.TransportTypes.IFRAME_RELAY;
  } else if (goog.userAgent.IE) {
    // IE without a relay URI falls back to the NIX (opaque object) transport.
    transportType = goog.net.xpc.TransportTypes.NIX;
  } else if (this.cfg_[goog.net.xpc.CfgFields.LOCAL_POLL_URI] &&
             this.cfg_[goog.net.xpc.CfgFields.PEER_POLL_URI]) {
    transportType = goog.net.xpc.TransportTypes.IFRAME_POLLING;
  }
  return transportType;
};
/**
 * Creates the transport for this channel. Chooses from the available
 * transport based on the user agent and the configuration. Idempotent:
 * does nothing if a transport already exists. Throws when no suitable
 * transport can be determined.
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.createTransport_ = function() {
  // return, if the transport has already been created
  if (this.transport_) {
    return;
  }
  // Auto-detect the transport type unless the configuration pins one.
  if (!this.cfg_[goog.net.xpc.CfgFields.TRANSPORT]) {
    this.cfg_[goog.net.xpc.CfgFields.TRANSPORT] =
        this.determineTransportType_();
  }
  switch (this.cfg_[goog.net.xpc.CfgFields.TRANSPORT]) {
    case goog.net.xpc.TransportTypes.NATIVE_MESSAGING:
      this.transport_ = new goog.net.xpc.NativeMessagingTransport(
          this,
          this.cfg_[goog.net.xpc.CfgFields.PEER_HOSTNAME],
          this.domHelper_);
      break;
    case goog.net.xpc.TransportTypes.NIX:
      this.transport_ = new goog.net.xpc.NixTransport(this, this.domHelper_);
      break;
    case goog.net.xpc.TransportTypes.FRAME_ELEMENT_METHOD:
      this.transport_ =
          new goog.net.xpc.FrameElementMethodTransport(this, this.domHelper_);
      break;
    case goog.net.xpc.TransportTypes.IFRAME_RELAY:
      this.transport_ =
          new goog.net.xpc.IframeRelayTransport(this, this.domHelper_);
      break;
    case goog.net.xpc.TransportTypes.IFRAME_POLLING:
      this.transport_ =
          new goog.net.xpc.IframePollingTransport(this, this.domHelper_);
      break;
  }
  if (this.transport_) {
    goog.net.xpc.logger.info('Transport created: ' + this.transport_.getName());
  } else {
    throw Error('CrossPageChannel: No suitable transport found!');
  }
};
/**
 * Returns the transport type in use for this channel. Only valid after
 * the transport has been created (i.e. after connect()).
 * @return {number} Transport-type identifier.
 */
goog.net.xpc.CrossPageChannel.prototype.getTransportType = function() {
  return this.transport_.getType();
};
/**
 * Returns the transport name in use for this channel. Only valid after
 * the transport has been created (i.e. after connect()).
 * @return {string} The transport name.
 */
goog.net.xpc.CrossPageChannel.prototype.getTransportName = function() {
  return this.transport_.getName();
};
/**
 * Builds the configuration object the peer page needs to initialize its
 * side of this channel: the channel name, the transport type, and any
 * relay/poll URIs mirrored from the local configuration (local URIs
 * become the peer's "peer" URIs and vice versa).
 *
 * @return {Object} Configuration-object to be used by the peer to
 *     initialize the channel.
 */
goog.net.xpc.CrossPageChannel.prototype.getPeerConfiguration = function() {
  var fields = goog.net.xpc.CfgFields;
  var peerCfg = {};
  peerCfg[fields.CHANNEL_NAME] = this.name;
  peerCfg[fields.TRANSPORT] = this.cfg_[fields.TRANSPORT];
  if (this.cfg_[fields.LOCAL_RELAY_URI]) {
    peerCfg[fields.PEER_RELAY_URI] = this.cfg_[fields.LOCAL_RELAY_URI];
  }
  if (this.cfg_[fields.LOCAL_POLL_URI]) {
    peerCfg[fields.PEER_POLL_URI] = this.cfg_[fields.LOCAL_POLL_URI];
  }
  if (this.cfg_[fields.PEER_POLL_URI]) {
    peerCfg[fields.LOCAL_POLL_URI] = this.cfg_[fields.PEER_POLL_URI];
  }
  return peerCfg;
};
/**
 * Creates the iframe containing the peer page in a specified parent element.
 * This method does not connect the channel, connect() still has to be called
 * separately.
 *
 * @param {!Element} parentElm The container element the iframe is appended to.
 * @param {Function=} opt_configureIframeCb If present, this function gets
 *     called with the iframe element as parameter to allow setting properties
 *     on it before it gets added to the DOM. If absent, the iframe's width and
 *     height are set to '100%'.
 * @param {boolean=} opt_addCfgParam Whether to add the peer configuration as
 *     URL parameter (default: true).
 * @return {!HTMLIFrameElement} The iframe element.
 */
goog.net.xpc.CrossPageChannel.prototype.createPeerIframe = function(
    parentElm, opt_configureIframeCb, opt_addCfgParam) {
  var iframeId = this.cfg_[goog.net.xpc.CfgFields.IFRAME_ID];
  if (!iframeId) {
    // Create a randomized ID for the iframe element to avoid
    // bfcache-related issues.
    iframeId = this.cfg_[goog.net.xpc.CfgFields.IFRAME_ID] =
        'xpcpeer' + goog.net.xpc.getRandomString(4);
  }
  // TODO(user) Opera creates a history-entry when creating an iframe
  // programmatically as follows. Find a way which avoids this.
  var iframeElm = goog.dom.createElement('IFRAME');
  iframeElm.id = iframeElm.name = iframeId;
  if (opt_configureIframeCb) {
    opt_configureIframeCb(iframeElm);
  } else {
    iframeElm.style.width = iframeElm.style.height = '100%';
  }
  // Normalize the configured peer URI to a goog.Uri (and cache it back
  // into the configuration) so a query parameter can be appended below.
  var peerUri = this.cfg_[goog.net.xpc.CfgFields.PEER_URI];
  if (goog.isString(peerUri)) {
    peerUri = this.cfg_[goog.net.xpc.CfgFields.PEER_URI] =
        new goog.Uri(peerUri);
  }
  // Add the channel configuration used by the peer as URL parameter.
  if (opt_addCfgParam !== false) {
    peerUri.setParameterValue('xpc',
                              goog.json.serialize(
                                  this.getPeerConfiguration())
                              );
  }
  if (goog.userAgent.GECKO || goog.userAgent.WEBKIT) {
    // Appending the iframe in a timeout to avoid a weird fastback issue, which
    // is present in Safari and Gecko.
    // connect() calls made before the timeout fires are deferred and
    // replayed once the iframe is actually in the DOM.
    this.deferConnect_ = true;
    window.setTimeout(
        goog.bind(function() {
          this.deferConnect_ = false;
          parentElm.appendChild(iframeElm);
          iframeElm.src = peerUri.toString();
          goog.net.xpc.logger.info('peer iframe created (' + iframeId + ')');
          if (this.connectDeferred_) {
            this.connect(this.connectCb_);
          }
        }, this), 1);
  } else {
    iframeElm.src = peerUri.toString();
    parentElm.appendChild(iframeElm);
    goog.net.xpc.logger.info('peer iframe created (' + iframeId + ')');
  }
  return /** @type {!HTMLIFrameElement} */ (iframeElm);
};
/**
 * Flag whether connecting should be deferred; set while createPeerIframe()
 * is waiting for its append-iframe timeout to fire (Gecko/WebKit only).
 * @type {boolean}
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.deferConnect_ = false;
/**
 * Flag to remember if connect() has been called while connecting was
 * deferred, so the call can be replayed once the iframe exists.
 * @type {boolean}
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.connectDeferred_ = false;
/**
 * Initiates connecting the channel. When this method is called, all the
 * information needed to connect the channel has to be available.
 * Resolves the peer window (from the configured iframe, or from
 * window.parent when running inside an iframe), creates the transport and
 * starts its handshake. May be deferred while the peer iframe is still
 * being appended (see createPeerIframe()).
 *
 * @param {Function=} opt_connectCb The function to be called when the
 *     channel has been connected and is ready to be used.
 */
goog.net.xpc.CrossPageChannel.prototype.connect = function(opt_connectCb) {
  this.connectCb_ = opt_connectCb || goog.nullFunction;
  if (this.deferConnect_) {
    goog.net.xpc.logger.info('connect() deferred');
    this.connectDeferred_ = true;
    return;
  }
  goog.net.xpc.logger.info('connect()');
  if (this.cfg_[goog.net.xpc.CfgFields.IFRAME_ID]) {
    this.iframeElement_ = this.domHelper_.getElement(
        this.cfg_[goog.net.xpc.CfgFields.IFRAME_ID]);
  }
  if (this.iframeElement_) {
    var winObj = this.iframeElement_.contentWindow;
    // accessing the window using contentWindow doesn't work in safari
    if (!winObj) {
      winObj = window.frames[this.cfg_[goog.net.xpc.CfgFields.IFRAME_ID]];
    }
    this.setPeerWindowObject(winObj);
  }
  // if the peer window object has not been set at this point, we assume
  // being in an iframe and the channel is meant to be to the containing page
  if (!this.peerWindowObject_) {
    // throw an error if we are in the top window (== not in an iframe)
    if (window == top) {
      throw Error(
          "CrossPageChannel: Can't connect, peer window-object not set.");
    } else {
      this.setPeerWindowObject(window.parent);
    }
  }
  this.createTransport_();
  this.transport_.connect();
};
/**
 * Closes the channel: marks it CLOSED, disposes the underlying transport
 * and drops the reference to it. A no-op when the channel is not connected.
 */
goog.net.xpc.CrossPageChannel.prototype.close = function() {
  if (this.isConnected()) {
    this.state_ = goog.net.xpc.ChannelStates.CLOSED;
    this.transport_.dispose();
    this.transport_ = null;
    goog.net.xpc.logger.info('Channel "' + this.name + '" closed');
  }
};
/**
 * Called by the transport when the channel is connected. Transitions the
 * channel to CONNECTED (once only) and invokes the connect callback.
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.notifyConnected_ = function() {
  if (!this.isConnected()) {
    this.state_ = goog.net.xpc.ChannelStates.CONNECTED;
    goog.net.xpc.logger.info('Channel "' + this.name + '" connected');
    this.connectCb_();
  }
};
/**
 * Called by the transport in case of an unrecoverable failure.
 * Logs the error and closes the channel.
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.notifyTransportError_ = function() {
  goog.net.xpc.logger.info('Transport Error');
  this.close();
};
/**
 * Registers a service.
 *
 * Each service is stored as a descriptor object keyed by its name; a later
 * registration under the same name replaces the earlier one.
 *
 * @param {string} serviceName The name of the service.
 * @param {Function} callback The callback responsible to process incoming
 *     messages.
 * @param {boolean=} opt_jsonEncoded If true, incoming messages for this
 *     service are expected to contain a JSON-encoded object and will be
 *     deserialized automatically.
 */
goog.net.xpc.CrossPageChannel.prototype.registerService = function(
    serviceName, callback, opt_jsonEncoded) {
  var descriptor = {
    name: serviceName,
    callback: callback,
    jsonEncoded: Boolean(opt_jsonEncoded)
  };
  this.services_[serviceName] = descriptor;
};
/**
 * Registers a service to handle any messages that aren't handled by any other
 * services.
 *
 * @param {function(string, (string|Object))} callback The callback responsible
 *     for processing incoming messages that aren't processed by other services.
 */
goog.net.xpc.CrossPageChannel.prototype.registerDefaultService = function(
    callback) {
  // The raw function is stored directly, not wrapped in a service
  // descriptor like registerService() entries.
  this.defaultService_ = callback;
};
/**
 * Sends a msg over the channel.
 *
 * @param {string} serviceName The name of the service this message
 *     should be delivered to.
 * @param {string|Object} payload The payload. If this is an object, it is
 *     serialized to JSON before sending.
 */
goog.net.xpc.CrossPageChannel.prototype.send = function(serviceName, payload) {
  if (!this.isConnected()) {
    goog.net.xpc.logger.severe('Can\'t send. Channel not connected.');
    return;
  }
  // Check if the peer is still around.
  // NOTE(user): This check is not reliable in IE, where a document in an
  // iframe does not get unloaded when removing the iframe element from the DOM.
  // TODO(user): Find something that works in IE as well.
  if (this.peerWindowObject_.closed) {
    goog.net.xpc.logger.severe('Peer has disappeared.');
    this.close();
    return;
  }
  // Objects go over the wire as JSON strings; strings are sent as-is.
  var wirePayload = goog.isObject(payload) ?
      goog.json.serialize(payload) : payload;
  this.transport_.send(serviceName, wirePayload);
};
/**
 * Delivers messages to the appropriate service-handler.
 *
 * Transport-internal messages (empty or TRANSPORT_SERVICE_ names) go to the
 * transport; otherwise the payload is routed to the registered service, or
 * to the default service when no named service matches.
 *
 * @param {string} serviceName The name of the port.
 * @param {string} payload The payload.
 * @private
 */
goog.net.xpc.CrossPageChannel.prototype.deliver_ = function(serviceName,
                                                            payload) {
  if (this.isDisposed()) {
    goog.net.xpc.logger.warning('CrossPageChannel::deliver_(): Disposed.');
  } else if (!serviceName ||
      serviceName == goog.net.xpc.TRANSPORT_SERVICE_) {
    this.transport_.transportServiceHandler(payload);
  } else {
    // only deliver messages if connected
    if (this.isConnected()) {
      var service = this.services_[serviceName];
      if (service) {
        if (service.jsonEncoded) {
          /** @preserveTry */
          try {
            payload = goog.json.parse(payload);
          } catch (e) {
            goog.net.xpc.logger.info('Error parsing JSON-encoded payload.');
            return;
          }
        }
        service.callback(payload);
      } else if (this.defaultService_) {
        // BUG FIX: defaultService_ holds the callback function itself (see
        // registerDefaultService), not a {callback: ...} descriptor, so the
        // old `this.defaultService_.callback(payload)` threw a TypeError.
        // Invoke it directly per its documented
        // {function(string, (string|Object))} signature.
        this.defaultService_(serviceName, payload);
      } else {
        goog.net.xpc.logger.info('CrossPageChannel::deliver_(): ' +
                                 'No such service: "' + serviceName + '" ' +
                                 '(payload: ' + payload + ')');
      }
    } else {
      goog.net.xpc.logger.info('CrossPageChannel::deliver_(): Not connected.');
    }
  }
};
/**
 * The role of the peer.
 * @enum {number}
 */
goog.net.xpc.CrossPageChannel.Role = {
  /** This channel lives in the containing (outer) document. */
  OUTER: 0,
  /** This channel lives inside an embedded iframe (peer is the parent). */
  INNER: 1
};
/**
 * Returns the role of this channel (either inner or outer).
 * The channel is INNER when its peer is this window's parent, i.e. when it
 * runs inside an embedded iframe; otherwise it is OUTER.
 * @return {number} The role of this channel.
 */
goog.net.xpc.CrossPageChannel.prototype.getRole = function() {
  if (window.parent == this.peerWindowObject_) {
    return goog.net.xpc.CrossPageChannel.Role.INNER;
  }
  return goog.net.xpc.CrossPageChannel.Role.OUTER;
};
/**
 * Disposes of the channel.
 */
goog.net.xpc.CrossPageChannel.prototype.disposeInternal = function() {
  goog.net.xpc.CrossPageChannel.superClass_.disposeInternal.call(this);
  // Close first so the transport is disposed before references are dropped.
  this.close();
  // Drop references to the peer window and iframe element.
  this.peerWindowObject_ = null;
  this.iframeElement_ = null;
  delete this.services_;
  // Unregister from the global channel registry (used by disposeAll_).
  delete goog.net.xpc.channels_[this.name];
};
/**
 * Disposes all channels registered in the global channel registry.
 * @private
 */
goog.net.xpc.CrossPageChannel.disposeAll_ = function() {
  for (var channelName in goog.net.xpc.channels_) {
    var channel = goog.net.xpc.channels_[channelName];
    if (channel) {
      channel.dispose();
    }
  }
};
| apache-2.0 |
ptgrogan/ofspy | ofspy/player/module.py | 25376 | """
Copyright 2015 Paul T. Grogan, Massachusetts Institute of Technology
Copyright 2017 Paul T. Grogan, Stevens Institute of Technology
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
The L{ofspy.player.module} package contains classes related to
player-controlled modules.
"""
from ..simulation import Entity
class Module(Entity):
    """
    A L{Module} represents a functional subsystem within an element.

    A module holds a list of L{Data} items (up to C{capacity} total size)
    and provides the transfer/exchange primitives shared by all concrete
    module types (storage, sensors, links, defense).
    """
    def __init__(self, name=None, cost=0, size=1, capacity=0):
        """
        @param name: the name of this module
        @type name: L{str}
        @param cost: the cost of this module
        @type cost: L{float}
        @param size: the size of this module
        @type size: L{float}
        @param capacity: the data capacity of this module
        @type capacity: L{int}
        """
        Entity.__init__(self, name=name)
        self.cost = cost
        self.size = size
        self.capacity = capacity
        # _initData captures the initial contents so init() can reset
        # state between simulation runs.
        self._initData = []
        self.data = self._initData[:]
    def getContentsSize(self):
        """
        Gets the total size of data in this module.
        @return: L{int}
        """
        return sum(d.size for d in self.data)
    def couldExchange(self, data, module):
        """
        Checks if this module can exchange data with another module (state-independent).
        @param data: the data to exchange
        @type data: L{Data}
        @param module: the module with which to exchange
        @type module: L{Subsystem}
        @return: L{bool}
        """
        # An exchange is a two-way swap: this module must be able to send
        # the data out, the other module must be able to take it, and the
        # other module must hold some datum that could come back in return.
        return self.couldTransferOut(data) \
                and module.couldTransferIn(data) \
                and any(module.couldTransferOut(d)
                        and self.couldTransferIn(d)
                        for d in module.data)
    def canExchange(self, data, module):
        """
        Checks if this module can exchange data with another module.
        @param data: the data to exchange
        @type data: L{Data}
        @param module: the module with which to exchange
        @type module: L{Subsystem}
        @return: L{bool}
        """
        return self.canTransferOut(data) \
                and module.couldTransferIn(data) \
                and any(module.canTransferOut(d)
                        and self.couldTransferIn(d)
                        for d in module.data)
    def exchange(self, data, module):
        """
        Exchanges data with another module.
        @param data: the data to exchange
        @type data: L{Data}
        @param module: the module with which to exchange
        @type module: L{Subsystem}
        @return: L{bool}
        """
        if self.canExchange(data, module):
            # Pick the first datum in the other module that can make the
            # return trip.
            otherData = next(d for d in module.data
                             if module.canTransferOut(d)
                             and self.couldTransferIn(d))
            # Both transfer-outs happen before the transfer-ins so neither
            # module is over capacity mid-swap.
            if self.transferOut(data) \
                    and module.transferOut(otherData) \
                    and self.transferIn(otherData) \
                    and module.transferIn(data):
                self.trigger('exchange', self, data, module)
                return True
        return False
    def couldTransferIn(self, data):
        """
        Checks if this module could transfer in data (state-independent).
        @param data: the data to transfer in
        @type data: L{Data}
        @return: L{bool}
        """
        return self.capacity >= data.size
    def canTransferIn(self, data):
        """
        Checks if this module can transfer in data (state-dependent).
        @param data: the data to transfer in
        @type data: L{Data}
        @return: L{bool}
        """
        # The new datum must fit alongside the current contents.
        return self.couldTransferIn(data) \
                and self.capacity >= data.size \
                + sum(d.size for d in self.data)
    def transferIn(self, data):
        """
        Transfers data in to this module.
        @param data: the data to transfer in
        @type data: L{Data}
        @return: L{bool}
        """
        if self.canTransferIn(data):
            self.data.append(data)
            self.trigger('transferIn', self, data)
            return True
        return False
    def couldTransferOut(self, data):
        """
        Checks if this module could transfer out data (state-independent).
        @param data: the data to transfer out
        @type data: L{Data}
        @return: L{bool}
        """
        return True
    def canTransferOut(self, data):
        """
        Checks if this module can transfer out data (state-dependent).
        @param data: the data to transfer out
        @type data: L{Data}
        @return: L{bool}
        """
        return data in self.data
    def transferOut(self, data):
        """
        Transfers data out of this module.
        @param data: the data to transfer out
        @type data: L{Data}
        @return: L{bool}
        """
        if self.canTransferOut(data):
            self.data.remove(data)
            self.trigger('transferOut', self, data)
            return True
        return False
    def isStorage(self):
        """
        Checks if this module can store data.
        @return: L{bool}
        """
        return False
    def isSensor(self):
        """
        Checks if this module can sense data.
        @return: L{bool}
        """
        return False
    def isLink(self):
        """
        Checks if this module can transmit/receive data.
        @return: L{bool}
        """
        return False
    def isDefense(self):
        """
        Checks if this is a defense module.
        @return: L{bool}
        """
        return False
    def isISL(self):
        """
        Checks if this module is an inter-satellite link.
        @return: L{bool}
        """
        return False
    def isSGL(self):
        """
        Checks if this module is a space-to-ground link.
        @return: L{bool}
        """
        return False
    def init(self, sim):
        """
        Initializes this module in a simulation.
        @param sim: the simulator
        @type sim: L{Simulator}
        """
        super(Module, self).init(sim)
        # Restore the initial contents for a fresh run.
        self.data = self._initData[:]
    def tock(self):
        """
        Tocks this module in a simulation.
        """
        super(Module, self).tock()
        if not self.isStorage():
            # Non-storage modules do not retain data between turns.
            del self.data[:]
class Defense(Module):
    """
    A L{Defense} module provides resilience to disturbances.
    It carries no data (capacity is fixed at zero).
    """
    def __init__(self, name=None, cost=0, size=1):
        """
        @param name: the name of this defense module
        @type name: L{str}
        @param cost: the cost of this defense module
        @type cost: L{float}
        @param size: the size of this defense module
        @type size: L{float}
        """
        Module.__init__(self, name=name, cost=cost, size=size, capacity=0)
    def isDefense(self):
        """
        Checks if this is a defense module.
        @return: L{bool}
        """
        return True
class Storage(Module):
    """
    A L{Storage} module retains data between simulation turns.
    Storing is a thin wrapper over the transfer-in primitives that also
    fires a 'store' event.
    """
    def __init__(self, name=None, cost=0, size=1, capacity=1):
        """
        @param name: the name of this storage module
        @type name: L{str}
        @param cost: the cost of this storage module
        @type cost: L{float}
        @param size: the size of this storage module
        @type size: L{float}
        @param capacity: the data capacity of this storage module
        @type capacity: L{int}
        """
        Module.__init__(self, name=name, cost=cost,
                        size=size, capacity=capacity)
    def couldStore(self, data):
        """
        Checks if this module could store data (state-independent).
        @param data: the data to store
        @type data: L{Data}
        @return: L{bool}
        """
        # Storing is possible exactly when a transfer-in is possible.
        return self.couldTransferIn(data)
    def canStore(self, data):
        """
        Checks if this module can store data (state-dependent).
        @param data: the data to store
        @type data: L{Data}
        @return: L{bool}
        """
        return self.canTransferIn(data)
    def store(self, data):
        """
        Stores data in this module and fires a 'store' event.
        @param data: the data to store
        @type data: L{Data}
        @return: L{bool}
        """
        if not self.canStore(data):
            return False
        self.transferIn(data)
        self.trigger('store', self, data)
        return True
    def isStorage(self):
        """
        Checks if this module can store data.
        @return: L{bool}
        """
        return True
class Sensor(Storage):
    """
    An L{Sensor} senses a phenomenon and stores resulting data.
    """
    def __init__(self, name=None, cost=0, size=1, capacity=1,
                 phenomenon=None, maxSensed=1):
        """
        @param name: the name of this sensor
        @type name: L{str}
        @param cost: the cost of this sensor
        @type cost: L{float}
        @param size: the size of this sensor
        @type size: L{float}
        @param capacity: the data capacity of this sensor
        @type capacity: L{int}
        @param phenomenon: the phenomenon sensed by this sensor
        @type phenomenon: L{str}
        @param maxSensed: the max data sensed by this sensor each turn
        @type maxSensed: L{int}
        """
        Storage.__init__(self, name=name, cost=cost,
                         size=size, capacity=capacity)
        self.phenomenon = phenomenon
        self.maxSensed = maxSensed
        # amount of data sensed so far this turn (reset by init() and tock())
        self._initSensed = 0
        self.sensed = self._initSensed
    def couldSense(self, data):
        """
        Checks if this sensor could sense data (state-independent).
        @param data: the data to sense
        @type data: L{Data}
        @return: L{bool}
        """
        return self.couldStore(data) \
                and self.maxSensed >= data.size
    def canSense(self, location, demand):
        """
        Checks if this sensor can sense data (state-dependent).
        @param location: the location
        @type location: L{Location}
        @param demand: the demand for which to sense
        @type demand: L{Demand}
        @return: L{bool}
        """
        data = demand.generateData()
        # Sensing requires a non-GEO orbit directly over the demand's sector
        # and enough remaining per-turn sensing budget.
        return self.couldSense(data) \
                and self.maxSensed >= self.sensed + data.size \
                and location.isOrbit() \
                and location.altitude != "GEO" \
                and demand.sector == location.sector
    def senseAndStore(self, location, contract):
        """
        Senses and stores data for a contract with this sensor.
        @param location: the location
        @type location: L{Location}
        @param contract: the contract for which to sense
        @type contract: L{Contract}
        @return: L{bool}
        """
        data = contract.demand.generateData(contract)
        if self.canSense(location, contract.demand) \
                and self.canStore(data):
            # Consume per-turn sensing budget before storing the new datum.
            self.sensed += data.size
            self.trigger('sense', self, contract)
            self.store(data)
            return True
        return False
    def couldTransferIn(self, data):
        """
        Checks if this sensor could transfer in data (state-independent).
        @param data: the data to transfer in
        @type data: L{Data}
        @return: L{bool}
        """
        # A sensor only accepts data matching its own phenomenon.
        return super(Sensor, self).couldTransferIn(data) \
                and self.phenomenon == data.phenomenon
    def isSensor(self):
        """
        Checks if this subsystem can sense data.
        @return: L{bool}
        """
        return True
    def init(self, sim):
        """
        Initializes this sensor in a simulation.
        @param sim: the simulator
        @type sim: L{Simulator}
        """
        super(Sensor, self).init(sim)
        self.sensed = self._initSensed
    def tock(self):
        """
        Tocks this sensor in a simulation.
        """
        super(Sensor, self).tock()
        # Reset the per-turn sensing budget.
        self.sensed = 0
class Link(Module):
    """
    An L{Link} transports data between two elements.
    It tracks per-turn transmitted/received totals against the
    C{maxTransmitted}/C{maxReceived} budgets.
    """
    def __init__(self, name=None, cost=0, size=1, capacity=1,
                 protocol=None, maxTransmitted=1, maxReceived=1):
        """
        @param name: the name of this Link
        @type name: L{str}
        @param cost: the cost of this Link
        @type cost: L{float}
        @param size: the size of this Link
        @type size: L{float}
        @param capacity: the data capacity of this Link
        @type capacity: L{int}
        @param protocol: the protocol of this Link
        @type protocol: L{str}
        @param maxTransmitted: the max data transmitted by this Link each turn
        @type maxTransmitted: L{int}
        @param maxReceived: the max data received by this Link each turn
        @type maxReceived: L{int}
        """
        Module.__init__(self, name=name, cost=cost,
                        size=size, capacity=capacity)
        self.protocol = protocol
        self.maxTransmitted = maxTransmitted
        self.maxReceived = maxReceived
        # per-turn counters (reset by init() and tock())
        self._initTransmitted = 0
        self.transmitted = self._initTransmitted
        self._initReceived = 0
        self.received = self._initReceived
    def couldTransmit(self, data, receiver, txLocation=None, rxLocation=None, context=None):
        """
        Checks if this Link could transmit data (state-independent).
        @param data: the data to transmit
        @type data: L{Data}
        @param receiver: the receiver receiving the data
        @type receiver: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        # Protocols must match for two links to communicate.
        return self.maxTransmitted >= data.size \
                and self.protocol == receiver.protocol
    def canTransmit(self, data, receiver, txLocation=None, rxLocation=None, context=None):
        """
        Checks if this Link can transmit data (state-dependent).
        @param data: the data to transmit
        @type data: L{Data}
        @param receiver: the receiver receiving the data
        @type receiver: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        return self.couldTransmit(data, receiver, txLocation, rxLocation, context) \
                and self.maxTransmitted >= data.size + self.transmitted
        # NOTE: the transfer-out feasibility check is performed in
        # transmit() instead of here.
        # and self.canTransferOut(data)
    def transmit(self, data, receiver, txLocation=None, rxLocation=None, context=None):
        """
        Transmits data from this transceiver.
        @param data: the data to transmit
        @type data: L{Data}
        @param receiver: the receiver receiving the data
        @type receiver: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        if self.canTransmit(data, receiver, txLocation, rxLocation, context) \
                and self.canTransferOut(data) \
                and self.transferOut(data):
            self.transmitted += data.size
            self.trigger('transmit', self, data, receiver)
            return True
        return False
    def couldReceive(self, data, transmitter, txLocation=None, rxLocation=None, context=None):
        """
        Checks if this Link could receive data (state-independent).
        @param data: the data to transmit
        @type data: L{Data}
        @param transmitter: the transmitter transmitting the data
        @type transmitter: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        return self.maxReceived >= data.size \
                and self.protocol == transmitter.protocol
    def canReceive(self, data, transmitter, txLocation=None, rxLocation=None, context=None):
        """
        Checks if this Link can receive data (state-dependent).
        @param data: the data to transmit
        @type data: L{Data}
        @param transmitter: the transmitter transmitting the data
        @type transmitter: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        return self.couldReceive(data, transmitter, txLocation, rxLocation, context) \
                and self.maxReceived >= data.size + self.received \
                and self.canTransferIn(data)
    def receive(self, data, transmitter, txLocation=None, rxLocation=None, context=None):
        """
        Receives data with this transceiver.
        @param data: the data to transmit
        @type data: L{Data}
        @param transmitter: the transmitter transmitting the data
        @type transmitter: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        if self.canReceive(data, transmitter, txLocation, rxLocation, context) \
                and self.transferIn(data):
            self.received += data.size
            self.trigger('receive', self, data, transmitter)
            return True
        return False
    def isLink(self):
        """
        Checks if this module can transmit and receive data.
        @return: L{bool}
        """
        return True
    def init(self, sim):
        """
        Initializes this Link in a simulation.
        @param sim: the simulator
        @type sim: L{Simulator}
        """
        super(Link, self).init(sim)
        self.transmitted = self._initTransmitted
        self.received = self._initReceived
    def tock(self):
        """
        Tocks this Link in a simulation.
        """
        super(Link, self).tock()
        # Reset the per-turn transmit/receive budgets.
        self.transmitted = 0
        self.received = 0
class SpaceGroundLink(Link):
    """
    An L{SpaceGroundLink} transports data from a satellite to a ground station.
    A transfer is geometrically possible only when the transmitter is in
    orbit, the receiver is on the surface, and both share a sector.
    """
    def __init__(self, name=None, cost=0, size=1, capacity=1,
                 protocol=None, maxTransmitted=1, maxReceived=1):
        """
        @param name: the name of this space-to-ground link
        @type name: L{str}
        @param cost: the cost of this space-to-ground link
        @type cost: L{float}
        @param size: the size of this space-to-ground link
        @type size: L{float}
        @param capacity: the data capacity of this space-to-ground link
        @type capacity: L{int}
        @param protocol: the protocol of this space-to-ground link
        @type protocol: L{str}
        @param maxTransmitted: the max data transmitted by this space-to-ground link each turn
        @type maxTransmitted: L{int}
        @param maxReceived: the max data received by this space-to-ground link each turn
        @type maxReceived: L{int}
        """
        Link.__init__(self, name=name, cost=cost,
                      size=size, capacity=capacity,
                      protocol=protocol,
                      maxTransmitted=maxTransmitted,
                      maxReceived=maxReceived)
    def couldTransmit(self, data, receiver, txLocation, rxLocation, context=None):
        """
        Checks if this space-to-ground link could transmit data (state-independent).
        @param data: the data to transmit
        @type data: L{Data}
        @param receiver: the receiver receiving the data
        @type receiver: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        if not super(SpaceGroundLink, self).couldTransmit(data, receiver):
            return False
        return (txLocation.isOrbit()
                and rxLocation.isSurface()
                and txLocation.sector == rxLocation.sector)
    def couldReceive(self, data, transmitter, txLocation, rxLocation, context=None):
        """
        Checks if this space-to-ground link could receive data (state-independent).
        @param data: the data to transmit
        @type data: L{Data}
        @param transmitter: the transmitter transmitting the data
        @type transmitter: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        if not super(SpaceGroundLink, self).couldReceive(data, transmitter):
            return False
        return (txLocation.isOrbit()
                and rxLocation.isSurface()
                and txLocation.sector == rxLocation.sector)
    def isSGL(self):
        """
        Checks if this is a space-to-ground link.
        @return: L{bool}
        """
        return True
class InterSatelliteLink(Link):
    """
    An L{InterSatelliteLink} transports data between two satellites.
    A transfer is geometrically possible only between orbital locations in
    the same or adjacent sectors (wrapping around the last sector).
    """
    def __init__(self, name=None, cost=0, size=1, capacity=1,
                 protocol=None, maxTransmitted=1, maxReceived=1):
        """
        @param name: the name of this Link
        @type name: L{str}
        @param cost: the cost of this inter-satellite link
        @type cost: L{float}
        @param size: the size of this inter-satellite link
        @type size: L{float}
        @param capacity: the data capacity of this inter-satellite link
        @type capacity: L{int}
        @param protocol: the protocol of this inter-satellite link
        @type protocol: L{str}
        @param maxTransmitted: the max data transmitted by this inter-satellite link each turn
        @type maxTransmitted: L{int}
        @param maxReceived: the max data received by this inter-satellite link each turn
        @type maxReceived: L{int}
        """
        Link.__init__(self, name=name, cost=cost,
                      size=size, capacity=capacity,
                      protocol=protocol,
                      maxTransmitted=maxTransmitted,
                      maxReceived=maxReceived)
    def _withinRange(self, txLocation, rxLocation, context):
        """
        Checks whether both locations are in orbit and in the same or
        adjacent sectors, treating the sector ring as wrapping around.
        @type txLocation: L{Location}
        @type rxLocation: L{Location}
        @type context: L{Context}
        @return: L{bool}
        """
        if not (txLocation.isOrbit() and rxLocation.isOrbit()):
            return False
        separation = abs(txLocation.sector - rxLocation.sector)
        return (separation <= 1
                or separation >= context.getNumSectors() - 1)
    def couldTransmit(self, data, receiver, txLocation, rxLocation, context):
        """
        Checks if this inter-satellite link could transmit data (state-independent).
        @param data: the data to transmit
        @type data: L{Data}
        @param receiver: the receiver receiving the data
        @type receiver: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        return super(InterSatelliteLink, self).couldTransmit(data, receiver) \
                and self._withinRange(txLocation, rxLocation, context)
    def couldReceive(self, data, transmitter, txLocation, rxLocation, context):
        """
        Checks if this inter-satellite link could receive data (state-independent).
        @param data: the data to transmit
        @type data: L{Data}
        @param transmitter: the transmitter transmitting the data
        @type transmitter: L{Link}
        @param txLocation: the transmitter location
        @type txLocation: L{Location}
        @param rxLocation: the receiver location
        @type rxLocation: L{Location}
        @param context: the context
        @type context: L{Context}
        @return: L{bool}
        """
        return super(InterSatelliteLink, self).couldReceive(data, transmitter) \
                and self._withinRange(txLocation, rxLocation, context)
    def isISL(self):
        """
        Checks if this is an inter-satellite link.
        @return: L{bool}
        """
        return True
| apache-2.0 |
googleads/google-ads-java | google-ads-stubs-v9/src/test/java/com/google/ads/googleads/v9/services/HotelPerformanceViewServiceClientTest.java | 6048 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v9.services;
import com.google.ads.googleads.v9.resources.HotelPerformanceView;
import com.google.ads.googleads.v9.resources.HotelPerformanceViewName;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.protobuf.AbstractMessage;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class HotelPerformanceViewServiceClientTest {
  private static MockHotelPerformanceViewService mockHotelPerformanceViewService;
  private static MockServiceHelper mockServiceHelper;
  // Channel provider for the in-process mock server; also used to verify
  // that the expected API client headers were sent.
  private LocalChannelProvider channelProvider;
  private HotelPerformanceViewServiceClient client;
  @BeforeClass
  public static void startStaticServer() {
    // One in-process gRPC mock server is shared by all tests in the class.
    mockHotelPerformanceViewService = new MockHotelPerformanceViewService();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockHotelPerformanceViewService));
    mockServiceHelper.start();
  }
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
  @Before
  public void setUp() throws IOException {
    // Reset recorded requests/responses and build a fresh client wired to
    // the mock server with no credentials.
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    HotelPerformanceViewServiceSettings settings =
        HotelPerformanceViewServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = HotelPerformanceViewServiceClient.create(settings);
  }
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  @Test
  public void getHotelPerformanceViewTest() throws Exception {
    // Happy path using the typed resource-name overload.
    HotelPerformanceView expectedResponse =
        HotelPerformanceView.newBuilder()
            .setResourceName(HotelPerformanceViewName.of("[CUSTOMER_ID]").toString())
            .build();
    mockHotelPerformanceViewService.addResponse(expectedResponse);
    HotelPerformanceViewName resourceName = HotelPerformanceViewName.of("[CUSTOMER_ID]");
    HotelPerformanceView actualResponse = client.getHotelPerformanceView(resourceName);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockHotelPerformanceViewService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetHotelPerformanceViewRequest actualRequest =
        ((GetHotelPerformanceViewRequest) actualRequests.get(0));
    Assert.assertEquals(resourceName.toString(), actualRequest.getResourceName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  @Test
  public void getHotelPerformanceViewExceptionTest() throws Exception {
    // Server-side INVALID_ARGUMENT surfaces as InvalidArgumentException.
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockHotelPerformanceViewService.addException(exception);
    try {
      HotelPerformanceViewName resourceName = HotelPerformanceViewName.of("[CUSTOMER_ID]");
      client.getHotelPerformanceView(resourceName);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  @Test
  public void getHotelPerformanceViewTest2() throws Exception {
    // Happy path using the plain-String resource-name overload.
    HotelPerformanceView expectedResponse =
        HotelPerformanceView.newBuilder()
            .setResourceName(HotelPerformanceViewName.of("[CUSTOMER_ID]").toString())
            .build();
    mockHotelPerformanceViewService.addResponse(expectedResponse);
    String resourceName = "resourceName-384566343";
    HotelPerformanceView actualResponse = client.getHotelPerformanceView(resourceName);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockHotelPerformanceViewService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetHotelPerformanceViewRequest actualRequest =
        ((GetHotelPerformanceViewRequest) actualRequests.get(0));
    Assert.assertEquals(resourceName, actualRequest.getResourceName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  @Test
  public void getHotelPerformanceViewExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockHotelPerformanceViewService.addException(exception);
    try {
      String resourceName = "resourceName-384566343";
      client.getHotelPerformanceView(resourceName);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
| apache-2.0 |
archever/archever_me | app/wechat/conf.py | 91 | #coding: utf-8
APPID = "wx3808b7bc91e9f622"
APPSECRET = "1df0afffd4ac6858d32491c2312dbb4f" | apache-2.0 |
mmm2a/game-center | src/com/morgan/server/db/DbFlagAccessor.java | 466 | package com.morgan.server.db;
import com.morgan.server.util.flag.Flag;
import com.morgan.server.util.flag.FlagAccessor;
/**
* {@link FlagAccessor} for the db package.
*
* @author mark@mark-morgan.net (Mark Morgan)
*/
public interface DbFlagAccessor extends FlagAccessor {
  /**
   * Retrieves the name of the persistence unit to bind.
   * Optional flag; defaults to "gamedb" when not supplied.
   */
  @Flag(name = "persistence-unit",
      description = "String name of the persistence unit to bind",
      required = false,
      defaultValue = "gamedb")
  String persistenceUnit();
}
| apache-2.0 |
naveenbhaskar/gocd | server/src/main/java/com/thoughtworks/go/server/controller/actions/JsonAction.java | 4869 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.controller.actions;
import com.thoughtworks.go.config.validation.GoConfigValidity;
import com.thoughtworks.go.server.web.JsonView;
import com.thoughtworks.go.server.web.SimpleJsonView;
import com.thoughtworks.go.serverhealth.ServerHealthState;
import com.thoughtworks.go.util.GoConstants;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletResponse;
import java.util.LinkedHashMap;
import java.util.Map;
import static com.thoughtworks.go.util.GoConstants.ERROR_FOR_JSON;
import static com.thoughtworks.go.util.GoConstants.RESPONSE_CHARSET_JSON;
import static javax.servlet.http.HttpServletResponse.*;
/**
 * A {@link RestfulAction} that renders a JSON payload together with an HTTP
 * status code. Instances are obtained via the static factory methods below;
 * the constructor is private.
 */
public class JsonAction implements RestfulAction {
    private final int status;
    private final Object json;

    private JsonAction(int status, Object json) {
        this.status = status;
        this.json = json;
    }

    /**
     * Maps a {@link ServerHealthState} to a JSON action: 201 Created on
     * success, otherwise the health state's own HTTP code with an error
     * message payload.
     */
    public static JsonAction from(ServerHealthState serverHealthState) {
        if (serverHealthState.isSuccess()) {
            return jsonCreated(new LinkedHashMap<>());
        }
        Map<String, Object> jsonLog = new LinkedHashMap<>();
        jsonLog.put(ERROR_FOR_JSON, serverHealthState.getDescription());
        return new JsonAction(serverHealthState.getType().getHttpCode(), jsonLog);
    }

    /** 201 Created with the given JSON body. */
    public static JsonAction jsonCreated(Object json) {
        return new JsonAction(SC_CREATED, json);
    }

    /** 200 OK with the given JSON body. */
    public static JsonAction jsonFound(Object json) {
        return new JsonAction(SC_OK, json);
    }

    /** 200 OK with an empty JSON object body. */
    public static JsonAction jsonOK() {
        return jsonOK(new LinkedHashMap<>());
    }

    /** 406 Not Acceptable with the given JSON body. */
    public static JsonAction jsonNotAcceptable(Object json) {
        return new JsonAction(SC_NOT_ACCEPTABLE, json);
    }

    /** 403 Forbidden with an empty JSON object body. */
    public static JsonAction jsonForbidden() {
        return new JsonAction(SC_FORBIDDEN, new LinkedHashMap<>());
    }

    /** 403 Forbidden with the given message wrapped in the standard error field. */
    public static JsonAction jsonForbidden(String message) {
        Map<String, Object> map = new LinkedHashMap<>();
        map.put(ERROR_FOR_JSON, message);
        return new JsonAction(SC_FORBIDDEN, map);
    }

    /** 403 Forbidden carrying the exception's message. */
    public static JsonAction jsonForbidden(Exception e) {
        return jsonForbidden(e.getMessage());
    }

    /** 400 Bad Request with the given JSON body. */
    public static JsonAction jsonBadRequest(Object json) {
        return new JsonAction(SC_BAD_REQUEST, json);
    }

    /** 404 Not Found with the given JSON body. */
    public static JsonAction jsonNotFound(Object json) {
        return new JsonAction(SC_NOT_FOUND, json);
    }

    /** 409 Conflict with the given JSON body. */
    public static JsonAction jsonConflict(Object json) {
        return new JsonAction(SC_CONFLICT, json);
    }

    /**
     * Chooses 409 Conflict for merge/conflict validity failures and 404 Not
     * Found for all other invalid-config cases.
     */
    public static JsonAction jsonByValidity(Object json, GoConfigValidity.InvalidGoConfig configValidity) {
        return (configValidity.isType(GoConfigValidity.VT_CONFLICT) ||
                configValidity.isType(GoConfigValidity.VT_MERGE_OPERATION_ERROR) ||
                configValidity.isType(GoConfigValidity.VT_MERGE_POST_VALIDATION_ERROR) ||
                configValidity.isType(GoConfigValidity.VT_MERGE_PRE_VALIDATION_ERROR)) ? jsonConflict(json) : jsonNotFound(json);
    }

    /**
     * @deprecated replace with createView
     */
    public ModelAndView respond(HttpServletResponse response) {
        return new JsonModelAndView(response, json, status);
    }

    public ModelAndView createView() {
        SimpleJsonView view = new SimpleJsonView(status, json);
        return new ModelAndView(view, JsonView.asMap(json));
    }

    /** 200 OK with the given (possibly raw-typed, for legacy callers) map body. */
    public static JsonAction jsonOK(Map jsonMap) {
        return new JsonAction(SC_OK, jsonMap);
    }

    // static: the view needs no reference to the enclosing JsonAction instance.
    private static class JsonModelAndView extends ModelAndView {

        public String getViewName() {
            return "jsonView";
        }

        public JsonModelAndView(HttpServletResponse response, Object json, int status) {
            super(new JsonView(), JsonView.asMap(json));
            // In IE, there's a problem with caching. We want to cache if we can.
            // This will force the browser to clear the cache only for this page.
            // If any other pages need to clear the cache, we might want to move this
            // logic to an intercepter.
            response.addHeader("Cache-Control", GoConstants.CACHE_CONTROL);
            response.setStatus(status);
            response.setContentType(RESPONSE_CHARSET_JSON);
        }
    }
}
| apache-2.0 |
cping/LGame | Java/old/AWT_ver/src/org/loon/framework/javase/game/core/resource/Resources.java | 8682 | package org.loon.framework.javase.game.core.resource;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.HashMap;
import java.util.Iterator;
import org.loon.framework.javase.game.core.LSystem;
import org.loon.framework.javase.game.utils.StringUtils;
import org.loon.framework.javase.game.utils.collection.ArrayByte;
/**
*
* Copyright 2008 - 2009
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loonframework
* @author chenpeng
* @email:ceponline@yahoo.com.cn
* @version 0.1
*/
/**
 * Static utility for loading resources from the file system, the classpath,
 * or a URL, with an internal byte-array cache keyed by (normalized) name.
 * Not instantiable.
 */
public abstract class Resources {

	// Class loader used to resolve classpath resources; falls back to the
	// current thread's context loader if the class's own loader is unavailable.
	private static ClassLoader classLoader;

	static {
		try {
			classLoader = Resources.class.getClassLoader();
		} catch (Exception e) {
			classLoader = Thread.currentThread().getContextClassLoader();
		}
	}

	// Guards all access to the lazyResources cache.
	final static private Object LOCK = new Object();

	// Cache of resource name (lower-cased, spaces stripped) -> raw byte[] data.
	private final static HashMap<String, Object> lazyResources = new HashMap<String, Object>(
			LSystem.DEFAULT_MAX_CACHE_SIZE);

	private Resources() {
	}

	/**
	 * Returns an iterator over the names of all cached resources.
	 *
	 * @return iterator over cached resource names
	 */
	public static Iterator<String> getNames() {
		synchronized (LOCK) {
			return lazyResources.keySet().iterator();
		}
	}

	/**
	 * Checks whether a resource with the given name is present in the cache.
	 *
	 * @param resName resource name to look up
	 * @return true if the resource is cached
	 */
	public static boolean contains(String resName) {
		synchronized (LOCK) {
			return (lazyResources.get(resName) != null);
		}
	}

	/**
	 * Removes the resource with the given name from the cache.
	 *
	 * @param resName resource name to evict
	 */
	public static void remove(String resName) {
		synchronized (LOCK) {
			lazyResources.remove(resName);
		}
	}

	/**
	 * Downloads the content at the given URL and returns it as a byte array.
	 *
	 * @param uri URL to read
	 * @return the downloaded bytes, or null on any error
	 */
	final static public byte[] getHttpStream(final String uri) {
		URL url;
		try {
			url = new URL(uri);
		} catch (Exception e) {
			return null;
		}
		InputStream is = null;
		try {
			is = url.openStream();
		} catch (Exception e) {
			return null;
		}
		ByteArrayOutputStream os = new ByteArrayOutputStream();
		byte[] arrayByte = null;
		try {
			arrayByte = new byte[4096];
			int read;
			while ((read = is.read(arrayByte)) >= 0) {
				os.write(arrayByte, 0, read);
			}
			arrayByte = os.toByteArray();
		} catch (IOException e) {
			return null;
		} finally {
			try {
				if (os != null) {
					os.close();
					os = null;
				}
				if (is != null) {
					is.close();
					is = null;
				}
			} catch (IOException e) {
			}
		}
		return arrayByte;
	}

	/**
	 * Opens the named resource as an InputStream, using the cache.
	 * Names containing "file:" or ":/" are treated as URLs and opened directly.
	 *
	 * @param fileName resource name or URL
	 * @return stream over the resource data, or null if a URL fails to open
	 */
	public static InputStream getResourceAsStream(final String fileName) {
		if ((fileName.indexOf("file:") >= 0) || (fileName.indexOf(":/") > 0)) {
			try {
				URL url = new URL(fileName);
				return new BufferedInputStream(url.openStream());
			} catch (Exception e) {
				return null;
			}
		}
		return new ByteArrayInputStream(getResource(fileName).getData());
	}

	/**
	 * Opens the named resource as an InputStream, bypassing the cache.
	 * Names containing "file:" or ":/" are treated as URLs and opened directly.
	 *
	 * @param fileName resource name or URL
	 * @return stream over the resource data, or null if a URL fails to open
	 */
	public static InputStream getNotCacheResourceAsStream(final String fileName) {
		if ((fileName.indexOf("file:") >= 0) || (fileName.indexOf(":/") > 0)) {
			try {
				URL url = new URL(fileName);
				return new BufferedInputStream(url.openStream());
			} catch (Exception e) {
				return null;
			}
		}
		return new ByteArrayInputStream(getNotCacheResource(fileName).getData());
	}

	/**
	 * Opens the named resource, preferring the file system, then the class
	 * loader, then {@code LSystem}'s classpath lookup.
	 *
	 * @param resName file path or classpath resource name
	 * @return stream over the resource
	 * @throws IOException if the resource cannot be found
	 */
	public static InputStream openResource(final String resName)
			throws IOException {
		File file = new File(resName);
		if (file.exists()) {
			try {
				return new BufferedInputStream(new FileInputStream(file));
			} catch (FileNotFoundException e) {
				throw new IOException(resName + " file not found !");
			}
		} else {
			if (classLoader != null) {
				InputStream in = null;
				try {
					in = classLoader.getResourceAsStream(resName);
				} catch (Exception e) {
					throw new RuntimeException(resName + " not found!");
				}
				if (in == null) {
					in = LSystem.getResourceAsStream(resName);
				}
				return in;
			} else {
				try {
					return new FileInputStream(file);
				} catch (FileNotFoundException e) {
					throw new IOException(resName + " not found!");
				}
			}
		}
	}

	// True if the name resolves to an existing file on disk.
	private static boolean isExists(String fileName) {
		return new File(fileName).exists();
	}

	/**
	 * Loads the named resource into an ArrayByte, caching the raw bytes.
	 * The whole cache is cleared once it exceeds LSystem.DEFAULT_MAX_CACHE_SIZE.
	 *
	 * @param fileName file path or classpath resource name
	 * @return the resource data
	 * @throws RuntimeException if the resource cannot be read
	 */
	public static ArrayByte getResource(final String fileName) {
		String innerName = fileName;
		// Cache key is the name with spaces removed, lower-cased.
		String keyName = innerName.replaceAll(" ", "").toLowerCase();
		synchronized (LOCK) {
			if (lazyResources.size() > LSystem.DEFAULT_MAX_CACHE_SIZE) {
				lazyResources.clear();
				System.gc();
			}
			byte[] data = (byte[]) lazyResources.get(keyName);
			if (data != null) {
				return new ArrayByte(data);
			}
		}
		BufferedInputStream in = null;
		// "Inner" means the resource is loaded from the classpath rather than
		// opened as a plain file; the name is normalized accordingly below.
		boolean canInner = innerName.startsWith(".")
				|| (innerName.startsWith("/") && LSystem.isWindows());
		if (!isExists(innerName) && !canInner) {
			innerName = ("/" + innerName).intern();
			canInner = true;
		}
		if (canInner) {
			if (innerName.startsWith(".")) {
				innerName = innerName.substring(1, innerName.length());
			}
			innerName = StringUtils.replaceIgnoreCase(innerName, "\\", "/");
			innerName = innerName.substring(1, innerName.length());
		} else {
			if (innerName.startsWith("\\")) {
				innerName = innerName.substring(1, innerName.length());
			}
		}
		if (!canInner) {
			try {
				in = new BufferedInputStream(new FileInputStream(new File(
						innerName)));
			} catch (FileNotFoundException ex) {
				throw new RuntimeException(ex);
			}
		} else {
			in = new BufferedInputStream(LSystem.getResourceAsStream(innerName));
		}
		ArrayByte byteArray = new ArrayByte();
		try {
			byteArray.write(in);
			in.close();
			byteArray.reset();
			lazyResources.put(keyName, byteArray.getData());
			return byteArray;
		} catch (IOException ex) {
			throw new RuntimeException(fileName + " file not found !");
		}
	}

	/**
	 * Loads the named resource into an ArrayByte without touching the cache.
	 * Uses the same file-vs-classpath name normalization as getResource.
	 *
	 * @param fileName file path or classpath resource name
	 * @return the resource data
	 * @throws RuntimeException if the resource cannot be read
	 */
	public static ArrayByte getNotCacheResource(final String fileName) {
		String innerName = fileName;
		BufferedInputStream in = null;
		boolean canInner = innerName.startsWith(".")
				|| (innerName.startsWith("/") && LSystem.isWindows());
		if (!isExists(innerName) && !canInner) {
			innerName = ("/" + innerName).intern();
			canInner = true;
		}
		if (canInner) {
			if (innerName.startsWith(".")) {
				innerName = innerName.substring(1, innerName.length());
			}
			innerName = StringUtils.replaceIgnoreCase(innerName, "\\", "/");
			innerName = innerName.substring(1, innerName.length());
		} else {
			if (innerName.startsWith("\\")) {
				innerName = innerName.substring(1, innerName.length());
			}
		}
		if (!canInner) {
			try {
				in = new BufferedInputStream(new FileInputStream(new File(
						innerName)));
			} catch (FileNotFoundException ex) {
				throw new RuntimeException(ex);
			}
		} else {
			in = new BufferedInputStream(LSystem.getResourceAsStream(innerName));
		}
		ArrayByte byteArray = new ArrayByte();
		try {
			byteArray.write(in);
			in.close();
			byteArray.reset();
			return byteArray;
		} catch (IOException ex) {
			throw new RuntimeException(fileName + " file not found !");
		}
	}

	/**
	 * Reads an InputStream fully into a byte array and closes it.
	 *
	 * @param is stream to drain (closed on return)
	 * @return the stream's bytes, or null if is is null or an I/O error occurs
	 */
	final static public byte[] getDataSource(InputStream is) {
		if (is == null) {
			return null;
		}
		ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
		byte[] bytes = new byte[8192];
		try {
			int read;
			while ((read = is.read(bytes)) >= 0) {
				byteArrayOutputStream.write(bytes, 0, read);
			}
			bytes = byteArrayOutputStream.toByteArray();
		} catch (IOException e) {
			return null;
		} finally {
			try {
				if (byteArrayOutputStream != null) {
					byteArrayOutputStream.flush();
					byteArrayOutputStream = null;
				}
				if (is != null) {
					is.close();
					is = null;
				}
			} catch (IOException e) {
			}
		}
		return bytes;
	}

	/** Clears the resource cache. */
	public static void destroy() {
		lazyResources.clear();
	}

	public void finalize() {
		destroy();
	}
}
| apache-2.0 |
talsma-ict/umldoclet | src/plantuml-asl/src/net/sourceforge/plantuml/sequencediagram/command/CommandDivider.java | 2370 | /* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.sequencediagram.command;
import net.sourceforge.plantuml.LineLocation;
import net.sourceforge.plantuml.command.CommandExecutionResult;
import net.sourceforge.plantuml.command.SingleLineCommand2;
import net.sourceforge.plantuml.command.regex.IRegex;
import net.sourceforge.plantuml.command.regex.RegexConcat;
import net.sourceforge.plantuml.command.regex.RegexLeaf;
import net.sourceforge.plantuml.command.regex.RegexResult;
import net.sourceforge.plantuml.cucadiagram.Display;
import net.sourceforge.plantuml.sequencediagram.SequenceDiagram;
/**
 * Parses the sequence-diagram divider syntax: a label wrapped in "==" markers
 * on a single line (e.g. {@code == Initialization ==}).
 */
public class CommandDivider extends SingleLineCommand2<SequenceDiagram> {

	public CommandDivider() {
		super(getRegexConcat());
	}

	/** Builds the pattern: "==", optional spaces, captured LABEL, optional spaces, "==". */
	static IRegex getRegexConcat() {
		return RegexConcat.build(CommandDivider.class.getName(), RegexLeaf.start(), //
				new RegexLeaf("=="), //
				RegexLeaf.spaceZeroOrMore(), //
				new RegexLeaf("LABEL", "(.*)"), //
				RegexLeaf.spaceZeroOrMore(), //
				new RegexLeaf("=="), RegexLeaf.end()); //
	}

	@Override
	protected CommandExecutionResult executeArg(SequenceDiagram diagram, LineLocation location, RegexResult arg) {
		// Add the captured label to the diagram as a divider.
		diagram.divider(Display.getWithNewlines(arg.get("LABEL", 0)));
		return CommandExecutionResult.ok();
	}
}
| apache-2.0 |
box/mojito | webapp/src/main/java/com/box/l10n/mojito/rest/cli/GitInfo.java | 1127 | package com.box.l10n.mojito.rest.cli;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Component;
/**
 * To optionally load info about git
 *
 * <p>Binds the {@code git.*} properties from {@code git.properties} (generated
 * at build time) onto this bean: {@code git.branch}, {@code git.commit.id} and
 * {@code git.commit.time}.
 *
 * @author jaurambault
 */
@Component("GitInfoWebapp")
@PropertySource("classpath:git.properties")
@ConfigurationProperties(prefix = "git")
public class GitInfo {

    // Current git branch name (bound from "git.branch").
    private String branch;

    // Commit details (bound from "git.commit.*").
    private final Commit commit = new Commit();

    public String getBranch() {
        return this.branch;
    }

    public void setBranch(String branch) {
        this.branch = branch;
    }

    public Commit getCommit() {
        return this.commit;
    }

    /** Holder for the git commit id and timestamp. */
    public static class Commit {

        // Commit SHA (bound from "git.commit.id").
        private String id;

        // Commit timestamp string (bound from "git.commit.time").
        private String time;

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }

        public String getTime() {
            return this.time;
        }

        public void setTime(String time) {
            this.time = time;
        }
    }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-code-generator/src/main/java/com/amazonaws/codegen/AddShapes.java | 18249 | /*
* Copyright (c) 2016. Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.codegen;
import com.amazonaws.codegen.internal.TypeUtils;
import com.amazonaws.codegen.model.config.customization.CustomizationConfig;
import com.amazonaws.codegen.model.intermediate.EnumModel;
import com.amazonaws.codegen.model.intermediate.ListModel;
import com.amazonaws.codegen.model.intermediate.MapModel;
import com.amazonaws.codegen.model.intermediate.MemberModel;
import com.amazonaws.codegen.model.intermediate.ParameterHttpMapping;
import com.amazonaws.codegen.model.intermediate.Protocol;
import com.amazonaws.codegen.model.intermediate.ReturnTypeModel;
import com.amazonaws.codegen.model.intermediate.ShapeModel;
import com.amazonaws.codegen.model.intermediate.VariableModel;
import com.amazonaws.codegen.model.service.Location;
import com.amazonaws.codegen.model.service.Member;
import com.amazonaws.codegen.model.service.ServiceModel;
import com.amazonaws.codegen.model.service.Shape;
import com.amazonaws.codegen.naming.NamingStrategy;
import com.amazonaws.util.StringUtils;
import java.util.List;
import java.util.Map;
import static com.amazonaws.codegen.internal.DocumentationUtils.generateGetterDocumentation;
import static com.amazonaws.codegen.internal.DocumentationUtils.generateSetterDocumentation;
import static com.amazonaws.codegen.internal.TypeUtils.LIST_AUTO_CONSTRUCT_IMPL;
import static com.amazonaws.codegen.internal.TypeUtils.LIST_DEFAULT_IMPL;
import static com.amazonaws.codegen.internal.TypeUtils.LIST_INTERFACE;
import static com.amazonaws.codegen.internal.TypeUtils.MAP_AUTO_CONSTRUCT_IMPL;
import static com.amazonaws.codegen.internal.TypeUtils.MAP_DEFAULT_IMPL;
import static com.amazonaws.codegen.internal.TypeUtils.MAP_INTERFACE;
import static com.amazonaws.codegen.internal.TypeUtils.getDataTypeMapping;
import static com.amazonaws.codegen.internal.Utils.capitialize;
import static com.amazonaws.codegen.internal.Utils.isEnumShape;
import static com.amazonaws.codegen.internal.Utils.isListShape;
import static com.amazonaws.codegen.internal.Utils.isMapShape;
import static com.amazonaws.codegen.internal.Utils.isScalar;
/**
 * Base class for intermediate-model construction steps that translate C2J
 * service-model shapes into {@link ShapeModel}s (members, HTTP bindings, and
 * list/map/enum metadata) used by the code generator.
 */
abstract class AddShapes {

    private final IntermediateModelBuilder builder;
    private final NamingStrategy namingStrategy;

    AddShapes(IntermediateModelBuilder builder) {
        this.builder = builder;
        this.namingStrategy = builder.getNamingStrategy();
    }

    protected final TypeUtils getTypeUtils() {
        return builder.getTypeUtils();
    }

    protected final NamingStrategy getNamingStrategy() {
        return namingStrategy;
    }

    protected final ServiceModel getServiceModel() {
        return builder.getService();
    }

    protected final CustomizationConfig getCustomizationConfig() {
        return builder.getCustomConfig();
    }

    /**
     * Builds a {@link ShapeModel} for the named C2J shape: copies documentation
     * and flags, converts each member, tracks header/status-code/payload/streaming
     * members, and records enum values when present.
     *
     * @param javaClassName generated Java class name for the shape
     * @param shapeName     C2J shape name to look up in the service model
     * @return the populated shape model
     */
    protected final ShapeModel generateShapeModel(String javaClassName, String shapeName) {
        final ShapeModel shapeModel = new ShapeModel(shapeName);
        shapeModel.setShapeName(javaClassName);
        final Shape shape = getServiceModel().getShapes().get(shapeName);

        shapeModel.setDocumentation(shape.getDocumentation());
        shapeModel.setVariable(new VariableModel(getNamingStrategy().getVariableName(javaClassName),
                                                 javaClassName));
        // contains the list of c2j member names that are required for this shape.
        shapeModel.setRequired(shape.getRequired());
        shapeModel.setDeprecated(shape.isDeprecated());
        shapeModel.setWrapper(shape.isWrapper());

        final Map<String, Member> members = shape.getMembers();

        if (members != null) {
            // Track HTTP binding categories so the template layer knows what
            // special handling (headers, status code, payload, streaming) applies.
            boolean hasHeaderMember = false;
            boolean hasStatusCodeMember = false;
            boolean hasPayloadMember = false;
            boolean hasStreamingMember = false;

            for (Map.Entry<String, Member> memberEntry : members.entrySet()) {

                String c2jMemberName = memberEntry.getKey();
                Member c2jMemberDefinition = memberEntry.getValue();
                Shape parentShape = shape;

                MemberModel memberModel = generateMemberModel(c2jMemberName, c2jMemberDefinition,
                                                              getProtocol(), parentShape,
                                                              getServiceModel().getShapes());

                if (memberModel.getHttp().getLocation() == Location.HEADER) {
                    hasHeaderMember = true;

                } else if (memberModel.getHttp().getLocation() == Location.STATUS_CODE) {
                    hasStatusCodeMember = true;

                } else if (memberModel.getHttp().getIsPayload()) {
                    hasPayloadMember = true;

                    if (memberModel.getHttp().getIsStreaming()) {
                        hasStreamingMember = true;
                    }
                }

                shapeModel.addMember(memberModel);
            }

            shapeModel.withHasHeaderMember(hasHeaderMember)
                      .withHasStatusCodeMember(hasStatusCodeMember)
                      .withHasPayloadMember(hasPayloadMember)
                      .withHasStreamingMember(hasStreamingMember);
        }

        final List<String> enumValues = shape.getEnumValues();
        if (enumValues != null && !enumValues.isEmpty()) {
            for (String enumValue : enumValues) {
                // TODO handle useRealName from Coral if explicitly mentioned in
                // the customization.
                shapeModel.addEnum(
                        new EnumModel(getNamingStrategy().getEnumValueName(enumValue), enumValue));
            }
        }

        return shapeModel;
    }

    /**
     * Converts one C2J member into a {@link MemberModel}: resolves Java types,
     * generates getter/setter names, HTTP mapping, and (for list/map/enum shapes)
     * the container metadata.
     *
     * @throws IllegalArgumentException if an idempotency-token member is not a string
     */
    private MemberModel generateMemberModel(String c2jMemberName, Member c2jMemberDefinition,
                                            String protocol, Shape parentShape,
                                            Map<String, Shape> allC2jShapes) {
        final String c2jShapeName = c2jMemberDefinition.getShape();
        final String variableName = getNamingStrategy().getVariableName(c2jMemberName);
        final String variableType = getTypeUtils().getJavaDataType(allC2jShapes, c2jShapeName);
        final String variableDeclarationType = getTypeUtils()
                .getJavaDataType(allC2jShapes, c2jShapeName, getCustomizationConfig());

        //If member is idempotent, then it should be of string type
        //Else throw IllegalArgumentException.
        if (c2jMemberDefinition.isIdempotencyToken() &&
            !variableType.equals(String.class.getSimpleName())) {
            throw new IllegalArgumentException(c2jMemberName +
                                               " is idempotent. It's shape should be string type but it is of " +
                                               variableType + " type.");
        }

        final MemberModel memberModel = new MemberModel();

        memberModel.withC2jName(c2jMemberName)
                   .withC2jShape(c2jShapeName)
                   .withName(capitialize(c2jMemberName))
                   .withVariable(new VariableModel(variableName, variableType, variableDeclarationType)
                                         .withDocumentation(c2jMemberDefinition.getDocumentation()))
                   .withSetterModel(new VariableModel(variableName, variableType, variableDeclarationType)
                                            .withDocumentation(generateSetterDocumentation()))
                   .withGetterModel(new ReturnTypeModel(variableType).withDocumentation(generateGetterDocumentation()));
        memberModel.setDocumentation(c2jMemberDefinition.getDocumentation());
        memberModel.setDeprecated(c2jMemberDefinition.isDeprecated());
        memberModel
                .withGetterMethodName(namingStrategy.getGetterMethodName(c2jMemberName))
                .withSetterMethodName(namingStrategy.getSetterMethodName(c2jMemberName))
                .withFluentSetterMethodName(namingStrategy.getFluentSetterMethodName(c2jMemberName));
        memberModel
                .setIdempotencyToken(c2jMemberDefinition.isIdempotencyToken());

        // Pass the xmlNameSpace from the member reference
        if (c2jMemberDefinition.getXmlNamespace() != null) {
            memberModel.setXmlNameSpaceUri(c2jMemberDefinition.getXmlNamespace().getUri());
        }

        // Additional member model metadata for list/map/enum types
        fillContainerTypeMemberMetadata(allC2jShapes, c2jMemberDefinition.getShape(), memberModel,
                                        protocol);

        final ParameterHttpMapping httpMapping = generateParameterHttpMapping(parentShape,
                                                                              c2jMemberName,
                                                                              c2jMemberDefinition,
                                                                              protocol,
                                                                              allC2jShapes);
        final String payload = parentShape.getPayload();

        httpMapping.withPayload(payload != null && payload.equals(c2jMemberName))
                   .withStreaming(allC2jShapes.get(c2jMemberDefinition.getShape()).isStreaming());

        memberModel.setHttp(httpMapping);
        memberModel.setJsonValue(c2jMemberDefinition.isJsonvalue());

        return memberModel;
    }

    /**
     * Derives the HTTP binding (location, payload/streaming flags, marshaller and
     * unmarshaller location names, greedy-URI flag) for a member.
     */
    private ParameterHttpMapping generateParameterHttpMapping(Shape parentShape,
                                                              String memberName,
                                                              Member member,
                                                              String protocol,
                                                              Map<String, Shape> allC2jShapes) {
        ParameterHttpMapping mapping = new ParameterHttpMapping();

        Shape memberShape = allC2jShapes.get(member.getShape());

        mapping.withLocation(Location.forValue(member.getLocation()))
               .withPayload(member.isPayload())
               .withStreaming(member.isStreaming())
               .withFlattened(member.isFlattened() || memberShape.isFlattened())
               .withUnmarshallLocationName(deriveUnmarshallerLocationName(memberName, member))
               .withMarshallLocationName(deriveMarshallerLocationName(memberName, member, protocol))
               .withIsGreedy(isGreedy(parentShape, allC2jShapes, mapping));

        return mapping;
    }

    /**
     * @param parentShape  Shape containing the member in question.
     * @param allC2jShapes All shapes in the service model.
     * @param mapping      Mapping being built.
     * @return True if the member is bound to a greedy label, false otherwise.
     */
    private boolean isGreedy(Shape parentShape, Map<String, Shape> allC2jShapes, ParameterHttpMapping mapping) {
        if (mapping.getLocation() == Location.URI) {
            // If the location is URI we can assume the parent shape is an input shape.
            final String requestUri = findRequestUri(parentShape, allC2jShapes);
            if (requestUri.contains(String.format("{%s+}", mapping.getMarshallLocationName()))) {
                return true;
            }
        }
        return false;
    }

    /**
     * Given an input shape, finds the Request URI for the operation that input is referenced from.
     *
     * @param parentShape  Input shape to find operation's request URI for.
     * @param allC2jShapes All shapes in the service model.
     * @return Request URI for operation.
     * @throws RuntimeException If operation can't be found.
     */
    private String findRequestUri(Shape parentShape, Map<String, Shape> allC2jShapes) {
        return builder.getService().getOperations().values().stream()
                      .filter(o -> o.getInput() != null)
                      .filter(o -> allC2jShapes.get(o.getInput().getShape()).equals(parentShape))
                      .map(o -> o.getHttp().getRequestUri())
                      .findFirst().orElseThrow(() -> new RuntimeException("Could not find request URI for input shape"));
    }

    /** Location name for unmarshalling: the explicit locationName, else the member name. */
    private String deriveUnmarshallerLocationName(String memberName, Member member) {
        final String locationName = member.getLocationName();

        if (locationName != null && !locationName.trim().isEmpty()) {
            return locationName;
        }

        return memberName;
    }

    /**
     * Location name for marshalling: queryName wins, then locationName
     * (upper-cased first letter for the EC2 protocol), else the member name.
     */
    private String deriveMarshallerLocationName(String memberName, Member member, String protocol) {
        final String queryName = member.getQueryName();
        if (queryName != null && !queryName.trim().isEmpty()) {
            return queryName;
        } else {
            final String locationName = member.getLocationName();
            if (locationName != null && !locationName.trim().isEmpty()) {
                if (protocol.equals(Protocol.EC2.getValue())) {
                    return StringUtils.upperCase(locationName.substring(0, 1)) +
                           locationName.substring(1);
                }
                return locationName;
            } else {
                return memberName;
            }
        }
    }

    /**
     * Populates list/map/enum metadata on the member model when the member's
     * shape is a container or enum. Map keys must be scalar (complex keys are
     * unsupported and rejected).
     */
    private void fillContainerTypeMemberMetadata(Map<String, Shape> c2jShapes,
                                                 String memberC2jShapeName, MemberModel memberModel,
                                                 String protocol) {

        final Shape memberC2jShape = c2jShapes.get(memberC2jShapeName);

        if (isListShape(memberC2jShape)) {

            MemberModel listMemberModel;

            Member listMemberDefinition = memberC2jShape.getListMember();
            String listMemberC2jShapeName = listMemberDefinition.getShape();
            Shape listMemberC2jShape = c2jShapes.get(listMemberC2jShapeName);

            listMemberModel = generateMemberModel("member", listMemberDefinition, protocol,
                                                  memberC2jShape, c2jShapes);
            final String listImpl = getCustomizationConfig().isUseAutoConstructList() ?
                                    getDataTypeMapping(LIST_AUTO_CONSTRUCT_IMPL) :
                                    getDataTypeMapping(LIST_DEFAULT_IMPL);
            memberModel.setListModel(
                    new ListModel(getTypeUtils().getJavaDataType(c2jShapes, listMemberC2jShapeName),
                                  memberC2jShape.getListMember().getLocationName(), listImpl,
                                  getDataTypeMapping(LIST_INTERFACE), listMemberModel));

            if (listMemberC2jShape.getEnumValues() != null) {
                memberModel
                        .setEnumType(getNamingStrategy().getJavaClassName(listMemberC2jShapeName));
            }

        } else if (isMapShape(memberC2jShape)) {

            MemberModel mapKeyModel = null;
            MemberModel mapValueModel;

            Member mapKeyMemberDefinition = memberC2jShape.getMapKeyType();
            String mapKeyShapeName = mapKeyMemberDefinition.getShape();
            Shape mapKeyShape = c2jShapes.get(mapKeyShapeName);

            Member mapValueMemberDefinition = memberC2jShape.getMapValueType();

            // Only construct the nested key model if the key of the map
            // itself is Enum shape. Throw exception if the nested key type is complex
            // because we don't support complex map keys.
            if (isEnumShape(mapKeyShape)) {
                mapKeyModel = generateMemberModel("key", mapKeyMemberDefinition, protocol,
                                                  memberC2jShape, c2jShapes);
            } else if (!isScalar(mapKeyShape)) {
                throw new IllegalStateException(
                        "The key type of " + mapKeyShapeName + " must be a scalar!");
            }

            mapValueModel = generateMemberModel("value", mapValueMemberDefinition, protocol,
                                                memberC2jShape, c2jShapes);

            final String mapImpl = getCustomizationConfig().isUseAutoConstructMap() ?
                                   getDataTypeMapping(MAP_AUTO_CONSTRUCT_IMPL) :
                                   getDataTypeMapping(MAP_DEFAULT_IMPL);

            String keyLocation = memberC2jShape.getMapKeyType().getLocationName() != null ?
                                 memberC2jShape.getMapKeyType().getLocationName() : "key";

            String valueLocation = memberC2jShape.getMapValueType().getLocationName() != null ?
                                   memberC2jShape.getMapValueType().getLocationName() : "value";

            memberModel.setMapModel(new MapModel(mapImpl, getDataTypeMapping(MAP_INTERFACE),
                                                 getTypeUtils().getJavaDataType(c2jShapes,
                                                                                memberC2jShape
                                                                                        .getMapKeyType()
                                                                                        .getShape()),
                                                 keyLocation, mapKeyModel, getTypeUtils()
                                                         .getJavaDataType(c2jShapes, memberC2jShape
                                                                 .getMapValueType().getShape()),
                                                 valueLocation, mapValueModel));

        } else if (memberC2jShape.getEnumValues() != null) { // enum values
            memberModel.withEnumType(getNamingStrategy().getJavaClassName(memberC2jShapeName));
        }
    }

    /** Wire protocol name (e.g. json, query, ec2) from the service metadata. */
    protected String getProtocol() {
        return getServiceModel().getMetadata().getProtocol();
    }
}
| apache-2.0 |
priya5713/GIT_App2017 | app/src/main/java/com/git/priyavidhi/Developer.java | 782 | package com.git.priyavidhi;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
//import com.androidappbazzar.garagefinder.R;
/**
 * "Contact us" screen: configures the toolbar as the support action bar with a
 * home/up indicator and title.
 */
public class Developer extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // setContentView(R.layout.activity_developer);
        // NOTE(review): no content view is inflated above, so findViewById()
        // may return null here — confirm whether the layout should be set.
        android.support.v7.widget.Toolbar toolbar = (android.support.v7.widget.Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        final ActionBar ab = getSupportActionBar();
        // getSupportActionBar() can return null (e.g. when no toolbar was
        // attached); guard to avoid a NullPointerException on startup.
        if (ab != null) {
            ab.setHomeAsUpIndicator(R.drawable.ic_action_navigation_menu);
            ab.setDisplayHomeAsUpEnabled(true);
            ab.setTitle("Contact us");
        }
    }
}
| apache-2.0 |
RayRuizhiLiao/ITK_4D | Wrapping/Generators/Python/Tests/ReadDicomAndReadTag.py | 2185 | #==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
# Tests:
# - Reading dicom files from a directory
# - Read a tag (patient's name) through the MetaDataDictionary
# - Read a tag (patient's name) directly with the GetValueFromTag() method
# - Compare the two tags if they are the same
from __future__ import print_function

import itk
import sys

# First CLI argument: directory containing the DICOM series.
imdir = sys.argv[1]

# 3-D float image type used for the whole series.
image_t = itk.Image[itk.F, 3]

# Set up reader
reader = itk.ImageSeriesReader[image_t].New()
dicomIO = itk.GDCMImageIO.New()
dicomFN = itk.GDCMSeriesFileNames.New()
reader.SetImageIO(dicomIO)

# Get file names
dicomFN.SetUseSeriesDetails(True)
dicomFN.SetDirectory(imdir)

# Get the first image series
uids = dicomFN.GetSeriesUIDs()
fnames = dicomFN.GetFileNames(uids[0])

# Read in the files
reader.SetFileNames(fnames)
reader.Update()
image = reader.GetOutput()

# Now access the meta data dictionary
metad = dicomIO.GetMetaDataDictionary()

# Get the patient's name (DICOM tag 0010|0010)
name1 = metad["0010|0010"]

# Other way to get the tag
# GetValueFromTag(tagkey, tagvalue)
# tagvalue is an empty string, in C++ it is passed by
# reference. Here we pass an empty string, and the
# actual value is returned as the second variable.
found, name2 = dicomIO.GetValueFromTag("0010|0010", "")

# Both lookup paths must agree on the tag value.
assert(name1 == name2)

# Check also if we returned the right tag
found, name3 = dicomIO.GetLabelFromTag("0010|0010", "")
assert(name3 == "Patient's Name")
| apache-2.0 |
googleapis/java-analytics-admin | proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/CreateDisplayVideo360AdvertiserLinkRequestOrBuilder.java | 2889 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
package com.google.analytics.admin.v1alpha;
// NOTE(review): protoc-generated interface (see file header: "DO NOT EDIT").
// Changes belong in google/analytics/admin/v1alpha/analytics_admin.proto,
// followed by regeneration -- do not hand-edit this file.
public interface CreateDisplayVideo360AdvertiserLinkRequestOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.analytics.admin.v1alpha.CreateDisplayVideo360AdvertiserLinkRequest)
    com.google.protobuf.MessageOrBuilder {
  /**
   *
   *
   * <pre>
   * Required. Example format: properties/1234
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  java.lang.String getParent();
  /**
   *
   *
   * <pre>
   * Required. Example format: properties/1234
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  com.google.protobuf.ByteString getParentBytes();
  /**
   *
   *
   * <pre>
   * Required. The DisplayVideo360AdvertiserLink to create.
   * </pre>
   *
   * <code>
   * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the displayVideo360AdvertiserLink field is set.
   */
  boolean hasDisplayVideo360AdvertiserLink();
  /**
   *
   *
   * <pre>
   * Required. The DisplayVideo360AdvertiserLink to create.
   * </pre>
   *
   * <code>
   * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The displayVideo360AdvertiserLink.
   */
  com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink
      getDisplayVideo360AdvertiserLink();
  /**
   *
   *
   * <pre>
   * Required. The DisplayVideo360AdvertiserLink to create.
   * </pre>
   *
   * <code>
   * .google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLink display_video_360_advertiser_link = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  com.google.analytics.admin.v1alpha.DisplayVideo360AdvertiserLinkOrBuilder
      getDisplayVideo360AdvertiserLinkOrBuilder();
}
| apache-2.0 |
aajjbb/contest-files | AtCoder/ABC-219/A.cc | 788 | #include <bits/stdc++.h>
// Greatest common divisor via the iterative Euclidean algorithm.
template<typename T> T gcd(T a, T b) {
    while (b) {
        T r = a % b;
        a = b;
        b = r;
    }
    return a;
}
// Least common multiple: a * b / gcd(a, b), with the gcd computed
// inline (iterative Euclid) so the helper is self-contained.
template<typename T> T lcm(T a, T b) {
    T g = a, h = b;
    while (h) {
        T r = g % h;
        g = h;
        h = r;
    }
    return a * b / g;
}
// Replace a with b when b is smaller (chmin = "change to min").
template<typename T> void chmin(T& a, T b) { if (a > b) a = b; }
// Replace a with b when b is larger (chmax = "change to max").
template<typename T> void chmax(T& a, T b) { if (a < b) a = b; }
// Read one int from stdin (contest-style helper). Value is zero-initialized
// so a failed scanf yields 0 instead of an indeterminate value.
int in() { int value = 0; std::scanf("%d", &value); return value; }
using namespace std;
// Short integer aliases conventionally used in contest solutions.
typedef long long Int;
typedef unsigned long long uInt;
typedef unsigned uint;
// AtCoder ABC-219 A: read a score X and print the rank ("expert" for
// X >= 90) or the points still needed to reach the next tier
// boundary (40 / 70 / 90). Negative input produces no output,
// matching the original behavior.
int main(void) {
    int X;
    std::cin >> X;
    if (X >= 90) {
        std::cout << "expert\n";
    } else if (X >= 70) {
        std::cout << 90 - X << "\n";
    } else if (X >= 40) {
        std::cout << 70 - X << std::endl;
    } else if (X >= 0) {
        std::cout << 40 - X << std::endl;
    }
    return 0;
}
| apache-2.0 |
mtreinish/subunit2sql | subunit2sql/exceptions.py | 1917 | # Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
class Subunit2SQLException(Exception):
    """Base Subunit2SQL Exception.

    To correctly use this class, inherit from it and define
    a 'message' property. That message will get printf'd
    with the keyword arguments provided to the constructor.
    """
    message = "An unknown exception occurred"

    def __init__(self, *args, **kwargs):
        """Build the error string once, at construction time.

        :param args: extra detail strings appended after the message,
            one per line, under a "Details:" banner
        :param kwargs: substituted into the class-level ``message``
            template via %-formatting
        """
        super(Subunit2SQLException, self).__init__()
        try:
            self._error_string = self.message % kwargs
        except Exception:
            # At least get the core message out if interpolation failed
            # (e.g. a missing or unexpected keyword for the template).
            self._error_string = self.message
        if args:
            # Non-kwarg parameters are treated as error details / reason
            # descriptions and tacked on to the end of the message.
            details = ["%s" % arg for arg in args]
            self._error_string = (self._error_string +
                                  "\nDetails: %s" % '\n'.join(details))

    def __str__(self):
        return self._error_string
# Concrete exception types: each one only overrides the ``message``
# template defined on Subunit2SQLException.
class InvalidRunCount(Subunit2SQLException):
    message = "Invalid Run Count"
class UnknownStatus(Subunit2SQLException):
    message = "Unknown test status"
| apache-2.0 |
xiaonanln/myleetcode-python | src/392. Is Subsequence - 2.py | 334 | class Solution(object):
def isSubsequence(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
sl = len(s)
if not sl:
return True
cp = 0
for i, c in enumerate(t):
if c == s[cp]:
cp += 1
if cp == sl:
return True
return False
s = "abc"
t = "ahbgdc"
# Parenthesized print is valid under both Python 2 and Python 3;
# the original bare "print expr" statement is a SyntaxError on py3.
print(Solution().isSubsequence(s, t))
SMB-TEC/extended-objects | neo4j/src/main/java/com/buschmais/xo/neo4j/impl/datastore/AbstractEmbeddedNeo4jDatastore.java | 5243 | package com.buschmais.xo.neo4j.impl.datastore;
import java.util.Map;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Label;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.schema.IndexDefinition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.buschmais.xo.neo4j.api.model.Neo4jLabel;
import com.buschmais.xo.neo4j.impl.datastore.metadata.IndexedPropertyMetadata;
import com.buschmais.xo.neo4j.impl.datastore.metadata.NodeMetadata;
import com.buschmais.xo.neo4j.impl.datastore.metadata.PropertyMetadata;
import com.buschmais.xo.spi.metadata.method.IndexedPropertyMethodMetadata;
import com.buschmais.xo.spi.metadata.method.PrimitivePropertyMethodMetadata;
import com.buschmais.xo.spi.metadata.type.EntityTypeMetadata;
import com.buschmais.xo.spi.metadata.type.TypeMetadata;
/**
* Abstract base implementation for embedded graph stores.
*/
public abstract class AbstractEmbeddedNeo4jDatastore extends AbstractNeo4jDatastore<EmbeddedNeo4jDatastoreSession> {
    // NOTE(review): logger is tagged with EmbeddedNeo4jDatastore.class rather
    // than this abstract class -- confirm that is intentional.
    private static final Logger LOGGER = LoggerFactory.getLogger(EmbeddedNeo4jDatastore.class);
    protected final GraphDatabaseService graphDatabaseService;
    /**
     * Constructor.
     *
     * @param graphDatabaseService
     *            The graph database service.
     */
    public AbstractEmbeddedNeo4jDatastore(GraphDatabaseService graphDatabaseService) {
        this.graphDatabaseService = graphDatabaseService;
    }
    /** Creates a new session bound to the embedded database instance. */
    @Override
    public EmbeddedNeo4jDatastoreSession createSession() {
        return new EmbeddedNeo4jDatastoreSession(graphDatabaseService);
    }
    /**
     * Ensures that schema indexes/constraints exist for every registered
     * entity type. All schema operations run in a single transaction.
     */
    @Override
    public void init(Map<Class<?>, TypeMetadata> registeredMetadata) {
        try (Transaction transaction = graphDatabaseService.beginTx()) {
            for (TypeMetadata typeMetadata : registeredMetadata.values()) {
                if (typeMetadata instanceof EntityTypeMetadata) {
                    EntityTypeMetadata<NodeMetadata> entityTypeMetadata = (EntityTypeMetadata<NodeMetadata>) typeMetadata;
                    // check for indexed property declared in type
                    ensureIndex(entityTypeMetadata, entityTypeMetadata.getIndexedProperty());
                    ensureIndex(entityTypeMetadata, entityTypeMetadata.getDatastoreMetadata().getUsingIndexedPropertyOf());
                }
            }
            transaction.success();
        }
    }
    /**
     * Ensures that an index exists for the given entity and property.
     * A no-op when no index metadata is present or creation is disabled.
     *
     * @param entityTypeMetadata
     *            The entity.
     * @param indexedProperty
     *            The index metadata (may be null).
     */
    private void ensureIndex(EntityTypeMetadata<NodeMetadata> entityTypeMetadata, IndexedPropertyMethodMetadata<IndexedPropertyMetadata> indexedProperty) {
        if (indexedProperty != null) {
            IndexedPropertyMetadata datastoreMetadata = indexedProperty.getDatastoreMetadata();
            if (datastoreMetadata.isCreate()) {
                Neo4jLabel label = entityTypeMetadata.getDatastoreMetadata().getDiscriminator();
                PrimitivePropertyMethodMetadata<PropertyMetadata> propertyMethodMetadata = indexedProperty.getPropertyMethodMetadata();
                if (label != null && propertyMethodMetadata != null) {
                    ensureIndex(label, propertyMethodMetadata, datastoreMetadata.isUnique());
                }
            }
        }
    }
    /**
     * Ensures that an index exists for the given label and property.
     * Uniqueness constraints and plain indexes are mutually exclusive here:
     * a unique property gets a constraint, otherwise a regular index.
     *
     * @param label
     *            The label.
     * @param propertyMethodMetadata
     *            The property metadata.
     * @param unique
     *            Whether to create a uniqueness constraint instead of an index.
     */
    private void ensureIndex(Neo4jLabel label, PrimitivePropertyMethodMetadata<PropertyMetadata> propertyMethodMetadata, boolean unique) {
        PropertyMetadata propertyMetadata = propertyMethodMetadata.getDatastoreMetadata();
        IndexDefinition index = findIndex(label.getLabel(), propertyMetadata.getName());
        if (index == null) {
            if (unique) {
                LOGGER.debug("Creating constraint for label {} on property '{}'.", label, propertyMetadata.getName());
                graphDatabaseService.schema().constraintFor(label.getLabel()).assertPropertyIsUnique(propertyMetadata.getName()).create();
            } else {
                LOGGER.debug("Creating index for label {} on property '{}'.", label, propertyMetadata.getName());
                graphDatabaseService.schema().indexFor(label.getLabel()).on(propertyMetadata.getName()).create();
            }
        }
    }
    /**
     * Find an existing index by scanning all indexes declared for the label.
     *
     * @param label
     *            The label.
     * @param propertyName
     *            The property name.
     * @return The index or <code>null</code> if it does not exist.
     */
    private IndexDefinition findIndex(Label label, String propertyName) {
        final Iterable<IndexDefinition> indexes = graphDatabaseService.schema().getIndexes(label);
        for (IndexDefinition indexDefinition : indexes) {
            for (String key : indexDefinition.getPropertyKeys()) {
                if (key.equals(propertyName)) {
                    return indexDefinition;
                }
            }
        }
        return null;
    }
}
| apache-2.0 |
eyedol/birudo | birudo/mobile/src/main/java/org/addhen/birudo/ui/widget/SimpleDividerItemDecoration.java | 3408 | package org.addhen.birudo.ui.widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.support.v4.view.ViewCompat;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
/**
 * RecyclerView item decoration that draws the theme's standard list divider
 * (android.R.attr.listDivider) between items, in either a vertical or a
 * horizontal list.
 */
public class SimpleDividerItemDecoration extends RecyclerView.ItemDecoration {
    public static final int HORIZONTAL_LIST = LinearLayoutManager.HORIZONTAL;
    public static final int VERTICAL_LIST = LinearLayoutManager.VERTICAL;
    private static final int[] ATTRS = new int[]{
            android.R.attr.listDivider
    };
    private Drawable mDivider;
    private int mOrientation;
    public SimpleDividerItemDecoration(Context context, int orientation) {
        // Resolve the theme's android:listDivider drawable.
        final TypedArray a = context.obtainStyledAttributes(ATTRS);
        mDivider = a.getDrawable(0);
        a.recycle();
        setOrientation(orientation);
    }
    /**
     * @param orientation either {@link #HORIZONTAL_LIST} or {@link #VERTICAL_LIST}
     * @throws IllegalArgumentException for any other value
     */
    public void setOrientation(int orientation) {
        if (orientation != HORIZONTAL_LIST && orientation != VERTICAL_LIST) {
            throw new IllegalArgumentException("invalid orientation");
        }
        mOrientation = orientation;
    }
    @Override
    public void onDraw(Canvas c, RecyclerView parent, RecyclerView.State state) {
        if (mOrientation == VERTICAL_LIST) {
            drawVertical(c, parent);
        } else {
            drawHorizontal(c, parent);
        }
    }
    /** Draws a horizontal divider strip below each child of a vertical list. */
    public void drawVertical(Canvas c, RecyclerView parent) {
        final int left = parent.getPaddingLeft();
        final int right = parent.getWidth() - parent.getPaddingRight();
        final int childCount = parent.getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = parent.getChildAt(i);
            final RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) child
                    .getLayoutParams();
            final int top = child.getBottom() + params.bottomMargin +
                    Math.round(ViewCompat.getTranslationY(child));
            final int bottom = top + mDivider.getIntrinsicHeight();
            mDivider.setBounds(left, top, right, bottom);
            mDivider.draw(c);
        }
    }
    /** Draws a vertical divider strip to the right of each child of a horizontal list. */
    public void drawHorizontal(Canvas c, RecyclerView parent) {
        final int top = parent.getPaddingTop();
        final int bottom = parent.getHeight() - parent.getPaddingBottom();
        final int childCount = parent.getChildCount();
        for (int i = 0; i < childCount; i++) {
            final View child = parent.getChildAt(i);
            final RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) child
                    .getLayoutParams();
            final int left = child.getRight() + params.rightMargin +
                    Math.round(ViewCompat.getTranslationX(child));
            // BUG FIX: the divider's right edge must advance by its intrinsic
            // *width* (the strip here is vertical); the original used
            // getIntrinsicHeight(), mirroring drawVertical by copy-paste.
            final int right = left + mDivider.getIntrinsicWidth();
            mDivider.setBounds(left, top, right, bottom);
            mDivider.draw(c);
        }
    }
    @Override
    public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) {
        // Reserve space for the divider after each item.
        if (mOrientation == VERTICAL_LIST) {
            outRect.set(0, 0, 0, mDivider.getIntrinsicHeight());
        } else {
            outRect.set(0, 0, mDivider.getIntrinsicWidth(), 0);
        }
    }
}
| apache-2.0 |
youxiaxiaomage/yx-platform | yx-platform-sys-facade/src/main/java/com/yx/platform/model/SysNews.java | 3286 | package com.yx.platform.model;
import java.io.Serializable;
import java.util.Date;
import com.baomidou.mybatisplus.annotations.TableField;
import com.baomidou.mybatisplus.annotations.TableName;
import com.yx.platform.core.base.BaseModel;
/**
* <p>
* 新闻表
* </p>
*/
@SuppressWarnings("serial")
@TableName("sys_news")
public class SysNews extends BaseModel
{
    /**
     * 新闻标题 (news title)
     */
    @TableField(value = "news_title")
    private String newsTitle;
    /**
     * 新闻类型 (news type)
     */
    @TableField(value = "news_type")
    private String newsType;
    /**
     * 发布时间 (publish time)
     */
    @TableField(value = "send_time")
    private Date sendTime;
    /**
     * 作者 (author)
     */
    @TableField(value = "author_")
    private String author;
    /**
     * 编辑 (editor)
     */
    @TableField(value = "editor_")
    private String editor;
    /**
     * Tag标签 (tags)
     */
    @TableField(value = "tags_")
    private String tags;
    /**
     * 关键字 (keywords)
     */
    @TableField(value = "keys_")
    private String keys;
    /**
     * 内容 (body content)
     */
    @TableField(value = "content_")
    private String content;
    /**
     * 阅读次数 (read count)
     */
    @TableField(value = "reader_times")
    private Integer readerTimes;
    /**
     * 发布状态 (publication status)
     */
    @TableField(value = "status_")
    private String status;
    // Plain JavaBean accessors below; no logic beyond get/set.
    public String getNewsTitle()
    {
        return newsTitle;
    }
    public void setNewsTitle(String newsTitle)
    {
        this.newsTitle = newsTitle;
    }
    public String getNewsType()
    {
        return newsType;
    }
    public void setNewsType(String newsType)
    {
        this.newsType = newsType;
    }
    public Date getSendTime()
    {
        return sendTime;
    }
    public void setSendTime(Date sendTime)
    {
        this.sendTime = sendTime;
    }
    public String getAuthor()
    {
        return author;
    }
    public void setAuthor(String author)
    {
        this.author = author;
    }
    public String getEditor()
    {
        return editor;
    }
    public void setEditor(String editor)
    {
        this.editor = editor;
    }
    public String getTags()
    {
        return tags;
    }
    public void setTags(String tags)
    {
        this.tags = tags;
    }
    public String getKeys()
    {
        return keys;
    }
    public void setKeys(String keys)
    {
        this.keys = keys;
    }
    public String getContent()
    {
        return content;
    }
    public void setContent(String content)
    {
        this.content = content;
    }
    public Integer getReaderTimes()
    {
        return readerTimes;
    }
    public void setReaderTimes(Integer readerTimes)
    {
        this.readerTimes = readerTimes;
    }
    public String getStatus()
    {
        return status;
    }
    public void setStatus(String status)
    {
        this.status = status;
    }
    // NOTE(review): presumably overrides BaseModel#pkVal (MyBatis-Plus
    // primary-key hook) -- confirm and add @Override if so.
    protected Serializable pkVal()
    {
        return getId();
    }
}
| apache-2.0 |
dotph/registry | features/step_definitions/remove_host_address_steps.rb | 1029 | When /^I try to remove a host address from an existing host$/ do
host = FactoryGirl.create :host
host_address = FactoryGirl.create :host_address, host: host
stub_request(:get, 'http://localhost:9001/hosts/ns5.domains.ph')
.to_return(status: 200, body: 'hosts/ns5.domains.ph/get_response'.body)
stub_request(:delete, "http://localhost:9001/hosts/ns5.domains.ph/addresses/123.123.123.001")
.to_return(status: 200, body: 'hosts/ns5.domains.ph/addresses/delete_response'.body)
delete host_address_path(host.name, host_address.address)
end
# Verifies the DELETE round-trip: HTTP 200, the stubbed registry response
# body, and no HostAddress records left on the host.
# NOTE(review): String#json / String#body look like spec helpers that load
# fixture files by path -- confirm against the support code.
Then /^host must no longer have host address$/ do
  expect(last_response).to have_attributes status: 200
  expect(json_response).to eq 'hosts/ns5.domains.ph/addresses/delete_response'.json
  expect(Host.last.host_addresses).to be_empty
end
# Then /^remove host address must not be synced to external registries$/ do
# url = 'http://localhost:9001/hosts/ns5.domains.ph/addresses/123.123.123.001'
# expect(WebMock).not_to have_requested(:delete, url)
# end
| apache-2.0 |
blackpanther989/ArchiSteamFarm | ArchiSteamFarm/CMsgs/CMsgClientClanInviteAction.cs | 2009 | /*
_ _ _ ____ _ _____
/ \ _ __ ___ | |__ (_)/ ___| | |_ ___ __ _ _ __ ___ | ___|__ _ _ __ _ __ ___
/ _ \ | '__|/ __|| '_ \ | |\___ \ | __|/ _ \ / _` || '_ ` _ \ | |_ / _` || '__|| '_ ` _ \
/ ___ \ | | | (__ | | | || | ___) || |_| __/| (_| || | | | | || _|| (_| || | | | | | | |
/_/ \_\|_| \___||_| |_||_||____/ \__|\___| \__,_||_| |_| |_||_| \__,_||_| |_| |_| |_|
Copyright 2015-2017 Łukasz "JustArchi" Domeradzki
Contact: JustArchi@JustArchi.net
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System.IO;
using SteamKit2;
using SteamKit2.Internal;
namespace ArchiSteamFarm.CMsgs {
	/// <summary>
	/// Steam client message accepting or declining a clan (group) invite.
	/// Wire format (in order): ClanID (u64), AcceptInvite (bool).
	/// NOTE(review): despite the class name, the message type is
	/// EMsg.ClientAcknowledgeClanInvite -- confirm that mapping is intended.
	/// </summary>
	internal sealed class CMsgClientClanInviteAction : ISteamSerializableMessage {
		internal bool AcceptInvite { private get; set; }
		internal ulong ClanID { private get; set; }
		void ISteamSerializable.Deserialize(Stream stream) {
			if (stream == null) {
				ASF.ArchiLogger.LogNullError(nameof(stream));
				return;
			}
			// Read fields in the same order Serialize writes them.
			BinaryReader binaryReader = new BinaryReader(stream);
			ClanID = binaryReader.ReadUInt64();
			AcceptInvite = binaryReader.ReadBoolean();
		}
		EMsg ISteamSerializableMessage.GetEMsg() => EMsg.ClientAcknowledgeClanInvite;
		void ISteamSerializable.Serialize(Stream stream) {
			if (stream == null) {
				ASF.ArchiLogger.LogNullError(nameof(stream));
				return;
			}
			BinaryWriter binaryWriter = new BinaryWriter(stream);
			binaryWriter.Write(ClanID);
			binaryWriter.Write(AcceptInvite);
		}
	}
} | apache-2.0 |
Talend/ui | packages/containers/src/ObjectViewer/index.js | 81 | import ObjectViewer from './ObjectViewer.connect';
export default ObjectViewer;
| apache-2.0 |
KleeGroup/vertigo-addons | vertigo-stella/src/test/java/io/vertigo/stella/work/mock/SlowWorkEngine.java | 1367 | /**
* vertigo - simple java starter
*
* Copyright (C) 2013-2018, KleeGroup, direction.technique@kleegroup.com (http://www.kleegroup.com)
* KleeGroup, Centre d'affaire la Boursidiere - BP 159 - 92357 Le Plessis Robinson Cedex - France
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.vertigo.stella.work.mock;
import io.vertigo.stella.work.WorkEngine;
public final class SlowWorkEngine implements WorkEngine<SlowWork, Boolean> {
	/**
	 * Sleeps for roughly the work item's configured duration: the midpoint
	 * of a +/-10% window around the average (which collapses back to ~avg).
	 *
	 * @param work work item carrying the sleep time in milliseconds
	 * @return {@code true} when the sleep completed, {@code false} when interrupted
	 */
	/** {@inheritDoc} */
	@Override
	public Boolean process(final SlowWork work) {
		try {
			final long sleepTimeAvg = work.getSleepTime();
			final long sleepTimeMax = Math.round(sleepTimeAvg * 1.1d); //+10%
			final long sleepTimeMin = Math.round(sleepTimeAvg * 0.9d); //-10%
			Thread.sleep((sleepTimeMax + sleepTimeMin) / 2);
		} catch (final InterruptedException e) {
			// Restore the interrupt flag so the executor/pool running this
			// engine can still observe the interruption (swallowing it is
			// a well-known Java anti-pattern).
			Thread.currentThread().interrupt();
			return false;
		}
		return true;
	}
}
| apache-2.0 |
codeshard/videomorph | tests/test_utils.py | 2480 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File name: test_utils.py
#
# VideoMorph - A PyQt5 frontend to ffmpeg and avconv.
# Copyright 2015-2016 VideoMorph Development Team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module provides tests for utils.py module."""
import nose
from videomorph.converter import utils
# write_time(): rejects non-numeric input, formats seconds as [hh:][mm:]ss.
@nose.tools.raises(ValueError)
def test_convert_none():
    """Convert None."""
    assert utils.write_time(None)
@nose.tools.raises(ValueError)
def test_convert_wrong_data_types():
    """Convert wrong data types."""
    assert utils.write_time('string')
    assert utils.write_time((1, 2))
    assert utils.write_time([1, 2])
    assert utils.write_time({'time': 25})
def test_convert_0():
    """Convert 0."""
    assert utils.write_time(0) == '00s'
def test_convert_2100():
    """Convert 2100."""
    assert utils.write_time(2100) == '35m:00s'
def test_convert_3600():
    """Convert 3600."""
    assert utils.write_time(3600) == '01h:00m:00s'
def test_convert_3659():
    """Convert 3659."""
    assert utils.write_time(3659) == '01h:00m:59s'
def test_convert_3661():
    """Convert 3661."""
    assert utils.write_time(3661) == '01h:01m:01s'
@nose.tools.raises(ValueError)
def test_raise_value_error():
    """Test for negative time value (raises a ValueError)."""
    utils.write_time(-1)
# which(): PATH lookup helper, mirroring shutil.which semantics.
def test_which_existing_app():
    """Test for an existing app."""
    assert utils.which('ls') == '/bin/ls'  # Depends on your system
def test_which_non_existing_app():
    """Test for a non existing app."""
    assert utils.which('hypothetical_app') is None
@nose.tools.raises(ValueError)
def test_which_null_arg():
    """Test for a null string param (raises a ValueError)."""
    utils.which('')
def test_get_locale():
    """Test get_locale falls back to 'es_ES' when no default locale is set.

    The original assertion ``x == a or 'es_ES'`` parsed as
    ``(x == a) or 'es_ES'``, which is always truthy ('es_ES' is a
    non-empty string), so the test could never fail. Parenthesizing the
    fallback restores the intended comparison.
    """
    from locale import getdefaultlocale
    assert utils.get_locale() == (getdefaultlocale()[0] or 'es_ES')
# Allow running this module directly; nose discovers the test functions.
if __name__ == '__main__':
    nose.runmodule()
| apache-2.0 |
consulo/consulo | modules/base/xdebugger-impl/src/main/java/com/intellij/xdebugger/impl/frame/actions/XSwitchWatchesInVariables.java | 890 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.xdebugger.impl.frame.actions;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.ToggleAction;
import com.intellij.xdebugger.impl.ui.XDebugSessionTab;
import javax.annotation.Nonnull;
/**
 * Toggle action flipping whether the Watches view is embedded inside the
 * Variables view of the current debug session tab. Defaults to selected
 * when no tab is available.
 */
public class XSwitchWatchesInVariables extends ToggleAction {
  @Override
  public boolean isSelected(@Nonnull AnActionEvent e) {
    final XDebugSessionTab sessionTab = e.getData(XDebugSessionTab.TAB_KEY);
    if (sessionTab == null) {
      // No debug tab in this context: report selected by default.
      return true;
    }
    return sessionTab.isWatchesInVariables();
  }

  @Override
  public void setSelected(@Nonnull AnActionEvent e, boolean state) {
    final XDebugSessionTab sessionTab = e.getData(XDebugSessionTab.TAB_KEY);
    if (sessionTab != null) {
      // Invert the current tab state; the incoming flag is ignored, as before.
      sessionTab.setWatchesInVariables(!sessionTab.isWatchesInVariables());
    }
  }
}
| apache-2.0 |
jonhnanthan/Educa | educa/src/com/educa/adapter/ExerciseStudentAdapter.java | 7876 |
package com.educa.adapter;
import java.util.ArrayList;
import org.json.JSONException;
import org.json.JSONObject;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.PopupMenu;
import android.widget.TextView;
import com.educa.R;
import com.educa.activity.AnswerColorMatchExercise;
import com.educa.activity.AnswerCompleteExercise;
import com.educa.activity.AnswerMultipleChoiceExercise;
import com.educa.database.DataBaseProfessor;
/**
 * ListView adapter showing a student's exercises (stored as JSON strings)
 * with a per-row popup menu that opens the matching "answer" activity for
 * the exercise type (color match / multiple choice / complete).
 */
public class ExerciseStudentAdapter extends BaseAdapter {
    // BUG FIX: this field was declared static but assigned per-instance in
    // the constructor -- a classic Android static-context defect that leaks
    // the last-bound list across adapter instances. It is now an instance
    // field; all usages are instance methods, so behavior per adapter is
    // unchanged.
    private ArrayList<String> mListExercise;
    private LayoutInflater mInflater;
    private Context mcontext;
    private Activity parentActivity;
    // Kept for compatibility with existing callers; fields stay null until
    // the parameterized constructor is used.
    public ExerciseStudentAdapter() {
    }
    public ExerciseStudentAdapter(Context context, ArrayList<String> listExercise,
            Activity parentActivity) {
        mListExercise = listExercise;
        mInflater = LayoutInflater.from(context);
        mcontext = context;
        this.parentActivity = parentActivity;
    }
    @Override
    public int getCount() {
        return mListExercise.size();
    }
    @Override
    public String getItem(int position) {
        return mListExercise.get(position);
    }
    @Override
    public long getItemId(int index) {
        return index;
    }
    /** Inflates a row and binds the JSON-encoded exercise fields to it. */
    @SuppressLint({ "ViewHolder", "InflateParams" }) @Override
    public View getView(final int position, View view, ViewGroup viewGroup) {
        view = mInflater.inflate(R.layout.exercise_adapter_student_item, null);
        TextView tv_exercise_name = (TextView) view.findViewById(R.id.tv_exercise_name);
        TextView tv_exercise_status = (TextView) view.findViewById(R.id.tv_exercise_status);
        TextView tv_exercise_correction = (TextView) view.findViewById(R.id.tv_exercise_correction);
        TextView tv_exercise_date = (TextView) view.findViewById(R.id.tv_exercise_date);
        ImageView icon = (ImageView) view.findViewById(R.id.imageView1);
        final String json = mListExercise.get(position);
        JSONObject exercise;
        try {
            exercise = new JSONObject(json);
            tv_exercise_name.setText(exercise.getString("name"));
            tv_exercise_status.setText(exercise.getString("status"));
            tv_exercise_correction.setText(exercise.getString("correction"));
            tv_exercise_date.setText(exercise.getString("date"));
            // Pick the thumbnail matching the exercise type code.
            if (exercise.getString("type").equals(DataBaseProfessor.getInstance(mcontext).COLOR_MATCH_EXERCISE_TYPECODE)) {
                icon.setImageResource(R.drawable.colorthumb);
            } else if (exercise.getString("type").equals(DataBaseProfessor.getInstance(mcontext).MULTIPLE_CHOICE_EXERCISE_TYPECODE)) {
                icon.setImageResource(R.drawable.multiplethumb);
            } else if (exercise.getString("type").equals(DataBaseProfessor.getInstance(mcontext).COMPLETE_EXERCISE_TYPECODE)) {
                icon.setImageResource(R.drawable.completethumb);
            }
        } catch (JSONException e) {
            Log.e("CREATE VIEW STUDENT ERROR", e.getMessage());
        }
        ImageView bt_options = (ImageView)
                view.findViewById(R.id.bt_options);
        bt_options.setOnClickListener(new
                View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showPopupMenu(v, json);
            }
        });
        return view;
    }
    /**
     * Shows the row's popup menu; "answer" launches the activity matching
     * the exercise's type, forwarding its JSON fields as string extras.
     */
    private void showPopupMenu(View v, final String json) {
        PopupMenu popupMenu = new PopupMenu(mcontext, v);
        popupMenu.getMenuInflater().inflate(R.menu.student_exercise_options, popupMenu.getMenu());
        popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
            @Override
            public boolean onMenuItemClick(MenuItem item) {
                switch (item.getItemId()) {
                    case R.id.answer:
                        JSONObject exercise;
                        try {
                            exercise = new JSONObject(json);
                            if (exercise.getString("type").equals(DataBaseProfessor.getInstance(mcontext).MULTIPLE_CHOICE_EXERCISE_TYPECODE)) {
                                ArrayList<CharSequence> listMultipleChoiceExercise = new ArrayList<CharSequence>();
                                listMultipleChoiceExercise.add(exercise.getString("name"));
                                listMultipleChoiceExercise.add(exercise.getString("question"));
                                listMultipleChoiceExercise.add(exercise.getString("alternative1"));
                                listMultipleChoiceExercise.add(exercise.getString("alternative2"));
                                listMultipleChoiceExercise.add(exercise.getString("alternative3"));
                                listMultipleChoiceExercise.add(exercise.getString("alternative4"));
                                listMultipleChoiceExercise.add(exercise.getString("answer"));
                                listMultipleChoiceExercise.add(exercise.getString("date"));
                                Intent i = new Intent(parentActivity, AnswerMultipleChoiceExercise.class);
                                i.putCharSequenceArrayListExtra("QuestionToAnswerMatch", listMultipleChoiceExercise);
                                parentActivity.startActivity(i);
                            }
                            if (exercise.getString("type").equals(DataBaseProfessor.getInstance(mcontext).COMPLETE_EXERCISE_TYPECODE)) {
                                ArrayList<CharSequence> listCompleteExercise = new ArrayList<CharSequence>();
                                listCompleteExercise.add(exercise.getString("name"));
                                listCompleteExercise.add(exercise.getString("word"));
                                listCompleteExercise.add(exercise.getString("question"));
                                listCompleteExercise.add(exercise.getString("hiddenIndexes"));
                                listCompleteExercise.add(exercise.getString("date"));
                                Intent i = new Intent(parentActivity, AnswerCompleteExercise.class);
                                i.putCharSequenceArrayListExtra("QuestionToAnswerComplete", listCompleteExercise);
                                parentActivity.startActivity(i);
                            }
                            if (exercise.getString("type").equals(DataBaseProfessor.getInstance(mcontext).COLOR_MATCH_EXERCISE_TYPECODE)) {
                                ArrayList<CharSequence> listColorMatchExercise = new ArrayList<CharSequence>();
                                listColorMatchExercise.add(exercise.getString("name"));
                                listColorMatchExercise.add(exercise.getString("color"));
                                listColorMatchExercise.add(exercise.getString("question"));
                                listColorMatchExercise.add(exercise.getString("alternative1"));
                                listColorMatchExercise.add(exercise.getString("alternative2"));
                                listColorMatchExercise.add(exercise.getString("alternative3"));
                                listColorMatchExercise.add(exercise.getString("alternative4"));
                                listColorMatchExercise.add(exercise.getString("answer"));
                                listColorMatchExercise.add(exercise.getString("date"));
                                Intent i = new Intent(parentActivity, AnswerColorMatchExercise.class);
                                i.putCharSequenceArrayListExtra("QuestionToAnswerColor", listColorMatchExercise);
                                parentActivity.startActivity(i);
                            }
                        } catch (JSONException e) {
                            Log.e("EDIT ERROR", e.getMessage());
                        }
                        return true;
                    default:
                        break;
                }
                return true;
            }
        });
        popupMenu.show();
    }
}
| apache-2.0 |
ITrust/angular-xmlrpc | src/xmlrpc.js | 19270 | // Fix an IE problem (another one)
// Detect legacy IE XML support: constructing an MSXML ActiveX object only
// succeeds on old IE, so HAS_ACTIVEX doubles as an "is legacy IE" flag.
var HAS_ACTIVEX = false;
try {
    new ActiveXObject('MSXML2.DOMDocument');
    HAS_ACTIVEX = true;
} catch(e) {
    // Not IE (or ActiveX disabled) -- keep the flag false.
}
/**
* XML-RPC communication service.
*/
angular.module('xml-rpc', [])
.factory('js2xml', ['helperXmlRpc', function(helperXmlRpc){
    // Serializes javascript values into XML-RPC <value> elements.
    // Dispatch is by Object.prototype.toString type tag (see type_ below);
    // unknown types fall back to the string encoder.

    /**
     * Convert Null to XmlRpc valid value (as xml element)
     */
    function null2xml_(doc) {
        return helperXmlRpc.createNode(doc, 'nil');
    }
    // Type-tag -> encoder dispatch table, populated below.
    var js2xmlMethod_ = {};
    /**
     * Convert a string to a valid xmlrpc value (as xml element).
     */
    function string2xml_(doc, input) {
        return helperXmlRpc.createNode(doc, 'string', input);
    }
    js2xmlMethod_['string'] = string2xml_;
    /**
     * Convert a number to a valid xmlrpc value (as xml element).
     * Emits <int> when the value has no fractional part, <double> otherwise.
     */
    function number2xml_(doc, input) {
        var type = 'int',
            value = parseInt(input),
            f = parseFloat(input);
        if (value != f) {
            type = 'double';
            value = f;
        }
        return helperXmlRpc.createNode(doc, type, value.toString());
    }
    js2xmlMethod_['number'] = number2xml_;
    /**
     * Convert a boolean to a valid xmlrpc value (as xml element):
     * '1' for true, '0' for false per the XML-RPC spec.
     */
    function boolean2xml_(doc, input) {
        return helperXmlRpc.createNode(doc, 'boolean', (input ? '1' : '0'));
    }
    js2xmlMethod_['boolean'] = boolean2xml_;
    /**
     * Convert an Array object to a valid xmlrpc value (as xml element).
     * Elements are encoded recursively via js2xml_.
     */
    function array2xml_(doc, input) {
        var elements = [];
        for (var i=0; i < input.length; i++) {
            elements.push(js2xml_(doc, input[i]));
        }
        return helperXmlRpc.createNode(doc, 'array',
            helperXmlRpc.createNode(doc, 'data', elements)
        );
    }
    js2xmlMethod_['array'] = array2xml_;
    /**
     * Convert an object to a valid xmlrpc <struct> value (as xml element),
     * one <member> per enumerable property.
     */
    function struct2xml_(doc, input) {
        var elements = [];
        for (var name in input) {
            elements.push(helperXmlRpc.createNode(doc, 'member',
                helperXmlRpc.createNode(doc, 'name', name),
                js2xml_(doc, input[name])
            ));
        }
        return helperXmlRpc.createNode(doc, 'struct', elements);
    }
    js2xmlMethod_['object'] = struct2xml_;
    /**
     * Convert a Date object to an xmlrpc <dateTime.iso8601> value in the
     * spec's compact YYYYMMDDTHH:MM:SS form (local time, zero-padded).
     */
    function date2xml_(doc, input) {
        var str = [
            input.getFullYear(),
            (input.getMonth() + 1 < 10)? '0' + (input.getMonth() + 1):input.getMonth() + 1,
            (input.getDate() < 10)? '0' + (input.getDate()):input.getDate(),
            'T',
            (input.getHours() < 10)? '0' + (input.getHours()):input.getHours(), ':',
            (input.getMinutes() < 10)? '0' + (input.getMinutes()):input.getMinutes(), ':',
            (input.getSeconds() < 10)? '0' + (input.getSeconds()):input.getSeconds()
        ];
        return helperXmlRpc.createNode(doc, 'dateTime.iso8601', str.join(''));
    }
    js2xmlMethod_['date'] = date2xml_;
    /**
     * Convert a typed array to base64 xml encoding.
     */
    function uint8array2xml_(doc, input) {
        var base64 = btoa(String.fromCharCode.apply(null, input));
        return helperXmlRpc.createNode(doc, 'base64', base64);
    }
    js2xmlMethod_['uint8array'] = uint8array2xml_;
    /**
     * Returns the object type of complex javascript objects,
     * e.g. 'array', 'date', 'object', derived from the
     * Object.prototype.toString tag.
     */
    function type_(obj){
        return Object.prototype.toString.call(obj).slice(8, -1).toLowerCase();
    }
    /**
     * Converts a javascript object to a valid xmlrpc value (as xml element).
     * null maps to <nil>; unregistered types fall back to string encoding.
     */
    function js2xml_(doc, input) {
        var type = type_(input)
        var method = js2xmlMethod_[type];
        if (input === null) {
            method = null2xml_;
        } else if (method == undefined) {
            method = string2xml_;
        }
        return helperXmlRpc.createNode(doc, 'value', method(doc, input));
    }
    return {
        js2xml:js2xml_
    };
}])
.factory('xml2js', ['helperXmlRpc', function(helperXmlRpc){
    // Text values the <boolean> element treats as true; anything else is false.
    var isTrue_ = {
        '1': true,
        'true': true
    };
    // Dispatch table: lower-cased xmlrpc element name -> converter function.
    var xml2jsMethod_ = {};
    /**
     * Convert an xmlrpc nil value (as an xml tree) to a javascript null.
     */
    function xml2null_() {
        return null;
    }
    xml2jsMethod_['nil'] = xml2null_;
    /**
     * Convert an xmlrpc string value (as an xml tree) to a javascript string.
     *
     * @param {!Element} input Xmlrpc string to convert.
     * @return {string} Javascript conversion of input.
     * @protected
     */
    function xml2string_(input) {
        var buf = [];
        helperXmlRpc.getTextContent(input, buf, false);
        return buf.join('');
    }
    xml2jsMethod_['string'] = xml2string_;
    // NOTE(review): <base64> payloads are returned as the raw base64 text,
    // not decoded to bytes — confirm callers expect that.
    xml2jsMethod_['base64'] = xml2string_;
    /**
     * Convert an xmlrpc number (int or double) value to a javascript number.
     */
    function xml2number_(input) {
        return parseFloat(helperXmlRpc.getTextContent(input, []));
    }
    xml2jsMethod_['int'] = xml2number_;
    xml2jsMethod_['i8'] = xml2number_;
    xml2jsMethod_['i4'] = xml2number_;
    xml2jsMethod_['double'] = xml2number_;
    /**
     * Convert an xmlrpc boolean value to a javascript boolean
     * ('1'/'true' => true, anything else => false).
     */
    function xml2boolean_(input) {
        var value = helperXmlRpc.getTextContent(input, []).toLowerCase();
        return isTrue_[value] || false;
    }
    xml2jsMethod_['boolean'] = xml2boolean_;
    /**
     * Convert an xmlrpc struct value to a javascript object, one property
     * per <member> element.
     */
    function xml2struct_(input) {
        var memberNodes = helperXmlRpc.selectNodes(input, 'member') || [];
        var obj = {};
        for (var i=0; i < memberNodes.length; i++) {
            var node = helperXmlRpc.selectSingleNode(memberNodes[i], 'name');
            // If no name found, member is ignored
            if (node) {
                var label = helperXmlRpc.getTextContent(node, []);
                node = helperXmlRpc.selectSingleNode(memberNodes[i], 'value');
                obj[label] = xml2js_(node);
            }
        }
        return obj;
    }
    xml2jsMethod_['struct'] = xml2struct_;
    /**
     * Convert an xmlrpc array value to a javascript array.
     */
    function xml2array_(input) {
        // Values normally live under <data>; tolerate a missing <data> wrapper.
        var valueNodes = helperXmlRpc.selectNodes(input, 'data/value');
        if (!valueNodes.length) {
            valueNodes = helperXmlRpc.selectNodes(input, './value');
        }
        if (!valueNodes.length)
            return [];
        //else
        // Use the native Array.prototype.map when available, otherwise a
        // spec-compatible fallback (length fixed up front, holes skipped).
        var map_ = (Array.prototype.map) ?
            function(arr, f, opt_obj) {
                return Array.prototype.map.call(arr, f, opt_obj);
            } :
            function(arr, f, opt_obj) {
                var l = arr.length; // must be fixed during loop... see docs
                var res = new Array(l);
                var arr2 = (typeof arr == 'string') ? arr.split('') : arr;
                for (var i = 0; i < l; i++) {
                    if (i in arr2) {
                        res[i] = f.call(opt_obj, arr2[i], i, arr);
                    }
                }
                return res;
            };
        return map_(valueNodes, xml2js_);
    }
    xml2jsMethod_['array'] = xml2array_;
    /**
     * Convert an xmlrpc dateTime value to a javascript Date (local time).
     * Accepts both the dashed (YYYY-MM-DD) and compact (YYYYMMDD) forms.
     */
    function xml2datetime_(input) {
        var value = helperXmlRpc.getTextContent(input, []);
        if (!value) {
            // No text content: fall back to "now".
            return new Date();
        }
        // Drop a trailing 'T' left over when the time part is absent.
        if (value[value.length-1]=='T') {
            value = value.substring(0, value.length-1);
        }
        var parts = value.match(/\d+/g);
        // Compact form: split the leading YYYYMMDD run into [YYYY, MM, DD].
        if(value.indexOf('-') == -1){
            var toSplit = parts[0];
            parts[0] = toSplit.substring(0,4);
            parts.splice(1, 0, toSplit.substring(4,6));
            parts.splice(2, 0, toSplit.substring(6));
        }
        return new Date(parts[0], parts[1] - 1, parts[2], parts[3], parts[4], parts[5]);
    }
    xml2jsMethod_['datetime'] = xml2datetime_;
    xml2jsMethod_['datetime.iso8601'] = xml2datetime_;
    /**
     * Convert an xmlrpc value (as an xml tree) to a javascript object by
     * dispatching on the child element's name; unknown element names are
     * treated as structs.
     */
    function xml2js_(input) {
        var elt = helperXmlRpc.selectSingleNode(input, './*');
        if (!elt)
            return null;
        //else
        var method = xml2jsMethod_[elt.nodeName.toLowerCase()];
        if (method == undefined) {
            method = xml2struct_;
        }
        return method(elt);
    }
    return {
        xml2js:xml2js_
    };
}])
.factory('xmlrpc', ['$http', '$q', 'helperXmlRpc', 'js2xml', 'xml2js', function($http, $q, helperXmlRpc, js2xml, xml2js){
var configuration = {};
/**
* Serialize a XML document to string.
*/
function serialize(xml){
var text = xml.xml;
if (text) {
return text;
}
if (typeof XMLSerializer != 'undefined') {
return new XMLSerializer().serializeToString(xml);
}
throw Error('Your browser does not support serializing XML documents');
}
/**
* Creates a xmlrpc call of the given method with given params.
*/
function createCall(method, params){
var doc = helperXmlRpc.createDocument('methodCall');
doc.firstChild.appendChild(
helperXmlRpc.createNode(doc, 'methodName', method)
);
if (arguments.length > 2) {
params = helperXmlRpc.cloneArray(arguments);
params.shift();
}
if (params && params.length > 0) {
var paramsNode = helperXmlRpc.createNode(doc, 'params');
for (var i=0; i < params.length; i++) {
paramsNode.appendChild(helperXmlRpc.createNode(doc, 'param',
js2xml.js2xml(doc, params[i])
));
}
doc.firstChild.appendChild(paramsNode);
}
return (serialize(doc)).replace(/[\s\xa0]+$/, '');
}
// Use the promise system from angular.
// This method return a promise with the response
function callMethod(method, params){
var xmlstr = createCall(method, params);
var targetAddr = configuration.hostName + "" + configuration.pathName;
return $http.post(targetAddr, xmlstr, {headers: {'Content-Type': 'text/xml'}})
.then(function(responseFromServer) {
var responseText = responseFromServer.data;
var response = null;
try {
response = parseResponse(responseText);
} catch (err) {
return $q.reject(err)
}
return $q.resolve(response);
}).catch(function(responseFromServer){
if(responseFromServer.status in configuration){
if(typeof configuration[responseFromServer.status] == "function"){
configuration[responseFromServer.status].call();
}
}
return $q.reject(responseFromServer)
});
}
/**
* Parse an xmlrpc response and return the js object.
*/
function parseResponse(response){
var doc = helperXmlRpc.loadXml(response);
var rootNode = doc.firstChild;
if (!rootNode)
return undefined;
//else
var node = helperXmlRpc.selectSingleNode(rootNode, '//fault');
var isFault = (node != undefined);
node = helperXmlRpc.selectSingleNode(rootNode, '//value');
var value = xml2js.xml2js(node);
if (isFault) {
throw value;
}
//else
return value;
}
/**
* Configure the service (Host name and service path).
* Actually, 401, 404 and 500 server errors are originally defined, but any error code can be added
*/
function config(conf) {
angular.extend(configuration, {
hostName:"",
pathName:"/rpc2",
500:function(){},
401:function(){},
404:function(){}
}, conf);
}
config();
return {
callMethod : callMethod,
config : config
};
}])
.factory('helperXmlRpc', function(){
/**
* Clones an array object
*/
function cloneArray_(object){
var length = object.length;
if (length > 0) {
var rv = new Array(length);
for (var i = 0; i < length; i++) {
rv[i] = object[i];
}
return rv;
}
return [];
}
/**
* Creates a XML document for IEs browsers
*/
function createMsXmlDocument_(){
var doc = new ActiveXObject('MSXML2.DOMDocument');
if (doc) {
doc.resolveExternals = false;
doc.validateOnParse = false;
try {
doc.setProperty('ProhibitDTD', true);
doc.setProperty('MaxXMLSize', 2 * 1024);
doc.setProperty('MaxElementDepth', 256);
} catch (e) {
// No-op.
}
}
return doc;
}
/**
* Creates a XML document
*/
function createDocument(opt_rootTagName, opt_namespaceUri){
if (opt_namespaceUri && !opt_rootTagName) {
throw Error("Can't create document with namespace and no root tag");
}
if (HAS_ACTIVEX) {
var doc = createMsXmlDocument_();
if (doc) {
if (opt_rootTagName) {
doc.appendChild(doc.createNode(1,
opt_rootTagName,
opt_namespaceUri || ''));
}
return doc;
}
}
else if (document.implementation && document.implementation.createDocument) {
return document.implementation.createDocument(opt_namespaceUri || '',
opt_rootTagName || '',
null);
}
throw Error('Your browser does not support creating new documents');
}
/**
* Returns the object type of complex javascript objects
*/
function type_(obj){
return Object.prototype.toString.call(obj).slice(8, -1).toLowerCase();
}
/**
* Creates a XML node and set the child(ren) node(s)
*/
function createNode(doc, nodeName, children){
var elt = doc.createElement(nodeName);
var appendChild = function(child) {
if(type_(child) === 'object' && child.nodeType !== 1){
for(var i in child){
elt.appendChild(
(typeof child == 'string') ? doc.createTextNode(child[i]) : child[i]
);
}
} else {
elt.appendChild(
(typeof child == 'string') ? doc.createTextNode(child) : child
);
}
}
if (arguments.length > 3) {
children = cloneArray_(arguments);
children.shift(); //shift doc
children.shift(); //shift nodeName
}
if (Array.isArray(children)) {
angular.forEach(children, appendChild);
} else if (children) {
appendChild(children);
}
return elt;
}
/**
* Generate an ID for XMLRPC request
*/
function generateId(){
return 'xmlrpc-'+(new Date().getTime())+'-'+Math.floor(Math.random()*1000);
}
/**
* Creates an XML document from a string
*/
function loadXml_(xml) {
if (HAS_ACTIVEX) {
var doc = createMsXmlDocument_();
doc.loadXML(xml);
return doc;
}
else if (typeof DOMParser != 'undefined') {
return new DOMParser().parseFromString(xml, 'application/xml');
}
throw Error('Your browser does not support loading xml documents');
}
/**
* Returns the document in which the node is.
*/
function getOwnerDocument_(node) {
return (
node.nodeType == 9 ? node :
node.ownerDocument || node.document);
}
/**
* Return a single node with the given name in the given node
*/
function selectSingleNode_(node, path) {
var doc = getOwnerDocument_(node);
if (typeof node.selectSingleNode != 'undefined') {
if (typeof doc.setProperty != 'undefined') {
doc.setProperty('SelectionLanguage', 'XPath');
}
return node.selectSingleNode(path);
} else if (document.implementation.hasFeature('XPath', '3.0')) {
var resolver = doc.createNSResolver(doc.documentElement),
result = doc.evaluate(path, node, resolver,
XPathResult.FIRST_ORDERED_NODE_TYPE, null);
return result.singleNodeValue;
}
return null;
}
/**
* Returns the string content of a node
*/
function getTextContent_(node, buf, normalizedWhitespace){
var PREDEFINED_TAG_VALUES_ = {'IMG': ' ', 'BR': '\n'};
if (node.nodeName in ['SCRIPT', 'STYLE', 'HEAD', 'IFRAME', 'OBJECT']) {
// ignore certain tags
} else if (node.nodeType == 3) {
if (normalizedWhitespace) {
buf.push(String(node.nodeValue).replace(/(\r\n|\r|\n)/g, ''));
} else {
buf.push(node.nodeValue);
}
} else if (node.nodeName in PREDEFINED_TAG_VALUES_) {
buf.push(PREDEFINED_TAG_VALUES_[node.nodeName]);
} else {
var child = node.firstChild;
while (child) {
getTextContent_(child, buf, normalizedWhitespace);
child = child.nextSibling;
}
}
return buf.join('');
}
/**
* Returns all the nodes in a array that are inside the given node with the given path
*/
function selectNodes_(node, path) {
var doc = getOwnerDocument_(node);
if (typeof node.selectNodes != 'undefined') {
if (typeof doc.setProperty != 'undefined') {
doc.setProperty('SelectionLanguage', 'XPath');
}
return node.selectNodes(path);
} else if (document.implementation.hasFeature('XPath', '3.0')) {
var resolver = doc.createNSResolver(doc.documentElement),
nodes = doc.evaluate(path, node, resolver,
XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null),
results = [],
count = nodes.snapshotLength;
for (var i = 0; i < count; i++) {
results.push(nodes.snapshotItem(i));
}
return results;
} else {
return [];
}
}
return {
cloneArray:cloneArray_,
createDocument: createDocument,
createNode: createNode,
generateId: generateId,
loadXml: loadXml_,
getOwnerDocument:getOwnerDocument_,
selectNodes: selectNodes_,
getTextContent : getTextContent_,
selectSingleNode: selectSingleNode_
};
});
| apache-2.0 |