code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
package org.grobid.core.engines.config;
/**
 * Exception thrown when a Grobid analysis configuration is invalid.
 *
 * <p>Unchecked on purpose: an invalid configuration is considered a
 * programming error on the caller's side rather than a recoverable
 * condition.</p>
 */
public class InvalidGrobidAnalysisConfig extends RuntimeException {

    /**
     * @param message description of why the configuration is invalid
     */
    public InvalidGrobidAnalysisConfig(String message) {
        super(message);
    }

    /**
     * @param message description of why the configuration is invalid
     * @param cause   the underlying error, preserved so the original stack
     *                trace is not lost when wrapping a lower-level failure
     */
    public InvalidGrobidAnalysisConfig(String message, Throwable cause) {
        super(message, cause);
    }
}
| kermitt2/grobid | grobid-core/src/main/java/org/grobid/core/engines/config/InvalidGrobidAnalysisConfig.java | Java | apache-2.0 | 239 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.quotas;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * A {@link ScheduledChore} which periodically updates the {@link RegionServerSpaceQuotaManager}
 * with information from the hbase:quota table.
 */
@InterfaceAudience.Private
public class SpaceQuotaRefresherChore extends ScheduledChore {
  private static final Log LOG = LogFactory.getLog(SpaceQuotaRefresherChore.class);

  // How often the chore runs, i.e. how often quota state is re-read from hbase:quota.
  static final String POLICY_REFRESHER_CHORE_PERIOD_KEY =
      "hbase.regionserver.quotas.policy.refresher.chore.period";
  static final int POLICY_REFRESHER_CHORE_PERIOD_DEFAULT = 1000 * 60 * 1; // 1 minute in millis

  // Delay before the first run of the chore after start-up.
  static final String POLICY_REFRESHER_CHORE_DELAY_KEY =
      "hbase.regionserver.quotas.policy.refresher.chore.delay";
  static final long POLICY_REFRESHER_CHORE_DELAY_DEFAULT = 1000L * 15L; // 15 seconds in millis

  // Unit in which the period and delay values above are interpreted.
  static final String POLICY_REFRESHER_CHORE_TIMEUNIT_KEY =
      "hbase.regionserver.quotas.policy.refresher.chore.timeunit";
  static final String POLICY_REFRESHER_CHORE_TIMEUNIT_DEFAULT = TimeUnit.MILLISECONDS.name();

  // Fraction of a table's regions which must have reported before a quota
  // violation state change may be made.
  static final String POLICY_REFRESHER_CHORE_REPORT_PERCENT_KEY =
      "hbase.regionserver.quotas.policy.refresher.report.percent";
  static final double POLICY_REFRESHER_CHORE_REPORT_PERCENT_DEFAULT = 0.95;

  /** Per-RegionServer manager which tracks and enforces the quota snapshots. */
  private final RegionServerSpaceQuotaManager manager;
  /** Connection used to read the hbase:quota table. */
  private final Connection conn;

  /**
   * @param manager the per-RegionServer quota manager to keep up to date
   * @param conn connection used to read the hbase:quota table
   */
  public SpaceQuotaRefresherChore(RegionServerSpaceQuotaManager manager, Connection conn) {
    super(SpaceQuotaRefresherChore.class.getSimpleName(),
        manager.getRegionServerServices(),
        getPeriod(manager.getRegionServerServices().getConfiguration()),
        getInitialDelay(manager.getRegionServerServices().getConfiguration()),
        getTimeUnit(manager.getRegionServerServices().getConfiguration()));
    this.manager = manager;
    this.conn = conn;
  }

  @Override
  protected void chore() {
    try {
      if (LOG.isTraceEnabled()) {
        LOG.trace("Reading current quota snapshots from hbase:quota.");
      }
      // Get the snapshots that the quota manager is currently aware of
      final Map<TableName, SpaceQuotaSnapshot> currentSnapshots =
          getManager().copyQuotaSnapshots();
      // Read the new snapshots from the quota table
      final Map<TableName, SpaceQuotaSnapshot> newSnapshots = fetchSnapshotsFromQuotaTable();
      if (LOG.isTraceEnabled()) {
        LOG.trace(currentSnapshots.size() + " table quota snapshots are collected, "
            + "read " + newSnapshots.size() + " from the quota table.");
      }
      // Iterate over each new quota snapshot
      for (Entry<TableName, SpaceQuotaSnapshot> entry : newSnapshots.entrySet()) {
        final TableName tableName = entry.getKey();
        final SpaceQuotaSnapshot newSnapshot = entry.getValue();
        // May be null!
        final SpaceQuotaSnapshot currentSnapshot = currentSnapshots.get(tableName);
        if (LOG.isTraceEnabled()) {
          LOG.trace(tableName + ": current=" + currentSnapshot + ", new=" + newSnapshot);
        }
        if (!newSnapshot.equals(currentSnapshot)) {
          // The snapshot changed: enforcement may need to be turned on or off.
          // The two transitions below are mutually exclusive (they disagree on
          // whether the new snapshot is in violation), hence the else-if.
          if (!isInViolation(currentSnapshot) && isInViolation(newSnapshot)) {
            // Transition: compliant (or unknown) -> in violation.
            if (LOG.isTraceEnabled()) {
              LOG.trace("Enabling " + newSnapshot + " on " + tableName);
            }
            getManager().enforceViolationPolicy(tableName, newSnapshot);
          } else if (isInViolation(currentSnapshot) && !isInViolation(newSnapshot)) {
            // Transition: in violation -> compliant.
            if (LOG.isTraceEnabled()) {
              LOG.trace("Removing quota violation policy on " + tableName);
            }
            getManager().disableViolationPolicyEnforcement(tableName);
          }
        }
      }

      // We're intentionally ignoring anything extra with the currentSnapshots. If we were missing
      // information from the RegionServers to create an accurate SpaceQuotaSnapshot in the Master,
      // the Master will generate a new SpaceQuotaSnapshot which represents this state. This lets
      // us avoid having to do anything special with currentSnapshots here.

      // Update the snapshots in the manager
      getManager().updateQuotaSnapshot(newSnapshots);
    } catch (IOException e) {
      LOG.warn(
          "Caught exception while refreshing enforced quota violation policies, will retry.", e);
    }
  }

  /**
   * Checks if the given <code>snapshot</code> is in violation, allowing the snapshot to be null.
   * If the snapshot is null, this is interpreted as no snapshot which implies not in violation.
   *
   * @param snapshot The snapshot to operate on.
   * @return true if the snapshot is in violation, false otherwise.
   */
  boolean isInViolation(SpaceQuotaSnapshot snapshot) {
    if (snapshot == null) {
      return false;
    }
    return snapshot.getQuotaStatus().isInViolation();
  }

  /**
   * Reads all quota snapshots from the quota table.
   *
   * @return The current "view" of space use by each table.
   * @throws IOException if a row of the quota table cannot be read or parsed.
   */
  public Map<TableName, SpaceQuotaSnapshot> fetchSnapshotsFromQuotaTable() throws IOException {
    try (Table quotaTable = getConnection().getTable(QuotaUtil.QUOTA_TABLE_NAME);
        ResultScanner scanner = quotaTable.getScanner(QuotaTableUtil.makeQuotaSnapshotScan())) {
      Map<TableName, SpaceQuotaSnapshot> snapshots = new HashMap<>();
      for (Result result : scanner) {
        try {
          extractQuotaSnapshot(result, snapshots);
        } catch (IllegalArgumentException e) {
          // Surface the failing row in both the log and the thrown exception.
          final String msg = "Failed to parse result for row " + Bytes.toString(result.getRow());
          LOG.error(msg, e);
          throw new IOException(msg, e);
        }
      }
      return snapshots;
    }
  }

  /**
   * Wrapper around {@link QuotaTableUtil#extractQuotaSnapshot(Result, Map)} for testing.
   */
  void extractQuotaSnapshot(Result result, Map<TableName, SpaceQuotaSnapshot> snapshots) {
    QuotaTableUtil.extractQuotaSnapshot(result, snapshots);
  }

  /** Returns the connection used to read the quota table (overridable for tests). */
  Connection getConnection() {
    return conn;
  }

  /** Returns the quota manager being kept up to date (overridable for tests). */
  RegionServerSpaceQuotaManager getManager() {
    return manager;
  }

  /**
   * Extracts the period for the chore from the configuration.
   *
   * @param conf The configuration object.
   * @return The configured chore period or the default value.
   */
  static int getPeriod(Configuration conf) {
    return conf.getInt(POLICY_REFRESHER_CHORE_PERIOD_KEY,
        POLICY_REFRESHER_CHORE_PERIOD_DEFAULT);
  }

  /**
   * Extracts the initial delay for the chore from the configuration.
   *
   * @param conf The configuration object.
   * @return The configured chore initial delay or the default value.
   */
  static long getInitialDelay(Configuration conf) {
    return conf.getLong(POLICY_REFRESHER_CHORE_DELAY_KEY,
        POLICY_REFRESHER_CHORE_DELAY_DEFAULT);
  }

  /**
   * Extracts the time unit for the chore period and initial delay from the configuration. The
   * configuration value for {@link #POLICY_REFRESHER_CHORE_TIMEUNIT_KEY} must correspond to
   * a {@link TimeUnit} value.
   *
   * @param conf The configuration object.
   * @return The configured time unit for the chore period and initial delay or the default value.
   */
  static TimeUnit getTimeUnit(Configuration conf) {
    return TimeUnit.valueOf(conf.get(POLICY_REFRESHER_CHORE_TIMEUNIT_KEY,
        POLICY_REFRESHER_CHORE_TIMEUNIT_DEFAULT));
  }

  /**
   * Extracts the percent of Regions for a table to have been reported to enable quota violation
   * state change.
   *
   * @param conf The configuration object.
   * @return The percent of regions reported to use.
   */
  static Double getRegionReportPercent(Configuration conf) {
    return conf.getDouble(POLICY_REFRESHER_CHORE_REPORT_PERCENT_KEY,
        POLICY_REFRESHER_CHORE_REPORT_PERCENT_DEFAULT);
  }
}
| gustavoanatoly/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/quotas/SpaceQuotaRefresherChore.java | Java | apache-2.0 | 9,262 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Primitives;
namespace Microsoft.AspNetCore.Server.IISIntegration
{
internal class ForwardedTlsConnectionFeature : ITlsConnectionFeature
{
    private readonly ILogger _logger;
    private StringValues _header;
    private X509Certificate2? _certificate;

    public ForwardedTlsConnectionFeature(ILogger logger, StringValues header)
    {
        _logger = logger;
        _header = header;
    }

    public X509Certificate2? ClientCertificate
    {
        get
        {
            // Decode lazily: parse the forwarded header only on first access,
            // and only when a header value is present.
            if (_certificate == null && _header != StringValues.Empty)
            {
                try
                {
                    var encoded = _header.ToString();
                    _certificate = new X509Certificate2(Convert.FromBase64String(encoded));
                }
                catch (Exception ex)
                {
                    // A malformed header yields no certificate; the failure is
                    // logged and the property stays null.
                    _logger.LogWarning(0, ex, "Failed to read the client certificate.");
                }
            }

            return _certificate;
        }
        set
        {
            // An explicitly assigned certificate supersedes the forwarded header.
            _certificate = value;
            _header = StringValues.Empty;
        }
    }

    public Task<X509Certificate2?> GetClientCertificateAsync(CancellationToken cancellationToken)
        => Task.FromResult(ClientCertificate);
}
}
| aspnet/AspNetCore | src/Servers/IIS/IISIntegration/src/ForwardedTlsConnectionFeature.cs | C# | apache-2.0 | 1,776 |
# frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require_relative '../../aws-sdk-core/spec/shared_spec_helper'
$:.unshift(File.expand_path('../../lib', __FILE__))
$:.unshift(File.expand_path('../../../aws-sdk-core/lib', __FILE__))
$:.unshift(File.expand_path('../../../aws-sigv4/lib', __FILE__))
require 'rspec'
require 'webmock/rspec'
require 'aws-sdk-lexmodelsv2'
| aws/aws-sdk-ruby | gems/aws-sdk-lexmodelsv2/spec/spec_helper.rb | Ruby | apache-2.0 | 561 |
using System.IO;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Hosting;
namespace hutel
{
public static class Program
{
    // Process entry point: build the host, then block while it serves requests.
    public static void Main(string[] args)
    {
        var host = CreateHostBuilder(args).Build();
        host.Run();
    }

    // Configures the generic host: content root is the working directory and
    // the web pipeline is defined by Startup.
    public static IHostBuilder CreateHostBuilder(string[] args)
    {
        return Host.CreateDefaultBuilder(args)
            .UseContentRoot(Directory.GetCurrentDirectory())
            .ConfigureWebHostDefaults(builder => builder.UseStartup<Startup>());
    }
}
}
| demyanenko/hutel | server/Program.cs | C# | apache-2.0 | 601 |
class ActionError(RuntimeError):
    """
    Raised when something went wrong while performing an action.
    """
class NoValidSCVError(ActionError):
    """
    Raised when no valid SCV can be selected according to the defined rules.
    """
class NoValidBuildingLocationError(ActionError):
    """
    Raised when no valid location to build a building is found in the
    current screen.
    """
class NoUnitError(ActionError):
    """
    Raised when a unit or building is not present on screen although a
    function was asked to find one.
    """
class NoValidBuildingPointError(ActionError):
    """
    Raised when no valid point for placing a building can be determined.

    NOTE(review): description inferred from the class name and its sibling
    NoValidBuildingLocationError; confirm against the raising call sites.
    """
// Vietnamese form-validation alert messages.  The strings are user-facing and
// must remain in Vietnamese; English glosses are given in the comments.
var mustInputDM = 'Hãy nhập "tên danh mục" !';            // please enter the category name
var mustNumber = '"Thứ tự sấp xếp" phải là số !';         // "sort order" must be a number
var mustInput_Name = 'Hãy nhập "họ tên" !';               // please enter the full name
var mustInput_Company = 'Hãy nhập "tên công ty" !';       // please enter the company name
var mustInput_Address = 'Hãy nhập "địa chỉ" !';           // please enter the address
var mustInput_city = 'Hãy nhập "tỉnh / thành phố" !';     // please enter the province / city
var mustInput_Tel = 'Hãy nhập "số điện thoại" !';         // please enter the phone number
var mustInterger_Tel = '"Số điện thoại" phải là số !';    // the phone number must be numeric
var mustInput_Email = 'Hãy nhập "hộp thư" !';             // please enter the e-mail address
var invalid_Email = '"Hộp thư" không đúng định dạng !';   // the e-mail address is malformed
var mustSelect_Country = 'Hãy chọn "quốc gia" !';         // please select a country
//var mustInput_Detail = 'Hãy nhập "nội dung" !';         // (disabled) please enter the content
var mustInput_Search = 'Hãy nhập "nội dung tìm kiếm" !';  // please enter the search terms
class Hash
  # Builds a deterministic string fingerprint of the hash: keys are
  # stringified, the pairs are sorted, and everything is joined with dashes,
  # e.g. { "b" => 2, "a" => 1 }.stringify #=> "a-1-b-2"
  def stringify
    each_with_object({}) { |(key, value), acc| acc[key.to_s] = value }
      .sort
      .flatten
      .join("-")
  end
end
// =================================================================================================
// This file is part of the CLBlast project. The project is licensed under Apache Version 2.0. This
// project loosely follows the Google C++ styleguide and uses a tab-size of two spaces and a max-
// width of 100 characters per line.
//
// Author(s):
// Cedric Nugteren <www.cedricnugteren.nl>
//
// This file implements a class with static methods to describe the Xdotu routine. Examples of
// such 'descriptions' are how to calculate the size a of buffer or how to run the routine. These
// static methods are used by the correctness tester and the performance tester.
//
// =================================================================================================
#ifndef CLBLAST_TEST_ROUTINES_XDOTU_H_
#define CLBLAST_TEST_ROUTINES_XDOTU_H_
#include "test/routines/common.hpp"
namespace clblast {
// =================================================================================================
// See comment at top of file for a description of the class
// See comment at top of file for a description of the class
template <typename T>
class TestXdotu {
 public:

  // The BLAS level: 1, 2, or 3
  static size_t BLASLevel() { return 1; }

  // The list of arguments relevant for this routine
  static std::vector<std::string> GetOptions() {
    return {kArgN,
            kArgXInc, kArgYInc,
            kArgXOffset, kArgYOffset, kArgDotOffset};
  }
  // x and y are inputs; the scalar buffer receives the dot product, so it is
  // listed as both input and output.
  static std::vector<std::string> BuffersIn() { return {kBufVecX, kBufVecY, kBufScalar}; }
  static std::vector<std::string> BuffersOut() { return {kBufScalar}; }

  // Describes how to obtain the sizes of the buffers (element counts,
  // accounting for the stride (inc) and the starting offset).
  static size_t GetSizeX(const Arguments<T> &args) {
    return args.n * args.x_inc + args.x_offset;
  }
  static size_t GetSizeY(const Arguments<T> &args) {
    return args.n * args.y_inc + args.y_offset;
  }
  static size_t GetSizeDot(const Arguments<T> &args) {
    // A single result value stored at dot_offset.
    return 1 + args.dot_offset;
  }

  // Describes how to set the sizes of all the buffers
  static void SetSizes(Arguments<T> &args) {
    args.x_size = GetSizeX(args);
    args.y_size = GetSizeY(args);
    args.scalar_size = GetSizeDot(args);
  }

  // Describes what the default values of the leading dimensions of the matrices are
  static size_t DefaultLDA(const Arguments<T> &) { return 1; } // N/A for this routine
  static size_t DefaultLDB(const Arguments<T> &) { return 1; } // N/A for this routine
  static size_t DefaultLDC(const Arguments<T> &) { return 1; } // N/A for this routine

  // Describes which transpose options are relevant for this routine
  using Transposes = std::vector<Transpose>;
  static Transposes GetATransposes(const Transposes &) { return {}; } // N/A for this routine
  static Transposes GetBTransposes(const Transposes &) { return {}; } // N/A for this routine

  // Describes how to prepare the input data
  static void PrepareData(const Arguments<T>&, Queue&, const int, std::vector<T>&,
                          std::vector<T>&, std::vector<T>&, std::vector<T>&, std::vector<T>&,
                          std::vector<T>&, std::vector<T>&) {} // N/A for this routine

  // Describes how to run the CLBlast routine
  static StatusCode RunRoutine(const Arguments<T> &args, Buffers<T> &buffers, Queue &queue) {
    auto queue_plain = queue();
    auto event = cl_event{};
    auto status = Dotu<T>(args.n,
                          buffers.scalar(), args.dot_offset,
                          buffers.x_vec(), args.x_offset, args.x_inc,
                          buffers.y_vec(), args.y_offset, args.y_inc,
                          &queue_plain, &event);
    // Only wait for (and release) the completion event when the launch itself
    // succeeded; a failed launch returns no usable event.
    if (status == StatusCode::kSuccess) { clWaitForEvents(1, &event); clReleaseEvent(event); }
    return status;
  }

  // Describes how to run the clBLAS routine (for correctness/performance comparison)
  #ifdef CLBLAST_REF_CLBLAS
    static StatusCode RunReference1(const Arguments<T> &args, Buffers<T> &buffers, Queue &queue) {
      auto queue_plain = queue();
      auto event = cl_event{};
      auto status = clblasXdotu<T>(args.n,
                                   buffers.scalar, args.dot_offset,
                                   buffers.x_vec, args.x_offset, args.x_inc,
                                   buffers.y_vec, args.y_offset, args.y_inc,
                                   1, &queue_plain, 0, nullptr, &event);
      clWaitForEvents(1, &event);
      return static_cast<StatusCode>(status);
    }
  #endif

  // Describes how to run the CPU BLAS routine (for correctness/performance comparison)
  #ifdef CLBLAST_REF_CBLAS
    static StatusCode RunReference2(const Arguments<T> &args, BuffersHost<T> &buffers_host, Queue &) {
      cblasXdotu(args.n,
                 buffers_host.scalar, args.dot_offset,
                 buffers_host.x_vec, args.x_offset, args.x_inc,
                 buffers_host.y_vec, args.y_offset, args.y_inc);
      return StatusCode::kSuccess;
    }
  #endif

  // Describes how to run the cuBLAS routine (for correctness/performance comparison)
  #ifdef CLBLAST_REF_CUBLAS
    static StatusCode RunReference3(const Arguments<T> &args, BuffersCUDA<T> &buffers, Queue &) {
      auto status = cublasXdotu(reinterpret_cast<cublasHandle_t>(args.cublas_handle), args.n,
                                buffers.scalar, args.dot_offset,
                                buffers.x_vec, args.x_offset, args.x_inc,
                                buffers.y_vec, args.y_offset, args.y_inc);
      if (status == CUBLAS_STATUS_SUCCESS) { return StatusCode::kSuccess; } else { return StatusCode::kUnknownError; }
    }
  #endif

  // Describes how to download the results of the computation (more importantly: which buffer)
  static std::vector<T> DownloadResult(const Arguments<T> &args, Buffers<T> &buffers, Queue &queue) {
    std::vector<T> result(args.scalar_size, static_cast<T>(0));
    buffers.scalar.Read(queue, args.scalar_size, result);
    return result;
  }

  // Describes how to compute the indices of the result buffer
  static size_t ResultID1(const Arguments<T> &) { return 1; } // N/A for this routine
  static size_t ResultID2(const Arguments<T> &) { return 1; } // N/A for this routine
  static size_t GetResultIndex(const Arguments<T> &args, const size_t, const size_t) {
    // The dot product is a single value located at the configured offset.
    return args.dot_offset;
  }

  // Describes how to compute performance metrics
  static size_t GetFlops(const Arguments<T> &args) {
    return 2 * args.n; // one multiply and one add per element
  }
  static size_t GetBytes(const Arguments<T> &args) {
    return ((2 * args.n) + 1) * sizeof(T); // read x and y, write one result
  }
};
// =================================================================================================
} // namespace clblast
// CLBLAST_TEST_ROUTINES_XDOTU_H_
#endif
| dividiti/CLBlast | test/routines/level1/xdotu.hpp | C++ | apache-2.0 | 6,657 |
/*
AngelCode Tool Box Library
Copyright (c) 2004-2014 Andreas Jonsson
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any
damages arising from the use of this software.
Permission is granted to anyone to use this software for any
purpose, including commercial applications, and to alter it and
redistribute it freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you
must not claim that you wrote the original software. If you use
this software in a product, an acknowledgment in the product
documentation would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and
must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
Andreas Jonsson
andreas@angelcode.com
*/
// 2014-06-16 Prepared the code to work for both unicode and multibyte applications
#include <Windows.h>
#include "acwin_static.h"
namespace acWindow
{
// Constructs a static control wrapper; by default it behaves as a plain
// label (not a hyperlink).
CStatic::CStatic() : CWindow(), isUrl(false)
{
}
// Window procedure: when the control is in link mode it intercepts clicks
// (to open the URL) and painting (to draw the hyperlink look); every other
// message falls through to the default window procedure.
LRESULT CStatic::MsgProc(UINT msg, WPARAM wParam, LPARAM lParam)
{
    if( isUrl )
    {
        if( msg == WM_LBUTTONUP )
        {
            GoUrl();
            return 0;
        }
        if( msg == WM_PAINT )
        {
            OnPaint();
            return 0;
        }
    }

    return DefWndProc(msg, wParam, lParam);
}
// Switches the control into hyperlink mode and remembers the target address
// (UTF-8) that a click should open.
void CStatic::MakeUrl(const char *url)
{
    isUrl = true;
    this->url = url;
}
// Opens the stored URL in the user's default browser.
void CStatic::GoUrl()
{
    // Convert the UTF-8 url to the active character set (TCHAR is wide in
    // unicode builds, narrow otherwise) before handing it to the shell.
    TCHAR urlBuf[1024];
    ConvertUtf8ToTChar(url, urlBuf, 1024);
    ShellExecute(NULL, __TEXT("open"), urlBuf, NULL, NULL, SW_SHOWNORMAL);
}
// Custom WM_PAINT handler used in link mode: draws the label text in blue
// with an underline, the classic hyperlink look.
void CStatic::OnPaint()
{
    TCHAR text[256];

    PAINTSTRUCT ps;
    HDC dc = BeginPaint(hWnd, &ps);

    // Use the font assigned to the control, remembering the previously
    // selected objects so they can be restored before EndPaint.
    HFONT font = (HFONT)SendMessage(hWnd, WM_GETFONT, 0, 0);
    HFONT oldFont = (HFONT)SelectObject(dc, font);

    GetWindowText(hWnd, text, 256);

    RECT rc;
    GetClientRect(hWnd, &rc);

    // Blue text on a transparent background.
    SetBkMode(dc, TRANSPARENT);
    SetTextColor(dc, RGB(0,0,255));
    DrawText(dc, text, -1, &rc, 0);

    // DT_CALCRECT shrinks rc to the actual text extents so the underline
    // matches the width of the label rather than the whole client area.
    DrawText(dc, text, -1, &rc, DT_CALCRECT);

    // Underline: a one-pixel blue line along the bottom of the text.
    HPEN pen = CreatePen(PS_SOLID, 1, RGB(0,0,255));
    HPEN oldPen = (HPEN)SelectObject(dc, pen);
    MoveToEx(dc, rc.left, rc.bottom-1, 0);
    LineTo(dc, rc.right, rc.bottom-1);
    SelectObject(dc, oldPen);
    DeleteObject(pen);

    SelectObject(dc, oldFont);
    EndPaint(hWnd, &ps);
}
}
| cmbasnett/mandala | tools/bmfont/source/acwin_static.cpp | C++ | apache-2.0 | 2,422 |
using System;
using System.Collections.Generic;
using System.Windows.Documents;
namespace BrailleTranslator.Desktop.Model
{
public class RunComponent : InlineComponent
{
    public RunComponent()
    {
    }

    public RunComponent(string title) : base(title)
    {
    }

    public RunComponent(string title, Run run) : base(title, run)
    {
    }

    public RunComponent(Run run) : base(run)
    {
    }

    /// <summary>Gets the text of the wrapped <see cref="Run"/> element.</summary>
    public string Text => (Inline as Run).Text;

    /// <summary>A run is a leaf component and is never shown as visible.</summary>
    public override bool IsVisible => false;

    protected override void RemoveChild(Component component)
    {
        throw new NotSupportedException("Run component does not have child components");
    }

    protected override void PopulateChildren(TextElement textElement)
    {
        throw new NotSupportedException();
    }

    protected override void CombineComponents(IEnumerable<Component> components)
    {
        throw new NotImplementedException();
    }
}
} | ethno2405/BrailleTranslator | src/BrailleTranslator.Desktop/Model/RunComponent.cs | C# | apache-2.0 | 1,224 |
//
// browser.js - client-side engine
//

// True when the page is served from a local/packaged scheme (file:, browser
// extension resources, Qt resources, app bundles) rather than over HTTP.
var isFileProtocol = /^(file|chrome(-extension)?|resource|qrc|app):/.test(location.protocol);

// Default the environment: local hostnames, explicit ports and file-like
// protocols imply 'development' (caching off, watch support available),
// anything else is 'production'.
less.env = less.env || (location.hostname == '127.0.0.1' ||
                        location.hostname == '0.0.0.0' ||
                        location.hostname == 'localhost' ||
                        location.port.length > 0 ||
                        isFileProtocol ? 'development'
                                       : 'production');

// Load styles asynchronously (default: false)
//
// This is set to `false` by default, so that the body
// doesn't start loading before the stylesheets are parsed.
// Setting this to `true` can result in flickering.
//
less.async = less.async || false;
less.fileAsync = less.fileAsync || false;

// Interval between watch polls
less.poll = less.poll || (isFileProtocol ? 1000 : 1500);

// Setup user functions: copy caller-supplied custom functions onto the tree
// so they are available during evaluation.
if (less.functions) {
    for(var func in less.functions) {
        less.tree.functions[func] = less.functions[func];
    }
}

// Honour a "!dumpLineNumbers:<mode>" fragment in the page URL for debugging.
var dumpLineNumbers = /!dumpLineNumbers:(comments|mediaquery|all)/.exec(location.hash);
if (dumpLineNumbers) {
    less.dumpLineNumbers = dumpLineNumbers[1];
}

//
// Watch mode
//
// Turns on polling: forces development mode and starts the re-parse
// interval.  Returns the new watch state (true).
less.watch = function () {
    if (!less.watchMode ){
        less.env = 'development';
        initRunningMode();
    }
    return this.watchMode = true
};

// Stops the polling timer.  Returns the new watch state (false).
less.unwatch = function () {clearInterval(less.watchTimer); return this.watchMode = false; };
// Applies the environment-specific runtime behaviour: outside development the
// parser optimization level is simply raised; in development a polling timer
// is installed which, while watch mode is on, re-parses every sheet and
// regenerates its CSS.
function initRunningMode(){
    if (less.env !== 'development') {
        less.optimization = 3;
        return;
    }

    less.optimization = 0;
    less.watchTimer = setInterval(function () {
        if (!less.watchMode) {
            return;
        }
        loadStyleSheets(function (e, root, _, sheet, env) {
            if (e) {
                error(e, sheet.href);
            } else if (root) {
                createCSS(root.toCSS(less), sheet, env.lastModified);
            }
        });
    }, less.poll);
}
// Entering the page with "!watch" in the URL fragment enables watch mode
// immediately.
if (/!watch/.test(location.hash)) {
    less.watch();
}

// localStorage-backed cache of generated CSS; only used outside development.
// Access can throw (e.g. privacy mode), in which case caching stays disabled.
var cache = null;

if (less.env != 'development') {
    try {
        cache = (typeof(window.localStorage) === 'undefined') ? null : window.localStorage;
    } catch (_) {}
}

//
// Get all <link> tags with the 'rel' attribute set to "stylesheet/less"
// (or rel=stylesheet with a text/less / text/x-less type attribute).
//
var links = document.getElementsByTagName('link');
var typePattern = /^text\/(x-)?less$/;

less.sheets = [];

for (var i = 0; i < links.length; i++) {
    if (links[i].rel === 'stylesheet/less' || (links[i].rel.match(/stylesheet/) &&
       (links[i].type.match(typePattern)))) {
        less.sheets.push(links[i]);
    }
}
//
// With this function, it's possible to alter variables and re-render
// CSS without reloading less-files
//
var session_cache = '';
less.modifyVars = function(record) {
    // Build a synthetic LESS source: everything collected this session plus
    // one "@name: value;" declaration per entry in `record`.
    // `name` was previously an implicit global (the bare `for (name in ...)`
    // assigned to window.name); declare it locally instead.
    var name;
    var str = session_cache;
    for (name in record) {
        str += ((name.slice(0,1) === '@')? '' : '@') + name +': '+
               ((record[name].slice(-1) === ';')? record[name] : record[name] +';');
    }
    new(less.Parser)(new less.tree.parseEnv(less)).parse(str, function (e, root) {
        if (e) {
            error(e, "session_cache");
        } else {
            // Re-render into the style node of the last registered sheet.
            createCSS(root.toCSS(less), less.sheets[less.sheets.length - 1]);
        }
    });
};
// Re-parses every registered sheet and regenerates its CSS, then processes
// inline <style type="text/less"> blocks.  `reload` forces a cache bypass.
less.refresh = function (reload) {
    var startTime, endTime;
    startTime = endTime = new(Date);

    loadStyleSheets(function (e, root, _, sheet, env) {
        if (e) {
            return error(e, sheet.href);
        }
        if (env.local) {
            // Served straight from the localStorage cache - nothing to rewrite.
            log("loading " + sheet.href + " from cache.");
        } else {
            log("parsed " + sheet.href + " successfully.");
            createCSS(root.toCSS(less), sheet, env.lastModified);
        }
        log("css for " + sheet.href + " generated in " + (new(Date) - endTime) + 'ms');
        // env.remaining counts down across sheets; 0 means this was the last one.
        (env.remaining === 0) && log("css generated in " + (new(Date) - startTime) + 'ms');
        endTime = new(Date);
    }, reload);

    loadStyles();
};
less.refreshStyles = loadStyles;

// Initial page load: in development always re-parse; otherwise allow cache hits.
less.refresh(less.env === 'development');
// Parses every inline <style> block whose type matches text/less and replaces
// its contents with the generated CSS in place.
function loadStyles() {
    var styles = document.getElementsByTagName('style');
    for (var i = 0; i < styles.length; i++) {
        if (styles[i].type.match(typePattern)) {
            var env = new less.tree.parseEnv(less);
            env.filename = document.location.href.replace(/#.*$/, '');

            // NOTE(review): the callback reads `styles[i]`, which relies on
            // the parse callback firing synchronously for inline content; if
            // it were ever invoked asynchronously, `i` would have advanced.
            // Confirm before changing the parser's callback timing.
            new(less.Parser)(env).parse(styles[i].innerHTML || '', function (e, cssAST) {
                if (e) {
                    return error(e, "inline");
                }
                var css = cssAST.toCSS(less);
                var style = styles[i];
                style.type = 'text/css';
                if (style.styleSheet) {
                    // Old IE requires assignment through styleSheet.cssText.
                    style.styleSheet.cssText = css;
                } else {
                    style.innerHTML = css;
                }
            });
        }
    }
}
// Triggers a (re)load of every registered sheet.  The last argument tells
// each sheet how many remain after it, so the final callback can detect
// completion (remaining === 0).
function loadStyleSheets(callback, reload) {
    var index = 0;
    while (index < less.sheets.length) {
        loadStyleSheet(less.sheets[index], callback, reload, less.sheets.length - (index + 1));
        index++;
    }
}
// Computes a relative path prefix that rebases `url` against `baseUrl`.
// Returns "" when the two live on different hosts (no relative path exists).
function pathDiff(url, baseUrl) {
    var urlParts = extractUrlParts(url),
        baseUrlParts = extractUrlParts(baseUrl);

    if (urlParts.hostPart !== baseUrlParts.hostPart) {
        return "";
    }

    // Find the first directory index at which the two paths disagree.
    var limit = Math.max(baseUrlParts.directories.length, urlParts.directories.length),
        common = 0;
    while (common < limit &&
           baseUrlParts.directories[common] === urlParts.directories[common]) {
        common++;
    }

    var baseTail = baseUrlParts.directories.slice(common),
        urlTail = urlParts.directories.slice(common),
        diff = "",
        j;

    // One "../" per directory left under the base path...
    for (j = 0; j < baseTail.length - 1; j++) {
        diff += "../";
    }
    // ...then descend into the target's remaining directories.
    for (j = 0; j < urlTail.length - 1; j++) {
        diff += urlTail[j] + "/";
    }
    return diff;
}
// Splits a URL into host part, directory list, path, file URL and full URL.
// Relative inputs are resolved against `baseUrl`.  Throws when either URL
// cannot be parsed.
function extractUrlParts(url, baseUrl) {
    // urlParts[1] = protocol&hostname || /
    // urlParts[2] = / if path relative to host base
    // urlParts[3] = directories
    // urlParts[4] = filename
    // urlParts[5] = parameters
    var urlPartsRegex = /^((?:[a-z-]+:)?\/+?(?:[^\/\?#]*\/)|([\/\\]))?((?:[^\/\\\?#]*[\/\\])*)([^\/\\\?#]*)([#\?].*)?$/,
        urlParts = url.match(urlPartsRegex),
        returner = {}, directories = [], i, baseUrlParts;

    if (!urlParts) {
        throw new Error("Could not parse sheet href - '"+url+"'");
    }

    // Stylesheets in IE don't always return the full path
    if (!urlParts[1] || urlParts[2]) {
        baseUrlParts = baseUrl.match(urlPartsRegex);
        if (!baseUrlParts) {
            throw new Error("Could not parse page url - '"+baseUrl+"'");
        }
        urlParts[1] = baseUrlParts[1];
        if (!urlParts[2]) {
            urlParts[3] = baseUrlParts[3] + urlParts[3];
        }
    }

    if (urlParts[3]) {
        // Normalize ALL backslashes to forward slashes before splitting.
        // String.replace with a string pattern only replaces the FIRST
        // occurrence, which mangled Windows-style paths containing more than
        // one backslash - a global regex fixes that.
        directories = urlParts[3].replace(/\\/g, "/").split("/");

        // Collapse "directory/.." sequences in place.
        for(i = 0; i < directories.length; i++) {
            if (directories[i] === ".." && i > 0) {
                directories.splice(i-1, 2);
                i -= 2;
            }
        }
    }

    returner.hostPart = urlParts[1];
    returner.directories = directories;
    returner.path = urlParts[1] + directories.join("/");
    returner.fileUrl = returner.path + (urlParts[4] || "");
    returner.url = returner.fileUrl + (urlParts[5] || "");
    return returner;
}
// Loads one sheet (or @import), parses it and hands the resulting AST to
// `callback`.  `remaining` is the number of sheets still outstanding after
// this one; it is threaded through to the callback unchanged.
function loadStyleSheet(sheet, callback, reload, remaining) {
    // sheet may be set to the stylesheet for the initial load or a collection of properties including
    // some env variables for imports
    var hrefParts = extractUrlParts(sheet.href, window.location.href);
    var href = hrefParts.url;
    var css = cache && cache.getItem(href);
    var timestamp = cache && cache.getItem(href + ':timestamp');
    var styles = { css: css, timestamp: timestamp };
    var env;

    if (sheet instanceof less.tree.parseEnv) {
        // Recursive call for an @import: inherit the importing sheet's env.
        env = new less.tree.parseEnv(sheet);
    } else {
        env = new less.tree.parseEnv(less);
        env.entryPath = hrefParts.path;
        env.mime = sheet.type;
    }

    if (env.relativeUrls) {
        //todo - this relies on option being set on less object rather than being passed in as an option
        //       - need an originalRootpath
        if (less.rootpath) {
            env.rootpath = extractUrlParts(less.rootpath + pathDiff(hrefParts.path, env.entryPath)).path;
        } else {
            env.rootpath = hrefParts.path;
        }
    } else {
        if (!less.rootpath) {
            env.rootpath = env.entryPath;
        }
    }

    xhr(href, sheet.type, function (data, lastModified) {
        // Store data this session (with @imports stripped) so modifyVars can
        // re-parse the collected source later.
        session_cache += data.replace(/@import .+?;/ig, '');

        if (!reload && styles && lastModified &&
           (new(Date)(lastModified).valueOf() ===
            new(Date)(styles.timestamp).valueOf())) {
            // Use local copy: the cached CSS is still fresh.
            createCSS(styles.css, sheet);
            callback(null, null, data, sheet, { local: true, remaining: remaining }, href);
        } else {
            // Use remote copy (re-parse)
            try {
                env.contents[href] = data;  // Updating content cache
                env.paths = [hrefParts.path];
                env.filename = href;
                env.rootFilename = env.rootFilename || href;

                new(less.Parser)(env).parse(data, function (e, root) {
                    if (e) { return callback(e, null, null, sheet); }
                    try {
                        callback(e, root, data, sheet, { local: false, lastModified: lastModified, remaining: remaining }, href);
                        //TODO - there must be a better way? A generic less-to-css function that can both call error
                        //and removeNode where appropriate
                        //should also add tests
                        // Clear any previously displayed error banner for the
                        // root sheet once it parses successfully again.
                        if (env.rootFilename === href) {
                            removeNode(document.getElementById('less-error-message:' + extractId(href)));
                        }
                    } catch (e) {
                        callback(e, null, null, sheet);
                    }
                });
            } catch (e) {
                callback(e, null, null, sheet);
            }
        }
    }, function (status, url) {
        callback({ type: 'File', message: "'" + url + "' wasn't found (" + status + ")" }, null, null, sheet);
    });
}
// Derives a DOM-id-safe identifier from a sheet href by applying, in order:
// strip protocol & domain, strip the root slash, strip a simple extension,
// replace illegal characters with dashes, and turn dots into colons.
function extractId(href) {
    var transforms = [
        [/^[a-z-]+:\/+?[^\/]+/, '' ],  // Remove protocol & domain
        [/^\//,                 '' ],  // Remove root /
        [/\.[a-zA-Z]+$/,        '' ],  // Remove simple extension
        [/[^\.\w-]+/g,          '-'],  // Replace illegal characters
        [/\./g,                 ':']   // Replace dots with colons (for valid id)
    ];
    return transforms.reduce(function (id, t) {
        return id.replace(t[0], t[1]);
    }, href);
}
// Inserts (or updates) the generated <style> node for `sheet`, and persists
// the CSS plus its Last-Modified stamp to the localStorage cache when
// available.
function createCSS(styles, sheet, lastModified) {
    var css;

    // Strip the query-string
    var href = sheet.href || '';

    // If there is no title set, use the filename, minus the extension
    var id = 'less:' + (sheet.title || extractId(href));

    // If the stylesheet doesn't exist, create a new node
    if ((css = document.getElementById(id)) === null) {
        css = document.createElement('style');
        css.type = 'text/css';
        if( sheet.media ){ css.media = sheet.media; }
        css.id = id;
        // Keep the generated node next to its source <link> so cascade order
        // matches the document; fall back to <head> when there is no sibling.
        var nextEl = sheet && sheet.nextSibling || null;
        (nextEl || document.getElementsByTagName('head')[0]).parentNode.insertBefore(css, nextEl);
    }

    if (css.styleSheet) { // IE
        try {
            css.styleSheet.cssText = styles;
        } catch (e) {
            throw new(Error)("Couldn't reassign styleSheet.cssText.");
        }
    } else {
        // Replace the text node only when the CSS actually changed, to avoid
        // needless re-rendering.
        (function (node) {
            if (css.childNodes.length > 0) {
                if (css.firstChild.nodeValue !== node.nodeValue) {
                    css.replaceChild(node, css.firstChild);
                }
            } else {
                css.appendChild(node);
            }
        })(document.createTextNode(styles));
    }

    // Don't update the local store if the file wasn't modified
    if (lastModified && cache) {
        log('saving ' + href + ' to cache.');
        try {
            cache.setItem(href, styles);
            cache.setItem(href + ':timestamp', lastModified);
        } catch(e) {
            // Storage full or unavailable - caching is best-effort only.
            //TODO - could do with adding more robust error handling
            log('failed to save');
        }
    }
}
// Performs a GET request for a (less/css) resource.
// - url: resource to fetch
// - type: Accept header value (defaults to less/css content types)
// - callback(responseText, lastModified): invoked on success
// - errback(status, url): invoked on HTTP failure
// Whether the request is synchronous is driven by less.async /
// less.fileAsync and whether the page runs on the file:// protocol.
// NOTE(review): the local variable deliberately shadows the function name.
function xhr(url, type, callback, errback) {
    var xhr = getXMLHttpRequest();
    var async = isFileProtocol ? less.fileAsync : less.async;

    if (typeof(xhr.overrideMimeType) === 'function') {
        xhr.overrideMimeType('text/css');
    }
    xhr.open('GET', url, async);
    xhr.setRequestHeader('Accept', type || 'text/x-less, text/css; q=0.9, */*; q=0.5');
    xhr.send(null);

    if (isFileProtocol && !less.fileAsync) {
        // Synchronous file:// access: status 0 is accepted because local
        // requests report no HTTP status.
        if (xhr.status === 0 || (xhr.status >= 200 && xhr.status < 300)) {
            callback(xhr.responseText);
        } else {
            errback(xhr.status, url);
        }
    } else if (async) {
        xhr.onreadystatechange = function () {
            if (xhr.readyState == 4) {
                handleResponse(xhr, callback, errback);
            }
        };
    } else {
        // Synchronous http(s) request: response is available immediately.
        handleResponse(xhr, callback, errback);
    }

    // Dispatches a completed response to the success or error path.
    function handleResponse(xhr, callback, errback) {
        if (xhr.status >= 200 && xhr.status < 300) {
            callback(xhr.responseText,
                    xhr.getResponseHeader("Last-Modified"));
        } else if (typeof(errback) === 'function') {
            errback(xhr.status, url);
        }
    }
}
// Creates an XHR object, preferring the native implementation and falling
// back to the legacy IE ActiveX control; returns null when neither exists.
function getXMLHttpRequest() {
    if (window.XMLHttpRequest) {
        return new(XMLHttpRequest);
    }
    try {
        return new(ActiveXObject)("MSXML2.XMLHTTP.3.0");
    } catch (e) {
        log("browser doesn't support AJAX.");
        return null;
    }
}
function removeNode(node) {
return node && node.parentNode.removeChild(node);
}
// Writes a prefixed debug message to the console, but only in development
// mode and only when a console is actually available.
function log(str) {
    if (less.env == 'development' && typeof(console) !== "undefined") {
        console.log('less: ' + str);
    }
}
// Renders an in-page error overlay for a failed load/parse.
// - e: error object; may carry .type, .message, .filename, .extract (array
//   of up to three source lines around the error), .line, .column, .stack
// - rootHref: href of the stylesheet being processed; used to derive a
//   stable overlay element id so repeated errors replace the old overlay
function error(e, rootHref) {
    var id = 'less-error-message:' + extractId(rootHref || "");
    // Markup for a single source line in the error listing.
    var template = '<li><label>{line}</label><pre class="{class}">{content}</pre></li>';
    var elem = document.createElement('div'), timer, content, error = [];
    var filename = e.filename || rootHref;
    var filenameNoPath = filename.match(/([^\/]+(\?.*)?)$/)[1];

    elem.id = id;
    elem.className = "less-error-message";

    content = '<h3>' + (e.type || "Syntax") + "Error: " + (e.message || 'There is an error in your .less file') +
        '</h3>' + '<p>in <a href="' + filename + '">' + filenameNoPath + "</a> ";

    // Appends extract line i (0..2 = line before / error line / line after);
    // the displayed line number is offset by (i - 1) from e.line.
    var errorline = function (e, i, classname) {
        if (e.extract[i] != undefined) {
            error.push(template.replace(/\{line\}/, (parseInt(e.line) || 0) + (i - 1))
                               .replace(/\{class\}/, classname)
                               .replace(/\{content\}/, e.extract[i]));
        }
    };

    if (e.extract) {
        errorline(e, 0, '');
        errorline(e, 1, 'line');
        errorline(e, 2, '');
        content += 'on line ' + e.line + ', column ' + (e.column + 1) + ':</p>' +
                    '<ul>' + error.join('') + '</ul>';
    } else if (e.stack) {
        // No source extract available — fall back to the JS stack trace.
        content += '<br/>' + e.stack.split('\n').slice(1).join('<br/>');
    }
    elem.innerHTML = content;

    // CSS for error messages
    createCSS([
        '.less-error-message ul, .less-error-message li {',
            'list-style-type: none;',
            'margin-right: 15px;',
            'padding: 4px 0;',
            'margin: 0;',
        '}',
        '.less-error-message label {',
            'font-size: 12px;',
            'margin-right: 15px;',
            'padding: 4px 0;',
            'color: #cc7777;',
        '}',
        '.less-error-message pre {',
            'color: #dd6666;',
            'padding: 4px 0;',
            'margin: 0;',
            'display: inline-block;',
        '}',
        '.less-error-message pre.line {',
            'color: #ff0000;',
        '}',
        '.less-error-message h3 {',
            'font-size: 20px;',
            'font-weight: bold;',
            'padding: 15px 0 5px 0;',
            'margin: 0;',
        '}',
        '.less-error-message a {',
            'color: #10a',
        '}',
        '.less-error-message .error {',
            'color: red;',
            'font-weight: bold;',
            'padding-bottom: 2px;',
            'border-bottom: 1px dashed red;',
        '}'
    ].join('\n'), { title: 'error-message' });

    elem.style.cssText = [
        "font-family: Arial, sans-serif",
        "border: 1px solid #e00",
        "background-color: #eee",
        "border-radius: 5px",
        "-webkit-border-radius: 5px",
        "-moz-border-radius: 5px",
        "color: #e00",
        "padding: 15px",
        "margin-bottom: 15px"
    ].join(';');

    // In development mode, poll until document.body exists (errors may fire
    // before DOM ready), then insert the overlay or replace a previous one.
    if (less.env == 'development') {
        timer = setInterval(function () {
            if (document.body) {
                if (document.getElementById(id)) {
                    document.body.replaceChild(elem, document.getElementById(id));
                } else {
                    document.body.insertBefore(elem, document.body.firstChild);
                }
                clearInterval(timer);
            }
        }, 10);
    }
}
| wcea/less.js | lib/less/browser.js | JavaScript | apache-2.0 | 17,695 |
/*
Copyright 2012 - 2014 Jerome Leleu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pac4j.openid.profile.yahoo;
import org.pac4j.core.profile.AttributesDefinition;
import org.pac4j.core.profile.converter.Converters;
/**
* This class defines the attributes of the {@link YahooOpenIdProfile}.
*
* @author Patrice de Saint Steban
* @since 1.6.0
*/
public class YahooOpenIdAttributesDefinition extends AttributesDefinition {

    /** User's e-mail address. */
    public static final String EMAIL = "email";
    /** User's preferred language/locale. */
    public static final String LANGUAGE = "language";
    /** User's full display name. */
    public static final String FULLNAME = "fullname";
    /** URL of the user's profile picture. */
    public static final String PROFILEPICTURE = "picture_url";

    /**
     * Registers the Yahoo OpenID attributes with converters matching each
     * attribute's actual value type.
     */
    public YahooOpenIdAttributesDefinition() {
        addAttribute(EMAIL, Converters.stringConverter);
        addAttribute(LANGUAGE, Converters.localeConverter);
        addAttribute(FULLNAME, Converters.stringConverter);
        // Fix: picture_url is a plain URL string; it was previously
        // registered with the gender converter (copy-paste error), which
        // cannot produce a sensible value for a URL.
        addAttribute(PROFILEPICTURE, Converters.stringConverter);
    }
}
| leleuj/pac4j | pac4j-openid/src/main/java/org/pac4j/openid/profile/yahoo/YahooOpenIdAttributesDefinition.java | Java | apache-2.0 | 1,480 |
/*
* Specialized Sub class to handle implementation of LRU using Linked list and
* HashMap
*/
package cache;
import java.util.LinkedList;
/**
*
* @author gk
*/
public class LRUCache<Key,Value> extends Cache<Key,Value> {

    // Usage-order list: most-recently-used keys at the front,
    // least-recently-used at the back (the eviction end). Each cached key
    // appears exactly once.
    LinkedList<Key> list;

    public LRUCache(int capacity){
        super(capacity,CacheType.LRU);
        list = new LinkedList<Key>();
    }

    /**
     * Inserts or updates an entry and marks it as most recently used.
     * When the cache is at capacity and the key is new, the least recently
     * used entry is evicted first.
     *
     * @throws NullPointerException if key or value is null
     */
    @Override
    synchronized void add(Key key, Value value){
        if(key == null || value == null) {
            throw new NullPointerException("Null key or value");
        }
        if(hmap.containsKey(key)){
            // Updating an existing entry counts as an access: unlink its old
            // list node instead of appending a duplicate. (The previous code
            // always appended, leaving duplicate nodes that corrupted
            // eviction: removing the tail duplicate deleted the map entry
            // while a stale copy of the key stayed in the list.)
            list.remove(key);
        } else if(list.size() == getCapacity()){
            // Cache full and key is new: evict the LRU entry (list tail).
            // Checking containsKey first also avoids evicting an unrelated
            // entry when merely updating an existing key at full capacity.
            hmap.remove(list.removeLast());
        }
        // New and updated keys both become most recently used (front).
        // The previous code appended new keys to the tail, i.e. straight
        // into the eviction position.
        list.addFirst(key);
        hmap.put(key,value);
    }

    /**
     * Returns the cached value for the key, or null when absent, and
     * promotes the key to most recently used on a hit.
     *
     * @throws NullPointerException if key is null
     */
    @Override
    synchronized Value get(Key key) {
        if(key == null){
            throw new NullPointerException("Key is Null");
        }
        if(hmap.containsKey(key)) {
            // Most recently used is put in front, least recently used last.
            list.remove(key);
            list.addFirst(key);
            return hmap.get(key);
        }
        return null;
    }
}
| ganesh-karthick/Cache | src/cache/LRUCache.java | Java | apache-2.0 | 1,428 |
using Elders.Cronus.Projections;
using Machine.Specifications;
namespace Elders.Cronus.Tests.Projections
{
    [Subject("Projections")]
    public class When_comparing_lt_projection_version_with_different_hashes
    {
        // Arrange: two live versions of the same projection ("compare_lt").
        // The right-hand side has the higher revision (2 vs 1); the hashes
        // deliberately differ — presumably the '<' operator is driven by the
        // revision and ignores the hash (verify against ProjectionVersion).
        Establish context = () =>
        {
            lower = new ProjectionVersion("compare_lt", ProjectionStatus.Live, 1, "compare_lt_hash");
            higher = new ProjectionVersion("compare_lt", ProjectionStatus.Live, 2, "ops");
        };

        // Act: compare via the overloaded '<' operator.
        Because of = () => result = lower < higher;

        // Assert: the lower revision compares as less-than.
        It should_be_able_to_compare_with_lt = () => result.ShouldBeTrue();

        static bool result;
        static ProjectionVersion lower;
        static ProjectionVersion higher;
    }
}
| Elders/Cronus | src/Elders.Cronus.Tests/Projections/When_comparing_lt_projection_version_with_different_hashes.cs | C# | apache-2.0 | 718 |
/* global QUnit, sinon, testEventHandlerResolver, someGlobalMethodOnWindow */
sap.ui.define([
	"sap/ui/core/Control",
	"sap/ui/model/json/JSONModel",
	"sap/ui/core/mvc/EventHandlerResolver",
	"sap/base/Log"
], function(Control, JSONModel, EventHandlerResolver, Log) {
	"use strict";

	// Shared fixtures, (re)initialized by the module hooks below.
	var oController;   // fake controller handed to the resolver
	var thisContext;   // records the 'this' seen by invoked handlers
	var oDummySource;  // control acting as the event source

	// Minimal control with one property; backs the $source model lookups.
	var DummyControl = Control.extend("test.DummyControl", {
		metadata: {
			properties: {
				someControlProperty: "string"
			}
		}
	});

	// Stand-in for a real event object: only the pieces the resolver uses
	// (getSource and mParameters) are provided.
	var oDummyEvent = {
		getSource: function() {
			return oDummySource;
		},
		mParameters: {
			someEventParameter: "someEventParameterValue"
		}
	};

	// Simulates a module made available to the resolver via XML require.
	var mLocals = {
		someMethod: function() {
			thisContext = this;
		},
		someFormatter: function() {
			return "#" + Array.prototype.slice.call(arguments).join(",") + "#";
		}
	};
QUnit.module("sap.ui.core.mvc.EventHandlerResolver - handler function", {
beforeEach: function() {
thisContext = null;
oController = {
fnControllerMethod: function() {
thisContext = this;
},
ns: {
deepMethod: function() {
}
}
};
window.testEventHandlerResolver = {
subobject: {
someGlobalMethod: function() {
thisContext = this;
}
}
};
window.someGlobalMethodOnWindow = function() {
thisContext = this;
};
oDummySource = new DummyControl();
},
afterEach: function() {
oController = null;
window.testEventHandlerResolver = null;
oDummySource.destroy();
}
});
QUnit.test("Plain handler resolution", function(assert) {
var fnController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod", oController)[0];
assert.equal(fnController, oController.fnControllerMethod, "Controller method should be found");
var fnGlobal = EventHandlerResolver.resolveEventHandler("testEventHandlerResolver.subobject.someGlobalMethod", oController)[0];
assert.equal(fnGlobal, window.testEventHandlerResolver.subobject.someGlobalMethod, "Global method should be found");
fnGlobal = EventHandlerResolver.resolveEventHandler("ns.deepMethod", oController);
assert.strictEqual(fnGlobal, undefined, "Function name with deeper path shouldn't be searched in the controller");
});
QUnit.test("Handler resolution when parentheses are present", function(assert) {
sinon.spy(oController, "fnControllerMethod");
var fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod()", oController)[0];
fnFromController(oDummyEvent);
assert.equal(oController.fnControllerMethod.callCount, 1, "Controller method should be called");
oController.fnControllerMethod.resetHistory();
var fnFromController2 = EventHandlerResolver.resolveEventHandler("fnControllerMethod()", oController)[0];
fnFromController2(oDummyEvent);
assert.equal(oController.fnControllerMethod.callCount, 1, "Controller method without dot should be called");
sinon.spy(testEventHandlerResolver.subobject, "someGlobalMethod");
var fnFromGlobal = EventHandlerResolver.resolveEventHandler("testEventHandlerResolver.subobject.someGlobalMethod()", oController)[0];
fnFromGlobal(oDummyEvent);
assert.equal(testEventHandlerResolver.subobject.someGlobalMethod.callCount, 1, "Global method should be called once");
sinon.spy(window, "someGlobalMethodOnWindow");
fnFromGlobal = EventHandlerResolver.resolveEventHandler("someGlobalMethodOnWindow()", oController)[0];
fnFromGlobal(oDummyEvent);
assert.equal(someGlobalMethodOnWindow.callCount, 1, "Global method without dot should be called once");
});
QUnit.test("Handler resolution with local variables", function(assert) {
var oSpy = this.spy(mLocals, "someMethod");
// immediately call the resolving handler
EventHandlerResolver.resolveEventHandler("Module.someMethod()", oController, {Module: mLocals})[0]();
assert.equal(oSpy.callCount, 1, "Module method should be called once");
oSpy.resetHistory();
// without parentheses
EventHandlerResolver.resolveEventHandler("Module.someMethod", oController, {Module: mLocals})[0]();
assert.equal(oSpy.callCount, 1, "Module method should be called once");
oSpy.resetHistory();
// test without associated controller
EventHandlerResolver.resolveEventHandler("Module.someMethod()", null, {Module: mLocals})[0]();
assert.equal(oSpy.callCount, 1, "Module method should be called once");
oSpy.resetHistory();
// without parentheses
EventHandlerResolver.resolveEventHandler("Module.someMethod", null, {Module: mLocals})[0]();
assert.equal(oSpy.callCount, 1, "Module method should be called once");
});
QUnit.test("Log warning for usage of not properly XML-required modules", function(assert) {
var logSpy = sinon.spy(Log, "warning");
// immediately call the resolving handler
EventHandlerResolver.resolveEventHandler("Module.someMethod()", oController, {Module: {}});
sinon.assert.calledWithExactly(logSpy, "Event handler name 'Module.someMethod()' could not be resolved to an event handler function");
logSpy.resetHistory();
// test without associated controller
EventHandlerResolver.resolveEventHandler("Module.someMethod()", null, {Module: {}});
sinon.assert.calledWithExactly(logSpy, "Event handler name 'Module.someMethod()' could not be resolved to an event handler function");
logSpy.restore();
});
QUnit.test("'this' context when no parenthese is present", function(assert) {
// controller functions
var vResolvedHandler = EventHandlerResolver.resolveEventHandler(".fnControllerMethod", oController);
vResolvedHandler[0].call(vResolvedHandler[1], oDummyEvent);
assert.equal(thisContext, oController, "Controller method should be called with controller as 'this' context");
thisContext = "wrong"; // to make sure non-calls don't accidentally get the correct value
// controller functions without dot
vResolvedHandler = EventHandlerResolver.resolveEventHandler("fnControllerMethod", oController);
vResolvedHandler[0].call(vResolvedHandler[1], oDummyEvent);
assert.equal(thisContext, oController, "Controller method without dot should be called with controller as 'this' context");
thisContext = "wrong";
// global functions
vResolvedHandler = EventHandlerResolver.resolveEventHandler("testEventHandlerResolver.subobject.someGlobalMethod", oController);
vResolvedHandler[0].call(vResolvedHandler[1], oDummyEvent);
assert.equal(thisContext, oController, "Global method should be called with controller as 'this' context when there's no parenthese");
thisContext = "wrong";
// global functions without dot
vResolvedHandler = EventHandlerResolver.resolveEventHandler("someGlobalMethodOnWindow", oController);
vResolvedHandler[0].call(vResolvedHandler[1], oDummyEvent);
assert.equal(thisContext, oController, "Global method without dot should be called with oController as 'this' context when there's no parenthese");
thisContext = "wrong";
// with local variables
vResolvedHandler = EventHandlerResolver.resolveEventHandler("Module.someMethod", oController, {Module: mLocals});
vResolvedHandler[0].call(vResolvedHandler[1], oDummyEvent);
assert.equal(thisContext, oController, "XML-required module should be called with oController as 'this' context when there's no parenthese");
thisContext = "wrong";
});
QUnit.test("'this' context when parentheses are present", function(assert) {
// controller functions
var fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod()", oController)[0];
fnFromController(oDummyEvent);
assert.equal(thisContext, oController, "Controller method should be called with controller as 'this' context");
thisContext = "wrong"; // to make sure non-calls don't accidentally get the correct value
// controller functions without dot
fnFromController = EventHandlerResolver.resolveEventHandler("fnControllerMethod()", oController)[0];
fnFromController(oDummyEvent);
assert.equal(thisContext, oController, "Controller method without dot should be called with controller as 'this' context");
thisContext = "wrong";
// global functions
var fnFromGlobal = EventHandlerResolver.resolveEventHandler("testEventHandlerResolver.subobject.someGlobalMethod()", oController)[0];
fnFromGlobal(oDummyEvent);
assert.equal(thisContext, testEventHandlerResolver.subobject, "Global method should be called with testEventHandlerResolver.subobject as 'this' context");
thisContext = "wrong";
// global functions without dot
fnFromGlobal = EventHandlerResolver.resolveEventHandler("someGlobalMethodOnWindow()", oController)[0];
fnFromGlobal(oDummyEvent);
assert.equal(thisContext, undefined, "Global method without dot should be called with undefined as 'this' context");
thisContext = "wrong";
// global functions with .call()
fnFromGlobal = EventHandlerResolver.resolveEventHandler("testEventHandlerResolver.subobject.someGlobalMethod.call($controller)", oController)[0];
fnFromGlobal(oDummyEvent);
assert.equal(thisContext, oController, "Global method should be called with controller as 'this' context when set using .call($controller)");
thisContext = "wrong";
// with local variables
var fnFromModule = EventHandlerResolver.resolveEventHandler("Module.someMethod()", oController, {Module: mLocals})[0];
fnFromModule(oDummyEvent);
assert.equal(thisContext, mLocals, "XML-required module should be called with the module as 'this' context");
thisContext = "wrong";
// with local variables and with .call()
fnFromModule = EventHandlerResolver.resolveEventHandler("Module.someMethod.call($controller)", oController, {Module: mLocals})[0];
fnFromModule(oDummyEvent);
assert.equal(thisContext, oController, "XML-required module should be called with controller as 'this' context when set using .call($controller)");
thisContext = "wrong";
});
QUnit.module("sap.ui.core.mvc.EventHandlerResolver - parameter resolution", {
beforeEach: function() {
oController = {
fnControllerMethod: function(){},
myFormatter: function() {
return "#" + Array.prototype.slice.call(arguments).join(",") + "#";
}
};
window.testEventHandlerResolver = {
subobject: {
someGlobalMethod: function(){}
}
};
oDummySource = new DummyControl({someControlProperty: "someControlPropertyValue"});
var oModel = new JSONModel({
someModelProperty: "someModelValue",
someDateProperty: '2011-10-29',
someNumberProperty: 49
});
oDummySource.setModel(oModel);
oModel = new JSONModel({
subnode: {
someSecondModelProperty: "someSecondModelValue"
}
});
oDummySource.setModel(oModel, "secondModel");
oDummySource.bindElement({path: "/subnode", model: "secondModel"});
},
afterEach: function() {
oController = null;
window.testEventHandlerResolver = null;
oDummySource.getModel().destroy();
oDummySource.getModel("secondModel").destroy();
oDummySource.destroy();
}
});
QUnit.test("static values", function(assert) {
var spy = sinon.spy(oController, "fnControllerMethod");
var aTests = [
{src: ".fnControllerMethod(\"test\")", expected: "test", message: "Static value with double quotes within double quotes should be correctly given"},
{src: ".fnControllerMethod('test')", expected: "test", message: "Static value with single quotes within double quotes should be correctly given"},
{src: '.fnControllerMethod("test")', expected: "test", message: "Static value with double quotes within single quotes should be correctly given"},
{src: '.fnControllerMethod(\'test\')', expected: "test", message: "Static value with single quotes within single quotes should be correctly given"},
{src: ".fnControllerMethod(true)", expected: true, message: "Boolean static value 'true' should be correctly given"},
{src: ".fnControllerMethod(false)", expected: false, message: "Boolean static value 'false' should be correctly given"},
{src: ".fnControllerMethod(49)", expected: 49, message: "Static number value should be correctly given"},
{src: ".fnControllerMethod(49.95)", expected: 49.95, message: "Static float value should be correctly given"},
{src: ".fnControllerMethod({'x': 'y'})", expected: {'x': 'y'}, message: "Static object value should be correctly given"},
{src: ".fnControllerMethod({x: 'y'})", expected: {'x': 'y'}, message: "Static object value should be correctly given"},
{src: ".fnControllerMethod({x: 'y', z: {a: 1}})", expected: {'x': 'y', z: {a: 1}}, message: "Static object value should be correctly given"},
{src: ".fnControllerMethod(null)", expected: null, message: "Static null value should be correctly given"}
];
var fnFromController;
for (var i = 0; i < aTests.length; i++) {
fnFromController = EventHandlerResolver.resolveEventHandler(aTests[i].src, oController)[0];
fnFromController(oDummyEvent);
assert.deepEqual(spy.args[i], [aTests[i].expected], aTests[i].message);
}
});
QUnit.test("Special value: $controller", function(assert) {
var spy = sinon.spy(oController, "fnControllerMethod");
var fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod($controller)", oController)[0];
fnFromController(oDummyEvent);
assert.deepEqual(spy.args[0], [oController], "Parameter $controller should be given as the controller instance");
});
QUnit.test("Special value: $event", function(assert) {
var spy = sinon.spy(oController, "fnControllerMethod");
var fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod($event)", oController)[0];
fnFromController(oDummyEvent);
assert.deepEqual(spy.args[0], [oDummyEvent], "Parameter $event should be given as the event object");
});
QUnit.test("bound values with controller method", function(assert) {
var spy = sinon.spy(oController, "fnControllerMethod");
var fnFromController;
var mTestSet = {
".fnControllerMethod(${/someModelProperty})": "someModelValue", // plain, absolute binding path
" .fnControllerMethod ( ${/someModelProperty} ) ": "someModelValue", // some whitespace fun
".fnControllerMethod(${secondModel>someSecondModelProperty})": "someSecondModelValue", // relative path using element binding, in named model
".fnControllerMethod(${path:'/someModelProperty'})": "someModelValue", // complex syntax, entry-level
".fnControllerMethod(${path:'/someModelProperty', formatter: '.myFormatter'})": "#someModelValue#", // complex syntax with formatter
".fnControllerMethod(${path:'/someModelProperty', formatter: '.myFormatter', type: 'sap.ui.model.type.String'})": "#someModelValue#", // complex syntax with type
// does not work, deep nesting of parts is not supported in EventHandlerResolver:
//".fnControllerMethod(${parts: ['/someModelProperty'], formatter: '.myFormatter'})": "#someModelValue,someModelValue#", // complex syntax with mixed parts
".fnControllerMethod(${$parameters>/someEventParameter})": "someEventParameterValue", // another model (event parameters)
".fnControllerMethod(${$source>/someControlProperty})": "someControlPropertyValue", // the event source model
".fnControllerMethod('Value is: ' + ${/someModelProperty})": "Value is: someModelValue", // “calculated fields” (template string)
".fnControllerMethod(${/someModelProperty} + ',' + ${/someModelProperty})": "someModelValue,someModelValue", // attention, also a calculated field!
".fnControllerMethod(\"Value is: \" + ${path:'/someModelProperty', formatter: '.myFormatter', type: 'sap.ui.model.type.String'})": "Value is: #someModelValue#", // calculated field with complex binding syntax
// not allowed to use binding expressions inside because the entire string is a binding expression:
//".fnControllerMethod({= ${/someModelProperty} + ${/someModelProperty}})": "someModelValuesomeModelValue", // expression binding
".fnControllerMethod({x: 'y', z: {a: ${/someModelProperty}}})": {x: 'y', z: {a: "someModelValue"}}, // binding in object
'.fnControllerMethod(${path:\'/someModelProperty\',formatter: \'.myFormatter\'})': "#someModelValue#", // single quotes escaped
".fnControllerMethod(${path:\"/someModelProperty\",formatter: \".myFormatter\"})": "#someModelValue#" // double quotes escaped
};
for (var sTestString in mTestSet) {
spy.resetHistory();
fnFromController = EventHandlerResolver.resolveEventHandler(sTestString, oController)[0];
fnFromController(oDummyEvent);
assert.deepEqual(spy.args[0], [mTestSet[sTestString]], "Bound model property value should be correctly calculated for: " + sTestString);
}
});
QUnit.test("bound values with XML-required modules", function(assert) {
var methodSpy = this.spy(mLocals, "someMethod");
var fnFromModule;
var mTestSet = {
"Module.someMethod(${/someModelProperty})": "someModelValue", // plain, absolute binding path
" Module.someMethod ( ${/someModelProperty} ) ": "someModelValue", // some whitespace fun
"Module.someMethod(${secondModel>someSecondModelProperty})": "someSecondModelValue", // relative path using element binding, in named model
"Module.someMethod(${path:'/someModelProperty'})": "someModelValue", // complex syntax, entry-level
"Module.someMethod(${path:'/someModelProperty', formatter: 'Module.someFormatter'})": "#someModelValue#", // complex syntax with formatter
"Module.someMethod(${path:'/someModelProperty', formatter: 'Module.someFormatter', type: 'sap.ui.model.type.String'})": "#someModelValue#", // complex syntax with type
// does not work, deep nesting of parts is not supported in EventHandlerResolver:
//"Module.someMethod(${parts: ['/someModelProperty'], formatter: 'Module.someFormatter'})": "#someModelValue,someModelValue#", // complex syntax with mixed parts
"Module.someMethod(${$parameters>/someEventParameter})": "someEventParameterValue", // another model (event parameters)
"Module.someMethod(${$source>/someControlProperty})": "someControlPropertyValue", // the event source model
"Module.someMethod('Value is: ' + ${/someModelProperty})": "Value is: someModelValue", // “calculated fields” (template string)
"Module.someMethod(${/someModelProperty} + ',' + ${/someModelProperty})": "someModelValue,someModelValue", // attention, also a calculated field!
"Module.someMethod(\"Value is: \" + ${path:'/someModelProperty', formatter: 'Module.someFormatter', type: 'sap.ui.model.type.String'})": "Value is: #someModelValue#", // calculated field with complex binding syntax
// not allowed to use binding expressions inside because the entire string is a binding expression:
//"Module.someMethod({= ${/someModelProperty} + ${/someModelProperty}})": "someModelValuesomeModelValue", // expression binding
"Module.someMethod({x: 'y', z: {a: ${/someModelProperty}}})": {x: 'y', z: {a: "someModelValue"}}, // binding in object
'Module.someMethod(${path:\'/someModelProperty\',formatter: \'Module.someFormatter\'})': "#someModelValue#", // single quotes escaped
"Module.someMethod(${path:\"/someModelProperty\",formatter: \"Module.someFormatter\"})": "#someModelValue#" // double quotes escaped
};
for (var sTestString in mTestSet) {
methodSpy.resetHistory();
fnFromModule = EventHandlerResolver.resolveEventHandler(sTestString, oController, {Module: mLocals})[0];
fnFromModule(oDummyEvent);
assert.deepEqual(methodSpy.args[0], [mTestSet[sTestString]], "Bound model property value should be correctly calculated for: " + sTestString);
}
});
QUnit.test("multiple parameters", function(assert) {
var spy = sinon.spy(oController, "fnControllerMethod");
var fnFromController;
var mTestSet = { // now the values are arrays
".fnControllerMethod('test',${/someModelProperty})": ["test", "someModelValue"], // two parameters
".fnControllerMethod( 'test' , ${/someModelProperty} )": ["test", "someModelValue"] // some whitespace fun
};
for (var sTestString in mTestSet) {
spy.resetHistory();
fnFromController = EventHandlerResolver.resolveEventHandler(sTestString, oController)[0];
fnFromController(oDummyEvent);
assert.deepEqual(spy.args[0], mTestSet[sTestString], "Bound model property value should be correctly calculated for: " + sTestString);
}
});
QUnit.test("types", function(assert) {
var spy = sinon.spy(oController, "fnControllerMethod");
var fnFromController;
var mTestSet = {
".fnControllerMethod(${path:'/someNumberProperty', type: 'sap.ui.model.type.Integer', targetType: 'int'})": 49, // complex syntax with type
".fnControllerMethod(${path:'/someNumberProperty', type: 'sap.ui.model.type.Integer', targetType: 'string'})": "49", // type conversion
".fnControllerMethod(${path:'/someDateProperty', type: 'sap.ui.model.type.Date', formatOptions: {pattern: 'dd.MM.yyyy',source: {pattern: 'yyyy-MM-dd'}}})": "29.10.2011" // type with format options
};
for (var sTestString in mTestSet) {
spy.resetHistory();
fnFromController = EventHandlerResolver.resolveEventHandler(sTestString, oController)[0];
fnFromController(oDummyEvent);
assert.strictEqual(spy.args[0][0], mTestSet[sTestString], "Bound model property value should be correctly calculated for: " + sTestString);
}
});
QUnit.test("error cases (and edge cases)", function(assert) {
var fnFromController;
// unclosed braces
assert.throws(function(){
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod(${/someModelProperty)", oController)[0];
fnFromController(oDummyEvent);
} , function(err){
return err.message.indexOf("no closing braces found") > -1;
}, "Correct error should be thrown for non-matching braces");
// unresolvable formatter
var spy = sinon.spy(Log, "error");
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod(${path:'/someModelProperty', formatter: '.myNotExistingFormatter'})", oController)[0];
fnFromController(oDummyEvent);
assert.equal(spy.callCount, 1, "Error should be logged for unresolvable formatter");
assert.ok(spy.args[0][0].indexOf("formatter function .myNotExistingFormatter not found") > -1, "Error should be logged for unresolvable formatter");
// globals within the expression
spy = sinon.spy(Log, "warning");
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod(Math.max(1))", oController)[0];
fnFromController(oDummyEvent);
assert.equal(spy.callCount, 2, "Warning should be logged for globals inside parameter section");
assert.ok(spy.args[0][0].indexOf("Unsupported global identifier") > -1, "Warning should be logged for globals inside parameter section");
Log.warning.restore();
// wrong expression syntax
assert.throws(function(){
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod(${/someModelProperty} + {/someModelProperty})", oController)[0];
fnFromController(oDummyEvent);
}, function(err){
return err.message.indexOf("Expected IDENTIFIER") > -1;
}, "Error should be thrown for expression syntax error");
// no expressions within
spy = sinon.spy(Log, "warning");
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod({= 'abc'})", oController)[0];
assert.equal(spy.callCount, 1, "Warning should be logged for expressions inside parameter section");
assert.ok(spy.args[0][0].indexOf("event handler parameter contains a binding expression") > -1, "Warning should be logged for expressions inside parameter section");
Log.warning.restore();
assert.throws(function(){
fnFromController(oDummyEvent);
}, function(err){
return true; // browser-dependent message
}, "Error should be thrown for expressions inside parameter section");
// starting with a brace
assert.throws(function(){
fnFromController = EventHandlerResolver.resolveEventHandler("(${/someModelProperty})", oController)[0];
fnFromController(oDummyEvent);
}, function(err){
return err.message.indexOf("starts with a bracket") > -1;
}, "Error should be thrown when starting with a bracket");
// wrong binding path
/* TODO: help the user detect such issues without making too much noise when an empty value is perfectly fine
spy = sinon.spy(Log, "warning");
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod(${/thisdoesnotExist})", oController)[0];
fnFromController(oDummyEvent);
assert.equal(spy.callCount, 1, "Warning should be logged for empty values (which may indicate wrong bindings)");
assert.ok(spy.args[0][0].indexOf("EventHandlerResolver: no value was returned") > -1, "Warning should be logged for empty values (which may indicate wrong bindings)");
*/
// too many closing braces
assert.throws(function(){
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod(${/someModelProperty}})", oController)[0];
fnFromController(oDummyEvent);
}, function(err){
return err.message.indexOf("but instead saw }") > -1;
}, "Error should be thrown for too many closing braces");
// non-closed single quotes
assert.throws(function(){
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod('x)", oController)[0];
fnFromController(oDummyEvent);
}, function(err){
return err.message.indexOf("Bad") > -1;
}, "Error should be thrown for non-closed single quotes");
// non-closed double quotes
assert.throws(function(){
fnFromController = EventHandlerResolver.resolveEventHandler(".fnControllerMethod(\"x)", oController)[0];
fnFromController(oDummyEvent);
}, function(err){
return err.message.indexOf("Bad") > -1;
}, "Error should be thrown for non-closed double quotes");
});
// ---------------------------------------------------------------------------
// EventHandlerResolver.parse() splits the raw value of a view's event
// attribute into the individual handler definition strings.
// ---------------------------------------------------------------------------
QUnit.module("sap.ui.core.mvc.EventHandlerResolver - parse()");

QUnit.test("one event handler", function (assert) {
	// A single controller-relative handler name is returned unchanged.
	assert.deepEqual(EventHandlerResolver.parse(".fnControllerMethod"), [".fnControllerMethod"]);
});

QUnit.test("several event handlers", function (assert) {
	// Handlers are separated by semicolons; surrounding whitespace is dropped.
	assert.deepEqual(
		EventHandlerResolver.parse(".fnControllerMethod; globalFunction"),
		[".fnControllerMethod", "globalFunction"]
	);
});

QUnit.test("several event handlers with trailing semicolon", function (assert) {
	// A trailing semicolon must not produce an empty extra entry.
	assert.deepEqual(
		EventHandlerResolver.parse(".fnControllerMethod; globalFunction;"),
		[".fnControllerMethod", "globalFunction"]
	);
});

QUnit.test("several event handlers with parameters", function (assert) {
	// Semicolons may only split top-level entries; the binding syntax inside
	// the parameter section must be kept intact.
	assert.deepEqual(
		EventHandlerResolver.parse(".fnControllerMethod; .fnControllerMethod(${ path:'/someModelProperty', formatter: '.myFormatter', type: 'sap.ui.model.type.String'} ); globalFunction"),
		[".fnControllerMethod", ".fnControllerMethod(${ path:'/someModelProperty', formatter: '.myFormatter', type: 'sap.ui.model.type.String'} )", "globalFunction"]
	);
});

QUnit.test("several event handlers with parameters and string literals", function (assert) {
	// Semicolons and escaped quotes inside string literals must not split either.
	assert.deepEqual(
		EventHandlerResolver.parse(".fnControllerMethod('bad);luck'); .fnControllerMethod(\"\\\");\"); globalFunction"),
		[".fnControllerMethod('bad);luck')", ".fnControllerMethod(\"\\\");\")", "globalFunction"]
	);
});
});
| SAP/openui5 | src/sap.ui.core/test/sap/ui/core/qunit/mvc/EventHandlerResolver.qunit.js | JavaScript | apache-2.0 | 27,088 |
/*
* Copyright 2017 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.rendering.dag.nodes;
import org.terasology.assets.ResourceUrn;
import org.terasology.config.Config;
import org.terasology.config.RenderingConfig;
import org.terasology.context.Context;
import org.terasology.rendering.nui.properties.Range;
import org.terasology.rendering.opengl.FBOConfig;
import org.terasology.rendering.opengl.fbms.DisplayResolutionDependentFBOs;
/**
* This class is a thin facade in front of the BlurNode class it inherits from.
* The term "late" is due to the fact that this type of nodes is used near the
* end of the rendering process leading to an image shown on the user display.
*
* Given an input FBO a blurred version of it will be stored in the given output FBO.
* Eventually the blurred version can be used for blur-based effects such as
* Depth of Field.
*
* For more information on Blur: https://en.wikipedia.org/wiki/Box_blur
* For more information on DoF: http://en.wikipedia.org/wiki/Depth_of_field
*/
public class LateBlurNode extends BlurNode {

    public static final ResourceUrn FIRST_LATE_BLUR_FBO = new ResourceUrn("engine:fbo.firstLateBlur");
    public static final ResourceUrn SECOND_LATE_BLUR_FBO = new ResourceUrn("engine:fbo.secondLateBlur");

    // Scales the user-configured blur intensity into the actual blur radius.
    @Range(min = 0.0f, max = 16.0f)
    private static final float OVERALL_BLUR_RADIUS_FACTOR = 0.8f;

    private RenderingConfig renderingConfig;

    /**
     * Constructs a LateBlurNode instance.
     *
     * @param inputFboConfig an FBOConfig describing the input FBO, to be retrieved from an injected DisplayResolutionDependentFBOs instance.
     * @param outputFboConfig an FBOConfig describing the output FBO, to be retrieved from an injected DisplayResolutionDependentFBOs instance.
     * @param label a String to label the instance's entry in output generated by the PerformanceMonitor
     */
    public LateBlurNode(Context context, FBOConfig inputFboConfig, FBOConfig outputFboConfig, String label) {
        // The superclass is given a radius of 0 because renderingConfig is only
        // available after setupConditions() has run (invoked via super()).
        super(context, inputFboConfig, outputFboConfig, context.get(DisplayResolutionDependentFBOs.class), 0, label); // note: blurRadius is 0.0 at this stage.
        updateBlurRadius(); // only here blurRadius is properly set.
    }

    /**
     * This method establishes the conditions in which the blur will take place, by enabling or disabling the node.
     *
     * In this particular case the node is enabled if RenderingConfig.getBlurIntensity is not 0 - or blur is enabled.
     */
    @Override
    protected void setupConditions(Context context) {
        renderingConfig = context.get(Config.class).getRendering();
        // Subscribing makes the config call update() when BLUR_INTENSITY changes.
        renderingConfig.subscribe(RenderingConfig.BLUR_INTENSITY, this);
        requiresCondition(() -> renderingConfig.getBlurIntensity() != 0);
    }

    /**
     * An instance of this class is a subscriber to an FBOManager and to the RenderingConfig. The first
     * invokes this method when FBOs have been regenerated (i.e. after resizing the display), while the
     * second does it when its BLUR_INTENSITY parameter changes.
     */
    @Override
    public void update() {
        super.update();
        updateBlurRadius();
    }

    private void updateBlurRadius() {
        // Math.max(1, ...) floors the intensity at 1 — presumably so that an
        // enabled node always blurs at least a little; TODO confirm intent,
        // since intensity 0 disables the node entirely via setupConditions().
        this.blurRadius = OVERALL_BLUR_RADIUS_FACTOR * Math.max(1, renderingConfig.getBlurIntensity());
    }
}
| Vizaxo/Terasology | engine/src/main/java/org/terasology/rendering/dag/nodes/LateBlurNode.java | Java | apache-2.0 | 3,861 |
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Techradar
  # Rails application class for Techradar. Only framework defaults are used;
  # per-environment overrides live in config/environments/*.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
| patdeegan/techradar | config/application.rb | Ruby | apache-2.0 | 986 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CloudStorage.Azure {
    /// <summary>
    /// Plain data holder describing a single Azure storage blob.
    /// </summary>
    public class Blob {
        /// <summary>Numeric index of this blob; semantics depend on the caller — confirm against usage.</summary>
        public Int32 Index { get; set; }

        /// <summary>Name of the blob.</summary>
        public String Name { get; set; }

        /// <summary>Additional blob properties (see the <c>Properties</c> type).</summary>
        public Properties Properties { get; set; }
    }
}
| jusfr/CloudStorage | src/CloudStorage.Azure/Blob.cs | C# | apache-2.0 | 329 |
import { Constructor } from '../type';
import { Serializer as ISerializer, SerializersFactory } from '../serializers';
import { NoSerializerError } from '../errors';
import { __FIELD_SERIALIZER_METADATA_KEY } from '../metadata/metadata.keys';
import { defineFieldName } from '../metadata/define-field-name';
/**
 * Defines a serializer for any serialization or deserialization of your field
 */
export function Type(): PropertyDecorator;
/**
 * Defines a serializer by passed parameter @param modelType
 * Try to find a serializer for your model type in {@link SerializersFactory}
 *
 * @throws {NoSerializerError} if serializer was missing
 * @param modelType Constructor of model which registered as {@Model}
 */
export function Type<T extends Object>(modelType: Constructor<T>): PropertyDecorator;
/**
 * Defines your custom serializer for this field
 * It will be used for serialization and deserialization
 *
 * @param serializer Custom serializer for this field only!
 */
export function Type<T extends Object>(serializer: ISerializer<T>): PropertyDecorator;
export function Type<T extends Object>(serializerOrType?: Constructor<T> | ISerializer<T>): PropertyDecorator {
    // Implementation shared by all three overloads: resolve the serializer
    // once per decorated field and record it in the field's metadata.
    return function (target: Object, propertyKey: string | symbol): void {
        const fieldName = propertyKey.toString();
        // Reflected design-time type of the property; used as the fallback
        // when neither an explicit model type nor a serializer was passed.
        const reflectedType: Constructor<T> = Reflect.getMetadata('design:type', target, propertyKey);
        const serializer = getSerializerFromParams(reflectedType, fieldName, serializerOrType);
        Reflect.defineMetadata(__FIELD_SERIALIZER_METADATA_KEY, serializer, target, fieldName);
        defineFieldName(target, fieldName);
    };
}
function getSerializerFromParams<T>(
    defaultType: Constructor<T>,
    propertyName: string,
    serializerOrType?: Constructor<T> | ISerializer<T>
): ISerializer<T> {
    // A non-function object argument is a ready-made custom serializer
    // instance — use it directly.
    if (typeof serializerOrType === 'object') {
        return serializerOrType;
    }
    // Otherwise the argument (if any) is a model constructor; fall back to
    // the reflected design type when the decorator was called without one.
    const modelType = serializerOrType || defaultType;
    return getSerializerForType(modelType, propertyName);
}
/**
 * Resolves the registered serializer for a model type.
 *
 * @throws {NoSerializerError} if no serializer for this type was registered
 * @throws {Error} if the type is undefined (i.e. it could not be determined)
 * @param type Model type
 * @param propertyName Name of the decorated field, used in error messages
 */
function getSerializerForType<T>(type: Constructor<T>, propertyName: string): ISerializer<T> {
    if (type === undefined) {
        // Fixed message typo: previously read "Count find type".
        throw new Error('Could not find type for field: ' + propertyName);
    }
    const serializer = SerializersFactory.instance.getSerializer(type);
    if (serializer === undefined) {
        throw new NoSerializerError(propertyName);
    }
    return serializer;
}
| AndreyZelenskiy/serialize | src/decorators/type.decorator.ts | TypeScript | apache-2.0 | 2,381 |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.mdm.agent.proxy.beans;
import org.json.JSONObject;
import org.wso2.mdm.agent.proxy.utils.Constants.HTTP_METHODS;
import java.util.Map;
/**
 * Value object describing a single HTTP call: the endpoint URL, the HTTP
 * method, and the request parameters either as a JSON payload or as plain
 * key/value pairs.
 */
public class EndPointInfo {

    /** HTTP verb used when invoking the endpoint. */
    private HTTP_METHODS httpMethod;
    /** URL of the endpoint to invoke. */
    private String endPoint;
    /** Request payload as a JSON object, when the call carries a JSON body. */
    private JSONObject requestParams;
    /** Request parameters as plain key/value pairs. */
    private Map<String, String> requestParamsMap;

    public String getEndPoint() {
        return endPoint;
    }

    public void setEndPoint(String endPoint) {
        this.endPoint = endPoint;
    }

    public HTTP_METHODS getHttpMethod() {
        return httpMethod;
    }

    public void setHttpMethod(HTTP_METHODS httpMethod) {
        this.httpMethod = httpMethod;
    }

    public JSONObject getRequestParams() {
        return requestParams;
    }

    public void setRequestParams(JSONObject requestParams) {
        this.requestParams = requestParams;
    }

    public Map<String, String> getRequestParamsMap() {
        return requestParamsMap;
    }

    public void setRequestParamsMap(Map<String, String> requestParamsMap) {
        this.requestParamsMap = requestParamsMap;
    }
}
| ayyoob/product-iot-server | modules/tools/mdm-android-agent-archetype/src/main/resources/archetype-resources/src/main/plugins/IDPProxy/src/main/java/org/wso2/mdm/agent/proxy/beans/EndPointInfo.java | Java | apache-2.0 | 1,648 |
#!/usr/bin/env python
# encoding: utf-8
"""
populate_rango.py
Created by Luis C. Berrocal on 2013-10-20.
Copyright (c) 2013 __MyCompanyName__. All rights reserved.
"""
import os
import sys
def populate():
python_cat = add_cat('Python', 128,64)
add_page(cat=python_cat,
title="Official Python Tutorial",
url="http://docs.python.org/2/tutorial/")
add_page(cat=python_cat,
title="How to Think like a Computer Scientist",
url="http://www.greenteapress.com/thinkpython/")
add_page(cat=python_cat,
title="Learn Python in 10 Minutes",
url="http://www.korokithakis.net/tutorials/python/")
django_cat = add_cat("Django",64, 32)
add_page(cat=django_cat,
title="Official Django Tutorial",
url="https://docs.djangoproject.com/en/1.5/intro/tutorial01/")
add_page(cat=django_cat,
title="Django Rocks",
url="http://www.djangorocks.com/")
add_page(cat=django_cat,
title="How to Tango with Django",
url="http://www.tangowithdjango.com/")
frame_cat = add_cat("Other Frameworks", 32, 16)
add_page(cat=frame_cat,
title="Bottle",
url="http://bottlepy.org/docs/dev/")
add_page(cat=frame_cat,
title="Flask",
url="http://flask.pocoo.org")
# Print out what we have added to the user.
for c in Category.objects.all():
for p in Page.objects.filter(category=c):
print "- {0} - {1}".format(str(c), str(p))
def add_page(cat, title, url, views=0):
    """Fetch or create a Page for the given category and return it."""
    page, _created = Page.objects.get_or_create(
        category=cat, title=title, url=url, views=views)
    return page
def add_cat(name, views, likes):
    """Fetch or create a Category with the given counters and return it."""
    category, _created = Category.objects.get_or_create(
        name=name, views=views, likes=likes)
    return category
# Start execution here!
if __name__ == '__main__':
    print "Starting Rango population script..."
    # DJANGO_SETTINGS_MODULE must be configured before any app models are
    # imported, which is why the import is placed inside this guard rather
    # than at the top of the file.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tango_with_django_project.settings')
    from rango.models import Category, Page
    populate()
| luiscberrocal/rango_tutorial | populate_rango.py | Python | apache-2.0 | 2,007 |
module Rtermsuite
  module Pipeline
    # Thin JRuby facade over the Java TermSuite pipeline: builds a term
    # spotting pipeline (tokenizer, TreeTagger, URL filter, stemmer, regex
    # spotter) for one language and exposes it as a streaming interface.
    class SpotterStream
      # lang - language code handed to the Java pipeline factory.
      # Extra positional arguments are accepted but currently unused.
      def initialize lang, *args
        @lang = lang
        # Assemble the chain of annotation engines, configured from the
        # gem-wide Rtermsuite.configuration settings.
        @tsp = Java::EuProjectTtcTools::TermSuitePipeline.create(@lang, "file:")
          .setResourcePath(Rtermsuite.configuration.resource_path)
          .aeWordTokenizer()
          .setTreeTaggerHome(Rtermsuite.configuration.treetagger_home)
          .aeTreeTagger()
          .aeUrlFilter()
          .aeStemmer()
          .aeRegexSpotter()
      end

      # Starts streaming. The given block is invoked once per processed CAS;
      # returns a JRubyStream to which documents can be pushed.
      def stream &consumer_block
        java_provider = @tsp.stream(JRubyConsumer.new(consumer_block))
        JRubyStream.new(java_provider)
      end

      private

      # Adapts a Ruby hash ({uri:, text:}) to TermSuite's CollectionDocument
      # Java interface.
      class JRubyDocument
        include Java::EuProjectTtcReaders::CollectionDocument

        def initialize args={}
          @h = args
        end

        def getUri
          @h[:uri]
        end

        def getText
          @h[:text]
        end
      end

      # Ruby-side handle on the Java document stream returned by the pipeline.
      class JRubyStream
        def initialize jstream
          @jstream = jstream
        end

        # opts - hash with :uri and :text for the document to enqueue.
        def add_document opts
          @jstream.add_document JRubyDocument.new(opts)
        end

        # Flushes any buffered documents through the pipeline.
        def flush
          @jstream.flush
        end
      end

      # Adapts a Ruby block to TermSuite's CasConsumer Java interface.
      class JRubyConsumer
        include Java::EuProjectTtcStream.CasConsumer

        def initialize block
          @consumer_block = block
        end

        def consume ts_cas
          @consumer_block.call ts_cas
        end
      end
    end
  end
end
| termsuite/rtermsuite | lib/rtermsuite/pipeline/streaming_spotter.rb | Ruby | apache-2.0 | 1,451 |
package lithium.classloadertest;
/**
 * Unchecked exception signalling a failure in the class-loader test harness.
 */
public class ClassLoaderException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * Creates an exception with a detail message only.
     *
     * @param message description of the failure
     */
    public ClassLoaderException(String message) {
        super(message);
    }

    /**
     * Creates an exception with a detail message and an underlying cause.
     *
     * @param message description of the failure
     * @param cause   the original exception that triggered this one
     */
    public ClassLoaderException(String message, Throwable cause) {
        super(message, cause);
    }
}
| lithiumtech/multiverse-test | src/main/java/lithium/classloadertest/ClassLoaderException.java | Java | apache-2.0 | 285 |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.ejb.entity;
import javax.ejb.CreateException;
import javax.ejb.EntityBean;
import javax.ejb.RemoveException;
import org.apache.log4j.Logger;
import org.dcm4chex.archive.ejb.interfaces.FileDTO;
import org.dcm4chex.archive.ejb.interfaces.FileSystemLocal;
import org.dcm4chex.archive.ejb.interfaces.InstanceLocal;
import org.dcm4chex.archive.ejb.interfaces.MD5;
/**
* @author <a href="mailto:gunter@tiani.com">Gunter Zeilinger</a>
* @version $Revision: 2398 $ $Date: 2006-04-06 00:56:57 +0800 (周四, 06 4月 2006) $
*
* @ejb.bean name="File" type="CMP" view-type="local" primkey-field="pk"
* local-jndi-name="ejb/File"
* @ejb.persistence table-name="files"
* @ejb.transaction type="Required"
* @jboss.entity-command name="hsqldb-fetch-key"
* @jboss.audit-created-time field-name="createdTime"
*
* @ejb.finder signature="java.util.Collection findDereferencedInFileSystem(java.lang.String dirPath, int limit)"
* query="" transaction-type="Supports"
* @jboss.query signature="java.util.Collection findDereferencedInFileSystem(java.lang.String dirPath, int limit)"
* query="SELECT OBJECT(f) FROM File AS f WHERE f.instance IS NULL AND f.fileSystem.directoryPath = ?1 LIMIT ?2"
* strategy="on-find" eager-load-group="*"
* @ejb.finder signature="java.util.Collection findFilesToCompress(java.lang.String dirPath, java.lang.String cuid, java.sql.Timestamp before, int limit)"
* query="" transaction-type="Supports"
* @jboss.query signature="java.util.Collection findFilesToCompress(java.lang.String dirPath, java.lang.String cuid, java.sql.Timestamp before, int limit)"
* query="SELECT OBJECT(f) FROM File AS f WHERE f.fileStatus = 0 AND f.fileTsuid IN ('1.2.840.10008.1.2','1.2.840.10008.1.2.1','1.2.840.10008.1.2.2') AND f.fileSystem.directoryPath = ?1 AND f.instance.sopCuid = ?2 AND (f.createdTime IS NULL OR f.createdTime < ?3) LIMIT ?4"
* strategy="on-find" eager-load-group="*"
* @ejb.finder signature="java.util.Collection findToCheckMd5(java.lang.String dirPath, java.sql.Timestamp before, int limit)"
* query="" transaction-type="Supports"
* @jboss.query signature="java.util.Collection findToCheckMd5(java.lang.String dirPath, java.sql.Timestamp before, int limit)"
* query="SELECT OBJECT(f) FROM File AS f WHERE f.fileSystem.directoryPath = ?1 AND f.fileMd5Field IS NOT NULL AND (f.timeOfLastMd5Check IS NULL OR f.timeOfLastMd5Check < ?2) LIMIT ?3"
* strategy="on-find" eager-load-group="*"
* @ejb.finder signature="java.util.Collection findByStatusAndFileSystem(java.lang.String dirPath, int status, java.sql.Timestamp before, int limit)"
* query="" transaction-type="Supports"
* @jboss.query signature="java.util.Collection findByStatusAndFileSystem(java.lang.String dirPath, int status, java.sql.Timestamp before, int limit)"
* query="SELECT OBJECT(f) FROM File AS f WHERE f.fileSystem.directoryPath = ?1 AND f.fileStatus = ?2 AND (f.createdTime IS NULL OR f.createdTime < ?3) ORDER BY f.pk LIMIT ?4"
* strategy="on-find" eager-load-group="*"
*/
public abstract class FileBean implements EntityBean {

    private static final Logger log = Logger.getLogger(FileBean.class);

    /**
     * Auto-generated Primary Key
     *
     * @ejb.interface-method
     * @ejb.pk-field
     * @ejb.persistence column-name="pk"
     * @jboss.persistence auto-increment="true"
     */
    public abstract Integer getPk();

    public abstract void setPk(Integer pk);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="created_time"
     */
    public abstract java.sql.Timestamp getCreatedTime();

    public abstract void setCreatedTime(java.sql.Timestamp time);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="md5_check_time"
     */
    public abstract java.sql.Timestamp getTimeOfLastMd5Check();

    /**
     * @ejb.interface-method
     */
    public abstract void setTimeOfLastMd5Check(java.sql.Timestamp time);

    /**
     * File Path (relative path to Directory).
     *
     * @ejb.interface-method
     * @ejb.persistence column-name="filepath"
     */
    public abstract String getFilePath();

    public abstract void setFilePath(String path);

    /**
     * Transfer Syntax UID
     *
     * @ejb.interface-method
     * @ejb.persistence column-name="file_tsuid"
     */
    public abstract String getFileTsuid();

    public abstract void setFileTsuid(String tsuid);

    /**
     * MD5 checksum as hex string
     *
     * @ejb.interface-method
     * @ejb.persistence column-name="file_md5"
     */
    public abstract String getFileMd5Field();

    public abstract void setFileMd5Field(String md5);

    /**
     * @ejb.interface-method
     * @ejb.persistence column-name="file_status"
     */
    public abstract int getFileStatus();

    /**
     * @ejb.interface-method
     */
    public abstract void setFileStatus(int status);

    /**
     * MD5 checksum in binary format
     *
     * @ejb.interface-method
     */
    public byte[] getFileMd5() {
        // Convenience view of the hex string column as raw bytes.
        return MD5.toBytes(getFileMd5Field());
    }

    public void setFileMd5(byte[] md5) {
        setFileMd5Field(MD5.toString(md5));
    }

    /**
     * File Size
     *
     * @ejb.interface-method
     * @ejb.persistence column-name="file_size"
     */
    public abstract int getFileSize();

    public abstract void setFileSize(int size);

    /**
     * @ejb.interface-method
     * @ejb.relation name="instance-files"
     *               role-name="files-of-instance"
     * @jboss.relation fk-column="instance_fk"
     *                 related-pk-field="pk"
     */
    public abstract void setInstance(InstanceLocal inst);

    /**
     * @ejb.interface-method
     */
    public abstract InstanceLocal getInstance();

    /**
     * @ejb.interface-method
     * @ejb.relation name="filesystem-files"
     *               role-name="files-of-filesystem"
     *               target-role-name="filesystem-of-file"
     *               target-ejb="FileSystem"
     *               target-multiple="yes"
     * @jboss.relation fk-column="filesystem_fk"
     *                 related-pk-field="pk"
     */
    public abstract void setFileSystem(FileSystemLocal fs);

    /**
     * @ejb.interface-method
     */
    public abstract FileSystemLocal getFileSystem();

    /**
     * @ejb.interface-method
     */
    public boolean isRedundant() {
        // A file is redundant when it is no longer referenced by an instance,
        // or its instance has more than one file copy.
        InstanceLocal inst = getInstance();
        return inst == null || inst.getFiles().size() > 1;
    }

    /**
     * @ejb.interface-method
     * @jboss.method-attributes read-only="true"
     */
    public FileDTO getFileDTO() {
        // Assemble a detached transfer object combining this file's columns
        // with retrieve information from its file system.
        FileSystemLocal fs = getFileSystem();
        FileDTO retval = new FileDTO();
        retval.setPk(getPk().intValue());
        retval.setRetrieveAET(fs.getRetrieveAET());
        retval.setDirectoryPath(fs.getDirectoryPath());
        retval.setAvailability(fs.getAvailability());
        retval.setUserInfo(fs.getUserInfo());
        retval.setFilePath(getFilePath());
        retval.setFileTsuid(getFileTsuid());
        retval.setFileSize(getFileSize());
        retval.setFileMd5(getFileMd5());
        retval.setFileStatus(getFileStatus());
        InstanceLocal inst = getInstance();
        if (inst != null)
            // SOP Class UID is only available while an instance reference exists.
            retval.setSopClassUID(inst.getSopCuid());
        return retval;
    }

    /**
     * @ejb.interface-method
     */
    public String asString() {
        return prompt();
    }

    private String prompt() {
        return "File[pk=" + getPk() + ", filepath=" + getFilePath()
                + ", tsuid=" + getFileTsuid() + ", filesystem->"
                + getFileSystem() + ", inst->" + getInstance() + "]";
    }

    /**
     * Create file.
     *
     * @ejb.create-method
     */
    public Integer ejbCreate(String path, String tsuid, int size, byte[] md5,
            int status, InstanceLocal instance, FileSystemLocal filesystem)
            throws CreateException {
        setFilePath(path);
        setFileTsuid(tsuid);
        setFileSize(size);
        setFileMd5(md5);
        setFileStatus(status);
        // CMP 2.x convention: return null; the container assigns the
        // auto-incremented primary key (see @jboss.persistence above).
        return null;
    }

    public void ejbPostCreate(String path, String tsuid, int size, byte[] md5,
            int status, InstanceLocal instance, FileSystemLocal filesystem)
            throws CreateException {
        // CMR (relationship) fields may only be set after the entity exists,
        // which is why instance/filesystem are wired here and not in ejbCreate.
        setInstance(instance);
        setFileSystem(filesystem);
        log.info("Created " + prompt());
    }

    public void ejbRemove() throws RemoveException {
        log.info("Deleting " + prompt());
    }
}
| medicayun/medicayundicom | dcm4jboss-all/branches/DCM4JBOSS_2_7_BRANCH/dcm4jboss-ejb/src/java/org/dcm4chex/archive/ejb/entity/FileBean.java | Java | apache-2.0 | 10,510 |
package com.logginghub.utils;
/**
 * Empty generic base class. T is presumably the element type produced by
 * concrete sources — confirm against subclasses; no behaviour is defined here.
 */
public class AbstractSource<T> {
}
| logginghub/core | logginghub-utils/src/main/java/com/logginghub/utils/AbstractSource.java | Java | apache-2.0 | 72 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigatewayv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents an integration response.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class IntegrationResponse implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Supported only for WebSocket APIs. Specifies how to handle response payload content type conversions. Supported
* values are CONVERT_TO_BINARY and CONVERT_TO_TEXT, with the following behaviors:
* </p>
* <p>
* CONVERT_TO_BINARY: Converts a response payload from a Base64-encoded string to the corresponding binary blob.
* </p>
* <p>
* CONVERT_TO_TEXT: Converts a response payload from a binary blob to a Base64-encoded string.
* </p>
* <p>
* If this property is not defined, the response payload will be passed through from the integration response to the
* route response or method response without modification.
* </p>
*/
private String contentHandlingStrategy;
/**
* <p>
* The integration response ID.
* </p>
*/
private String integrationResponseId;
/**
* <p>
* The integration response key.
* </p>
*/
private String integrationResponseKey;
/**
* <p>
* A key-value map specifying response parameters that are passed to the method response from the backend. The key
* is a method response header parameter name and the mapped value is an integration response header value, a static
* value enclosed within a pair of single quotes, or a JSON expression from the integration response body. The
* mapping key must match the pattern of method.response.header.{name}, where name is a valid and unique header
* name. The mapped non-static value must match the pattern of integration.response.header.{name} or
* integration.response.body.{JSON-expression}, where name is a valid and unique response header name and
* JSON-expression is a valid JSON expression without the $ prefix.
* </p>
*/
private java.util.Map<String, String> responseParameters;
/**
* <p>
* The collection of response templates for the integration response as a string-to-string map of key-value pairs.
* Response templates are represented as a key/value map, with a content-type as the key and a template as the
* value.
* </p>
*/
private java.util.Map<String, String> responseTemplates;
/**
* <p>
* The template selection expressions for the integration response.
* </p>
*/
private String templateSelectionExpression;
/**
* <p>
* Supported only for WebSocket APIs. Specifies how to handle response payload content type conversions. Supported
* values are CONVERT_TO_BINARY and CONVERT_TO_TEXT, with the following behaviors:
* </p>
* <p>
* CONVERT_TO_BINARY: Converts a response payload from a Base64-encoded string to the corresponding binary blob.
* </p>
* <p>
* CONVERT_TO_TEXT: Converts a response payload from a binary blob to a Base64-encoded string.
* </p>
* <p>
* If this property is not defined, the response payload will be passed through from the integration response to the
* route response or method response without modification.
* </p>
*
* @param contentHandlingStrategy
* Supported only for WebSocket APIs. Specifies how to handle response payload content type conversions.
* Supported values are CONVERT_TO_BINARY and CONVERT_TO_TEXT, with the following behaviors:</p>
* <p>
* CONVERT_TO_BINARY: Converts a response payload from a Base64-encoded string to the corresponding binary
* blob.
* </p>
* <p>
* CONVERT_TO_TEXT: Converts a response payload from a binary blob to a Base64-encoded string.
* </p>
* <p>
* If this property is not defined, the response payload will be passed through from the integration response
* to the route response or method response without modification.
* @see ContentHandlingStrategy
*/
public void setContentHandlingStrategy(String contentHandlingStrategy) {
    // Generated setter: stores the raw strategy string
    // (see ContentHandlingStrategy for the known values).
    this.contentHandlingStrategy = contentHandlingStrategy;
}
/**
* <p>
* Supported only for WebSocket APIs. Specifies how to handle response payload content type conversions. Supported
* values are CONVERT_TO_BINARY and CONVERT_TO_TEXT, with the following behaviors:
* </p>
* <p>
* CONVERT_TO_BINARY: Converts a response payload from a Base64-encoded string to the corresponding binary blob.
* </p>
* <p>
* CONVERT_TO_TEXT: Converts a response payload from a binary blob to a Base64-encoded string.
* </p>
* <p>
* If this property is not defined, the response payload will be passed through from the integration response to the
* route response or method response without modification.
* </p>
*
* @return Supported only for WebSocket APIs. Specifies how to handle response payload content type conversions.
* Supported values are CONVERT_TO_BINARY and CONVERT_TO_TEXT, with the following behaviors:</p>
* <p>
* CONVERT_TO_BINARY: Converts a response payload from a Base64-encoded string to the corresponding binary
* blob.
* </p>
* <p>
* CONVERT_TO_TEXT: Converts a response payload from a binary blob to a Base64-encoded string.
* </p>
* <p>
* If this property is not defined, the response payload will be passed through from the integration
* response to the route response or method response without modification.
* @see ContentHandlingStrategy
*/
public String getContentHandlingStrategy() {
    // Generated getter: returns the stored strategy string, or null if never set.
    return this.contentHandlingStrategy;
}
/**
 * Sets how response payload content-type conversions are handled and returns this object for
 * chaining. Supported only for WebSocket APIs.
 * <p>
 * CONVERT_TO_BINARY: converts a Base64-encoded string payload to the corresponding binary blob.
 * CONVERT_TO_TEXT: converts a binary blob payload to a Base64-encoded string. When unset, the
 * payload passes through from the integration response unmodified.
 *
 * @param contentHandlingStrategy
 *        the conversion strategy to apply (CONVERT_TO_BINARY or CONVERT_TO_TEXT)
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ContentHandlingStrategy
 */
public IntegrationResponse withContentHandlingStrategy(String contentHandlingStrategy) {
    setContentHandlingStrategy(contentHandlingStrategy);
    return this;
}
/**
 * Enum-typed variant: sets how response payload content-type conversions are handled and returns
 * this object for chaining. Supported only for WebSocket APIs.
 * <p>
 * CONVERT_TO_BINARY: converts a Base64-encoded string payload to the corresponding binary blob.
 * CONVERT_TO_TEXT: converts a binary blob payload to a Base64-encoded string. When unset, the
 * payload passes through from the integration response unmodified.
 *
 * @param contentHandlingStrategy
 *        the conversion strategy to apply; must not be {@code null}
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see ContentHandlingStrategy
 */
public IntegrationResponse withContentHandlingStrategy(ContentHandlingStrategy contentHandlingStrategy) {
    // Stored in its string form, matching the String-based setter.
    this.contentHandlingStrategy = contentHandlingStrategy.toString();
    return this;
}
/**
 * Sets the integration response ID.
 *
 * @param integrationResponseId
 *        The integration response ID.
 */
public void setIntegrationResponseId(String integrationResponseId) {
    this.integrationResponseId = integrationResponseId;
}
/**
 * Returns the integration response ID.
 *
 * @return The integration response ID.
 */
public String getIntegrationResponseId() {
    return integrationResponseId;
}
/**
 * Sets the integration response ID and returns this object for chaining.
 *
 * @param integrationResponseId
 *        The integration response ID.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IntegrationResponse withIntegrationResponseId(String integrationResponseId) {
    setIntegrationResponseId(integrationResponseId);
    return this;
}
/**
 * Sets the integration response key.
 *
 * @param integrationResponseKey
 *        The integration response key.
 */
public void setIntegrationResponseKey(String integrationResponseKey) {
    this.integrationResponseKey = integrationResponseKey;
}
/**
 * Returns the integration response key.
 *
 * @return The integration response key.
 */
public String getIntegrationResponseKey() {
    return integrationResponseKey;
}
/**
 * Sets the integration response key and returns this object for chaining.
 *
 * @param integrationResponseKey
 *        The integration response key.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IntegrationResponse withIntegrationResponseKey(String integrationResponseKey) {
    setIntegrationResponseKey(integrationResponseKey);
    return this;
}
/**
 * Returns the response-parameter map passed from the backend to the method response. Keys follow
 * the pattern method.response.header.{name}; mapped values are an integration response header
 * reference (integration.response.header.{name}), a static value in single quotes, or a JSON
 * expression from the integration response body (integration.response.body.{JSON-expression},
 * without the $ prefix).
 * <p>
 * Note: this is the live internal map, not a defensive copy.
 *
 * @return the current response-parameter map, or {@code null} when none has been set
 */
public java.util.Map<String, String> getResponseParameters() {
    return responseParameters;
}
/**
 * Replaces the response-parameter map passed from the backend to the method response. Keys follow
 * the pattern method.response.header.{name}; mapped values are an integration response header
 * reference, a static value in single quotes, or a JSON expression from the integration response
 * body (without the $ prefix).
 *
 * @param responseParameters
 *        the new response-parameter map (stored by reference, not copied)
 */
public void setResponseParameters(java.util.Map<String, String> responseParameters) {
    this.responseParameters = responseParameters;
}
/**
 * Replaces the response-parameter map and returns this object for chaining. See
 * {@link #setResponseParameters(java.util.Map)} for the key/value patterns.
 *
 * @param responseParameters
 *        the new response-parameter map
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IntegrationResponse withResponseParameters(java.util.Map<String, String> responseParameters) {
    setResponseParameters(responseParameters);
    return this;
}
/**
 * Adds a single ResponseParameters entry, lazily creating the backing map.
 *
 * @param key the method response header parameter name
 * @param value the mapped integration response value
 * @return Returns a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException if an entry with the same key was already added
 * @see IntegrationResponse#withResponseParameters
 */
public IntegrationResponse addResponseParametersEntry(String key, String value) {
    if (this.responseParameters == null) {
        this.responseParameters = new java.util.HashMap<String, String>();
    }
    if (this.responseParameters.containsKey(key)) {
        // String concatenation handles a null key ("null"); the previous
        // key.toString() threw NullPointerException instead of the intended error.
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    }
    this.responseParameters.put(key, value);
    return this;
}
/**
 * Discards every entry previously added to ResponseParameters.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IntegrationResponse clearResponseParametersEntries() {
    responseParameters = null;
    return this;
}
/**
 * Returns the response templates for the integration response: a string-to-string map keyed by
 * content type, with the template text as the value.
 * <p>
 * Note: this is the live internal map, not a defensive copy.
 *
 * @return the current response-template map, or {@code null} when none has been set
 */
public java.util.Map<String, String> getResponseTemplates() {
    return responseTemplates;
}
/**
 * Replaces the response templates for the integration response: a string-to-string map keyed by
 * content type, with the template text as the value.
 *
 * @param responseTemplates
 *        the new response-template map (stored by reference, not copied)
 */
public void setResponseTemplates(java.util.Map<String, String> responseTemplates) {
    this.responseTemplates = responseTemplates;
}
/**
 * Replaces the response templates and returns this object for chaining. See
 * {@link #setResponseTemplates(java.util.Map)} for the map layout.
 *
 * @param responseTemplates
 *        the new response-template map
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IntegrationResponse withResponseTemplates(java.util.Map<String, String> responseTemplates) {
    setResponseTemplates(responseTemplates);
    return this;
}
/**
 * Adds a single ResponseTemplates entry, lazily creating the backing map.
 *
 * @param key the content type the template applies to
 * @param value the template text
 * @return Returns a reference to this object so that method calls can be chained together.
 * @throws IllegalArgumentException if an entry with the same key was already added
 * @see IntegrationResponse#withResponseTemplates
 */
public IntegrationResponse addResponseTemplatesEntry(String key, String value) {
    if (this.responseTemplates == null) {
        this.responseTemplates = new java.util.HashMap<String, String>();
    }
    if (this.responseTemplates.containsKey(key)) {
        // String concatenation handles a null key ("null"); the previous
        // key.toString() threw NullPointerException instead of the intended error.
        throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
    }
    this.responseTemplates.put(key, value);
    return this;
}
/**
 * Discards every entry previously added to ResponseTemplates.
 *
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IntegrationResponse clearResponseTemplatesEntries() {
    responseTemplates = null;
    return this;
}
/**
 * Sets the template selection expression for the integration response.
 *
 * @param templateSelectionExpression
 *        The template selection expression.
 */
public void setTemplateSelectionExpression(String templateSelectionExpression) {
    this.templateSelectionExpression = templateSelectionExpression;
}
/**
 * Returns the template selection expression for the integration response.
 *
 * @return The template selection expression.
 */
public String getTemplateSelectionExpression() {
    return templateSelectionExpression;
}
/**
 * Sets the template selection expression and returns this object for chaining.
 *
 * @param templateSelectionExpression
 *        The template selection expression.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IntegrationResponse withTemplateSelectionExpression(String templateSelectionExpression) {
    setTemplateSelectionExpression(templateSelectionExpression);
    return this;
}
/**
 * Returns a string representation of this object, useful for testing and debugging. Only non-null
 * fields are rendered; sensitive data would be redacted with a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Output format is kept exactly as before, including the trailing comma
    // after every field except TemplateSelectionExpression.
    StringBuilder sb = new StringBuilder("{");
    if (getContentHandlingStrategy() != null) {
        sb.append("ContentHandlingStrategy: ").append(getContentHandlingStrategy()).append(",");
    }
    if (getIntegrationResponseId() != null) {
        sb.append("IntegrationResponseId: ").append(getIntegrationResponseId()).append(",");
    }
    if (getIntegrationResponseKey() != null) {
        sb.append("IntegrationResponseKey: ").append(getIntegrationResponseKey()).append(",");
    }
    if (getResponseParameters() != null) {
        sb.append("ResponseParameters: ").append(getResponseParameters()).append(",");
    }
    if (getResponseTemplates() != null) {
        sb.append("ResponseTemplates: ").append(getResponseTemplates()).append(",");
    }
    if (getTemplateSelectionExpression() != null) {
        sb.append("TemplateSelectionExpression: ").append(getTemplateSelectionExpression());
    }
    return sb.append("}").toString();
}
/**
 * Two instances are equal when every property pair is equal (both null counts as equal).
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof IntegrationResponse)) {
        // Also covers obj == null.
        return false;
    }
    IntegrationResponse other = (IntegrationResponse) obj;
    // Objects.equals reproduces the previous null-xor + equals checks exactly.
    return java.util.Objects.equals(other.getContentHandlingStrategy(), this.getContentHandlingStrategy())
            && java.util.Objects.equals(other.getIntegrationResponseId(), this.getIntegrationResponseId())
            && java.util.Objects.equals(other.getIntegrationResponseKey(), this.getIntegrationResponseKey())
            && java.util.Objects.equals(other.getResponseParameters(), this.getResponseParameters())
            && java.util.Objects.equals(other.getResponseTemplates(), this.getResponseTemplates())
            && java.util.Objects.equals(other.getTemplateSelectionExpression(), this.getTemplateSelectionExpression());
}
/**
 * Hash code consistent with {@link #equals(Object)}; null fields contribute 0.
 */
@Override
public int hashCode() {
    // Objects.hash computes the identical 31-based rolling hash (seed 1,
    // null -> 0) that the previous hand-rolled loop produced.
    return java.util.Objects.hash(
            getContentHandlingStrategy(),
            getIntegrationResponseId(),
            getIntegrationResponseKey(),
            getResponseParameters(),
            getResponseTemplates(),
            getTemplateSelectionExpression());
}
/**
 * Shallow copy via {@link Object#clone()}; the class is Cloneable, so the checked
 * exception is impossible and is rethrown as an IllegalStateException.
 */
@Override
public IntegrationResponse clone() {
    try {
        return (IntegrationResponse) super.clone();
    } catch (CloneNotSupportedException ex) {
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", ex);
    }
}
/**
 * Marshalls this object's state through the shared {@code IntegrationResponseMarshaller}
 * singleton. Internal SDK use only (see {@code @SdkInternalApi}).
 */
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
    com.amazonaws.services.apigatewayv2.model.transform.IntegrationResponseMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| aws/aws-sdk-java | aws-java-sdk-apigatewayv2/src/main/java/com/amazonaws/services/apigatewayv2/model/IntegrationResponse.java | Java | apache-2.0 | 28,169 |
/*
* Copyright (c) 2011-2015 BlackBerry Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "applicationui.hpp"
#include <bb/cascades/Application>
#include <bb/cascades/QmlDocument>
#include <bb/cascades/AbstractPane>
#include <bb/cascades/LocaleHandler>
#include <bb/system/SystemDialog>
using namespace bb::cascades;
using namespace bb::system;
// Builds the application: installs localization support, loads the QML scene,
// and registers with the BBM platform if not already registered.
ApplicationUI::ApplicationUI() :
        QObject()
{
    // prepare the localization
    m_pTranslator = new QTranslator(this);
    m_pLocaleHandler = new LocaleHandler(this);
    bool res = QObject::connect(m_pLocaleHandler, SIGNAL(systemLanguageChanged()), this, SLOT(onSystemLanguageChanged()));
    // This is only available in Debug builds
    Q_ASSERT(res);
    // Since the variable is not used in the app, this is added to avoid a
    // compiler warning
    Q_UNUSED(res);
    // initial load
    onSystemLanguageChanged();
    // Create scene document from main.qml asset, the parent is set
    // to ensure the document gets destroyed properly at shut down.
    QmlDocument *qml = QmlDocument::create("asset:///main.qml").parent(this);
    qml->setContextProperty("_app", this);
    // Create root object for the UI
    AbstractPane *root = qml->createRootObject<AbstractPane>();
    // Set created root object as the application scene
    Application::instance()->setScene(root);
    m_context = new bb::platform::bbm::Context(
            // UUID was generated at random for this sample.
            // BE SURE TO USE YOUR OWN UNIQUE UUID. You can generate one here: http://www.guidgenerator.com/
            QUuid("6572670a-f4a2-4a32-b960-543bdd2b10f1"));
    // Only request registration when it has not already been granted; the
    // slot below reacts once the platform reports the final state.
    if (m_context->registrationState()
            != bb::platform::bbm::RegistrationState::Allowed) {
        connect(m_context,
                SIGNAL(registrationStateUpdated (bb::platform::bbm::RegistrationState::Type)),
                this,
                SLOT(registrationStateUpdated (bb::platform::bbm::RegistrationState::Type)));
        m_context->requestRegisterApplication();
    }
}
void ApplicationUI::inviteUserToDownloadViaBBM() {
if (m_context->registrationState()
== bb::platform::bbm::RegistrationState::Allowed) {
m_messageService->sendDownloadInvitation();
} else {
SystemDialog *bbmDialog = new SystemDialog("OK");
bbmDialog->setTitle("BBM Connection Error");
bbmDialog->setBody(
"BBM is not currently connected. Please setup / sign-in to BBM to remove this message.");
connect(bbmDialog, SIGNAL(finished(bb::system::SystemUiResult::Type)),
this, SLOT(dialogFinished(bb::system::SystemUiResult::Type)));
bbmDialog->show();
return;
}
}
void ApplicationUI::updatePersonalMessage(const QString &message) {
if (m_context->registrationState()
== bb::platform::bbm::RegistrationState::Allowed) {
m_userProfile->requestUpdatePersonalMessage(message);
} else {
SystemDialog *bbmDialog = new SystemDialog("OK");
bbmDialog->setTitle("BBM Connection Error");
bbmDialog->setBody(
"BBM is not currently connected. Please setup / sign-in to BBM to remove this message.");
connect(bbmDialog, SIGNAL(finished(bb::system::SystemUiResult::Type)),
this, SLOT(dialogFinished(bb::system::SystemUiResult::Type)));
bbmDialog->show();
return;
}
}
// Reacts to BBM registration state changes: creates the message/profile
// services once registration is allowed, or retries registration when the
// app is reported as unregistered.
void ApplicationUI::registrationStateUpdated(
        bb::platform::bbm::RegistrationState::Type state) {
    if (state == bb::platform::bbm::RegistrationState::Unregistered) {
        // Not registered yet: kick off another registration request.
        m_context->requestRegisterApplication();
        return;
    }
    if (state == bb::platform::bbm::RegistrationState::Allowed) {
        // Registration granted: instantiate the BBM services used by the UI.
        m_messageService = new bb::platform::bbm::MessageService(m_context, this);
        m_userProfile = new bb::platform::bbm::UserProfile(m_context, this);
    }
}
void ApplicationUI::onSystemLanguageChanged()
{
QCoreApplication::instance()->removeTranslator(m_pTranslator);
// Initiate, load and install the application translation files.
QString locale_string = QLocale().name();
QString file_name = QString("Clouder_%1").arg(locale_string);
if (m_pTranslator->load(file_name, "app/native/qm")) {
QCoreApplication::instance()->installTranslator(m_pTranslator);
}
}
| syedshaishad/Clouder | src/applicationui.cpp | C++ | apache-2.0 | 4,878 |
// -----------------------------------------------------------------------
// <copyright file="RestServiceController.cs">
// Copyright (c) 2015 Akka.NET Dragons Demo contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// -----------------------------------------------------------------------
namespace AkkaDotNetDragons.Service
{
using AkkaDotNetDragons.Core;
using AkkaDotNetDragons.Dto;
using RestSharp;
using ServiceStack;
/// <summary>
/// The HTTP REST based service controller. Pushes environment status and
/// messages to clients via fire-and-forget POST requests.
/// </summary>
public class RestServiceController : IServiceController
{
    /// <summary>
    /// Creates a new instance of <see cref="RestServiceController"/>.
    /// </summary>
    /// <param name="host">The web application host URL.</param>
    public RestServiceController(string host)
    {
        Host = host;
    }

    /// <summary>
    /// Gets the web application host URL.
    /// </summary>
    private string Host { get; }

    /// <summary>
    /// Push the current environment status to clients.
    /// </summary>
    public void PushStatus()
    {
        var command = new EnvironmentCommand { Command = EnvironmentCommands.Status }.ToPostUrl();
        ExecutePostAsync(command);
    }

    /// <summary>
    /// Push a message to clients.
    /// </summary>
    /// <param name="messageType">The message type.</param>
    /// <param name="name">The message sender name.</param>
    /// <param name="message">The message to push.</param>
    public void PushMessage(MessageTypes messageType, string name, string message)
    {
        var command = new EnvironmentMessage().ToPostUrl();
        ExecutePostAsync(command, request =>
        {
            request.AddParameter("messageType", messageType);
            request.AddParameter("name", name);
            request.AddParameter("message", message);
        });
    }

    /// <summary>
    /// Fire-and-forget POST against the host; shared by both push operations to
    /// avoid duplicating client/request construction. The response is discarded.
    /// </summary>
    /// <param name="command">The relative command URL to post to.</param>
    /// <param name="configure">Optional callback to add parameters to the request.</param>
    private void ExecutePostAsync(string command, System.Action<RestRequest> configure = null)
    {
        var client = new RestClient(Host);
        var request = new RestRequest(command, Method.POST);
        configure?.Invoke(request);
        client.ExecuteAsync(request, response => { });
    }
}
| hhandoko/akka-dotnet-ddd | src/AkkaDotNetDragons.Service/RestServiceController.cs | C# | apache-2.0 | 2,805 |
package org.sagebionetworks.web.client.widget.entity.tabs;
import java.util.ArrayList;
import java.util.List;
import org.sagebionetworks.repo.model.Entity;
import org.sagebionetworks.repo.model.EntityType;
import org.sagebionetworks.repo.model.table.Dataset;
import org.sagebionetworks.repo.model.table.Table;
import org.sagebionetworks.web.client.DisplayConstants;
import org.sagebionetworks.web.client.PortalGinInjector;
import org.sagebionetworks.web.client.place.Synapse.EntityArea;
import org.sagebionetworks.web.shared.WebConstants;
import com.google.inject.Inject;
/**
 * Tab that shows Tables, EntityViews, and SubmissionViews.
 */
public class TablesTab extends AbstractTablesTab {
    public static final String TABLES_HELP = "Build structured queryable data that can be described by a schema using the Tables.";
    public static final String TABLES_HELP_URL = WebConstants.DOCS_URL + "Tables.2011038095.html";

    @Inject
    public TablesTab(Tab tab, PortalGinInjector ginInjector) {
        super(tab, ginInjector);
        this.tab = tab;
        this.ginInjector = ginInjector;
        tab.configure(DisplayConstants.TABLES, "table", TABLES_HELP, TABLES_HELP_URL, EntityArea.TABLES);
    }

    @Override
    protected EntityArea getTabArea() {
        return EntityArea.TABLES;
    }

    @Override
    protected String getTabDisplayName() {
        return DisplayConstants.TABLES;
    }

    @Override
    protected List<EntityType> getTypesShownInList() {
        // Mutable list, same element order as before.
        return new ArrayList<>(java.util.Arrays.asList(
                EntityType.table,
                EntityType.entityview,
                EntityType.submissionview,
                EntityType.materializedview));
    }

    @Override
    protected boolean isEntityShownInTab(Entity entity) {
        // Datasets are table-like but are rendered elsewhere, so exclude them.
        if (entity instanceof Dataset) {
            return false;
        }
        return entity instanceof Table;
    }
}
| Sage-Bionetworks/SynapseWebClient | src/main/java/org/sagebionetworks/web/client/widget/entity/tabs/TablesTab.java | Java | apache-2.0 | 1,759 |
package org.apache.mesos.elasticsearch.systemtest;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.JsonNode;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import org.apache.log4j.Logger;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import static com.jayway.awaitility.Awaitility.await;
/**
 * Response which waits until the scheduler's tasks endpoint is ready, i.e. until
 * it reports the expected number of running tasks.
 */
public class TasksResponse {

    // Fixed: was Logger.getLogger(DiscoverySystemTest.class), a copy/paste slip
    // that attributed this class's log output to the wrong category.
    public static final Logger LOGGER = Logger.getLogger(TasksResponse.class);

    // Last response received from the /v1/tasks endpoint.
    private HttpResponse<JsonNode> response;

    private String schedulerIpAddress;

    private int nodesCount;

    /**
     * Blocks (for at most 60 seconds) until the tasks endpoint reports
     * {@code nodesCount} tasks.
     *
     * @param schedulerIpAddress IP address of the Elasticsearch scheduler
     * @param nodesCount number of tasks expected to be running
     */
    public TasksResponse(String schedulerIpAddress, int nodesCount) {
        this.schedulerIpAddress = schedulerIpAddress;
        this.nodesCount = nodesCount;
        await().atMost(60, TimeUnit.SECONDS).until(new TasksCall());
    }

    /** Polling condition: true once the endpoint lists the expected task count. */
    class TasksCall implements Callable<Boolean> {
        @Override
        public Boolean call() throws Exception {
            try {
                String tasksEndPoint = "http://" + schedulerIpAddress + ":31100/v1/tasks";
                LOGGER.debug("Fetching tasks on " + tasksEndPoint);
                response = Unirest.get(tasksEndPoint).asJson();
                return response.getBody().getArray().length() == nodesCount;
            } catch (UnirestException e) {
                // Endpoint not reachable yet — keep polling.
                LOGGER.debug("Waiting until " + nodesCount + " tasks are started...");
                return false;
            }
        }
    }

    /** @return the raw JSON response from the tasks endpoint */
    public HttpResponse<JsonNode> getJson() {
        return response;
    }

    /** @return each task as its own JSONObject, in endpoint order */
    public List<JSONObject> getTasks() {
        List<JSONObject> tasks = new ArrayList<>();
        for (int i = 0; i < response.getBody().getArray().length(); i++) {
            tasks.add(response.getBody().getArray().getJSONObject(i));
        }
        return tasks;
    }
}
| openwis-ss/elasticsearch | system-test/src/systemTest/java/org/apache/mesos/elasticsearch/systemtest/TasksResponse.java | Java | apache-2.0 | 2,000 |
/*
*
*/
package net.community.apps.apache.maven.conv2maven;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Cursor;
import java.awt.Font;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.File;
import java.util.Map;
import java.util.TreeMap;
import javax.swing.AbstractButton;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextPane;
import javax.swing.JToolBar;
import javax.swing.WindowConstants;
import javax.swing.text.BadLocationException;
import javax.swing.text.JTextComponent;
import net.community.apps.apache.maven.conv2maven.resources.ResourcesAnchor;
import net.community.apps.common.BaseMainFrame;
import net.community.chest.awt.attributes.AttrUtils;
import net.community.chest.dom.DOMUtils;
import net.community.chest.lang.ExceptionUtil;
import net.community.chest.lang.StringUtil;
import net.community.chest.swing.component.menu.MenuItemExplorer;
import net.community.chest.swing.component.scroll.ScrolledComponent;
import net.community.chest.ui.components.logging.LogMessagesArea;
import net.community.chest.ui.components.text.FolderAutoCompleter;
import net.community.chest.ui.helpers.button.HelperCheckBox;
import net.community.chest.ui.helpers.panel.input.LRFieldWithButtonPanel;
import net.community.chest.ui.helpers.text.InputTextField;
import net.community.chest.util.logging.LogLevelWrapper;
import net.community.chest.util.logging.LoggerWrapper;
import net.community.chest.util.logging.factory.WrapperFactoryManager;
import org.w3c.dom.Element;
/**
* <P>Copyright as per GPLv2</P>
* @author Lyor G.
* @since Nov 7, 2011 9:09:58 AM
*
*/
final class MainFrame extends BaseMainFrame<ResourcesAnchor> implements Runnable {
private static final long serialVersionUID = -6329514955688913319L;
protected static final LoggerWrapper _logger=WrapperFactoryManager.getLogger(MainFrame.class);
/**
 * Builds the main application frame; layout is driven by the base class from
 * the resources returned by {@link #getResourcesAnchor()}.
 *
 * @throws Exception if base-frame initialization fails
 */
MainFrame () throws Exception
{
    super();
}
/*
 * Exposes this class's logger to the base frame.
 * @see net.community.apps.common.BaseMainFrame#getLogger()
 */
@Override
protected LoggerWrapper getLogger ()
{
    return _logger;
}
// Text area that accumulates log messages; populated lazily in layoutSection.
private LogMessagesArea _logsArea;
/**
 * Appends a message to the log area, silently ignoring calls made before the
 * UI is built or with nothing to log.
 */
public void log (LogLevelWrapper l, String msg)
{
    if ((_logsArea == null) || (l == null))
        return;
    if ((msg == null) || msg.isEmpty())
        return;

    try
    {
        _logsArea.log(l, msg);
    }
    catch (BadLocationException e)
    {
        // Logging must never break the UI — record the failure and move on.
        _logger.error("log(" + l + ")[" + msg + "] failed: " + e.getMessage());
    }
}
/** Empties the log area; no-op when the UI has not been built yet. */
protected void clearLogMessagesArea ()
{
    if (_logsArea == null)
        return;
    _logsArea.setText("");
}
// Checkbox controlling recursive folder scanning; may be null before layout.
private HelperCheckBox _scanRecursive;
/** @return whether recursive scanning is selected; defaults to {@code true} before the checkbox exists */
public boolean isRecursiveScanning ()
{
    if (_scanRecursive == null)
        return true;
    return _scanRecursive.isSelected();
}
/** Updates the recursive-scan checkbox, skipping redundant UI updates. */
public void setRecursiveScanning (boolean enabled)
{
    if ((_scanRecursive != null) && (_scanRecursive.isSelected() != enabled))
        _scanRecursive.setSelected(enabled);
}
// Re-evaluates whether the "run" action may be enabled after every keystroke
// in the root-folder field.
private final KeyListener _runOptionKeyListener=new KeyAdapter() {
        /*
         * @see java.awt.event.KeyAdapter#keyReleased(java.awt.event.KeyEvent)
         */
        @Override
        public void keyReleased (KeyEvent e)
        {
            if (e != null)
                updateOkToRun();
        }
    };
// Root-folder input field + browse button; created in layoutSection("root-selector").
private LRFieldWithButtonPanel _rootSelector;
// Auto-completer wrapping the selector's text field; also created in layoutSection.
private FolderAutoCompleter<JTextComponent> _rootCompleter;
/*
 * Dispatches named layout sections from the XML configuration; each section
 * may be initialized at most once (a second occurrence is a configuration error).
 * Unrecognized sections are delegated to the base class.
 * @see net.community.apps.common.BaseMainFrame#layoutSection(java.lang.String, org.w3c.dom.Element)
 */
@Override
public void layoutSection (String name, Element elem) throws RuntimeException
{
    if (_logger.isDebugEnabled())
        _logger.debug("layoutSection(" + name + ")[" + DOMUtils.toString(elem) + "]");
    if ("root-selector".equalsIgnoreCase(name))
    {
        if (_rootSelector != null)
            throw new IllegalStateException("layoutSection(" + name + ") already initialized for " + DOMUtils.toString(elem));
        // delay initialization
        _rootSelector = new LRFieldWithButtonPanel(elem, false);
        _rootSelector.setTextField(new InputTextField());
        _rootSelector.layoutComponent();
        // Browse button opens the load-file dialog; keystrokes re-check run eligibility.
        _rootSelector.addActionListener(getLoadFileListener());
        _rootSelector.addTextFieldKeyListener(_runOptionKeyListener);
        _rootCompleter = new FolderAutoCompleter<JTextComponent>(_rootSelector.getTextField());
    }
    else if ("log-msgs-area".equalsIgnoreCase(name))
    {
        if (_logsArea != null)
            throw new IllegalStateException("layoutSection(" + name + ") already initialized for " + DOMUtils.toString(elem));
        _logsArea = new LogMessagesArea(Font.getFont(Font.DIALOG), elem);
    }
    else if ("scan-recursive".equalsIgnoreCase(name))
    {
        if (_scanRecursive != null)
            throw new IllegalStateException("layoutSection(" + name + ") already initialized for " + DOMUtils.toString(elem));
        _scanRecursive = new HelperCheckBox(elem);
    }
    else
        super.layoutSection(name, elem);
}
public final String getRootFolder ()
{
return (_rootCompleter == null) ? null : _rootCompleter.getText();
}
private void setRootFolder (File rootFolder)
{
try
{
if (rootFolder == null)
return;
if (!rootFolder.isDirectory())
{
_logger.error("Referenced file is not a folder: " + rootFolder.getAbsolutePath());
return;
}
if (_rootCompleter == null)
return;
final String rootPath=rootFolder.getAbsolutePath(),
prev=_rootCompleter.getText();
if (0 == StringUtil.compareDataStrings(prev, rootPath, true))
return;
_rootCompleter.setText(rootPath);
}
finally
{
updateOkToRun();
}
}
/*
* @see net.community.apps.common.BaseMainFrame#loadFile(java.io.File, java.lang.String, org.w3c.dom.Element)
*/
@Override
public void loadFile (final File f, final String cmd, final Element dlgElement)
{
setRootFolder(f);
}
/*
* @see net.community.apps.common.MainComponent#getResourcesAnchor()
*/
@Override
public ResourcesAnchor getResourcesAnchor ()
{
return ResourcesAnchor.getInstance();
}
protected AbstractButton _runBtn, _stopBtn;
private boolean _running /* =false */;
protected boolean isOkToRun ()
{
final String rootFolder=getRootFolder();
if ((rootFolder == null) || (rootFolder.length() <= 0))
return false;
if (_runner != null)
return false;
return !_running;
}
protected boolean updateOkToRun ()
{
if ((_runBtn == null) || (_runMenuItem == null))
return false;
final boolean okToRun=isOkToRun();
_runBtn.setEnabled(okToRun);
_runMenuItem.setEnabled(okToRun);
return okToRun;
}
protected JMenuItem _runMenuItem, _loadMenuItem, _stopMenuItem;
protected void setRunningMode (boolean running)
{
if (_running != running)
{
AttrUtils.setComponentEnabledState(!running,
_rootSelector, _runBtn, _scanRecursive,
_runMenuItem, _loadMenuItem);
final Cursor c=running
? Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)
: Cursor.getDefaultCursor()
;
if ((c != null) && (_logsArea != null))
_logsArea.setCursor(c);
AttrUtils.setComponentEnabledState(running, _stopMenuItem, _stopBtn);
_running = running;
}
}
private ProjectConverter _runner;
/*
* @see java.lang.Runnable#run()
*/
@Override
public void run ()
{
if (_runner != null)
{
JOptionPane.showMessageDialog(this, "Stop current conversion before starting another", "Conversion in progress", JOptionPane.ERROR_MESSAGE);
return;
}
final String rootFolder=getRootFolder();
if ((rootFolder == null) || (rootFolder.length() <= 0))
{
JOptionPane.showMessageDialog(this, "Missing root folder", "Incomplete arguments", JOptionPane.ERROR_MESSAGE);
return;
}
clearLogMessagesArea();
_runner = new ProjectConverter(this);
setRunningMode(true);
_runner.execute();
}
void signalConversionDone (final ProjectConverter r)
{
if (r != null)
{
if (_runner != r)
_logger.warn("signalConversionDone() mismatched instances");
_runner = null;
}
setRunningMode(false);
}
protected void stop ()
{
if ((_runner == null) || _runner.isDone() || _runner.isCancelled())
return;
_runner.cancel(false);
_logger.info("Canceled by user request");
}
private static final String RUN_CMD="run", CLEAR_CMD="clear", STOP_CMD="stop";
/*
* @see net.community.apps.common.FilesLoadMainFrame#setMainMenuItemsActionHandlers(net.community.chest.swing.component.menu.MenuItemExplorer)
*/
@Override
protected Map<String,JMenuItem> setMainMenuItemsActionHandlers (MenuItemExplorer ie)
{
final Map<String,JMenuItem> im=super.setMainMenuItemsActionHandlers(ie);
_loadMenuItem = (null == im) ? null : im.get(LOAD_CMD);
_stopMenuItem = (null == im) ? null : im.get(STOP_CMD);
_runMenuItem = (null == im) ? null : im.get(RUN_CMD);
return im;
}
/*
* @see net.community.apps.common.FilesLoadMainFrame#getActionListenersMap(boolean)
*/
@Override
protected Map<String,? extends ActionListener> getActionListenersMap (boolean createIfNotExist)
{
final Map<String,? extends ActionListener> org=super.getActionListenersMap(createIfNotExist);
if (((org != null) && (org.size() > 0)) || (!createIfNotExist))
return org;
final Map<String,ActionListener> lm=new TreeMap<String,ActionListener>(String.CASE_INSENSITIVE_ORDER);
lm.put(LOAD_CMD, getLoadFileListener());
lm.put(SAVE_CMD, getSaveFileListener());
lm.put(EXIT_CMD, getExitActionListener());
lm.put(ABOUT_CMD, getShowManifestActionListener());
lm.put(RUN_CMD, new ActionListener() {
/*
* @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
*/
@Override
public void actionPerformed (final ActionEvent event)
{
if (event != null)
run();
}
});
lm.put(CLEAR_CMD, new ActionListener() {
/*
* @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
*/
@Override
public void actionPerformed (final ActionEvent event)
{
if (event != null)
clearLogMessagesArea();
}
});
lm.put(STOP_CMD, new ActionListener() {
/*
* @see java.awt.event.ActionListener#actionPerformed(java.awt.event.ActionEvent)
*/
@Override
public void actionPerformed (final ActionEvent event)
{
if (event != null)
stop();
}
});
setActionListenersMap(lm);
return lm;
}
/*
* @see net.community.apps.common.BaseMainFrame#layoutComponent()
*/
@Override
public void layoutComponent () throws RuntimeException
{
super.layoutComponent();
final JPanel northPanel=new JPanel(new GridLayout(0, 1));
try
{
final JToolBar b=getMainToolBar();
final Map<String,? extends AbstractButton> hm=setToolBarHandlers(b);
if ((hm != null) && (hm.size() > 0))
{
_runBtn = hm.get(RUN_CMD);
_stopBtn = hm.get(STOP_CMD);
}
northPanel.add(b);
}
catch(Exception e)
{
throw ExceptionUtil.toRuntimeException(e);
}
if (_rootSelector != null)
northPanel.add(_rootSelector);
if (_scanRecursive != null)
northPanel.add(_scanRecursive);
final Container ctPane=getContentPane();
ctPane.add(northPanel, BorderLayout.NORTH);
if (_logsArea != null)
ctPane.add(new ScrolledComponent<JTextPane>(JTextPane.class, _logsArea), BorderLayout.CENTER);
// intercept and handle the closure via click on the "x" button
addWindowListener(new WindowAdapter() {
/*
* @see java.awt.event.WindowAdapter#windowClosing(java.awt.event.WindowEvent)
*/
@Override
public void windowClosing (WindowEvent e)
{
if (e != null)
exitApplication();
}
});
setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
}
}
| lgoldstein/communitychest | apps/apache/maven/conv2maven/src/main/java/net/community/apps/apache/maven/conv2maven/MainFrame.java | Java | apache-2.0 | 13,850 |
package org.soujava.cost.street.resource;
import javax.ejb.Singleton;
/**
 * Application-scoped EJB singleton exposing a single {@link #run()} action.
 *
 * NOTE(review): the class name looks like a typo of "Schedule", and run()
 * carries no timer annotation (e.g. {@code @Schedule}/{@code @Timeout}) -
 * confirm it is meant to be invoked explicitly by a client rather than by
 * the container's timer service.
 */
@Singleton
public class Schudule {
    // Emits a simple progress marker to stdout.
    public void run(){
        System.out.println("running...");
    }
}
| otaviojava/cost.street | cost.street-web/src/main/java/org/soujava/cost/street/resource/Schudule.java | Java | apache-2.0 | 169 |
package com.doun.chapter9interface;
/**
* Created by Doun on 2017/3/21.
*/
/**
 * Transformer that swaps every pair of adjacent characters in a string,
 * e.g. {@code "abcd" -> "badc"}; a trailing unpaired character is kept
 * in place ({@code "abc" -> "bac"}).
 */
public class LettersExchanger {
    /** @return the simple class name, used as the processor's display name. */
    public String name() {
        return getClass().getSimpleName();
    }

    /**
     * Exchanges adjacent character pairs of {@code str}.
     * <p>
     * Fix: the original computed an {@code oldCharArray} copy of the input
     * that was never read (dead local); the swap is now done in a single
     * working array, iterating directly over the second element of each pair.
     *
     * @param str the input string; must not be {@code null} (empty is fine)
     * @return a new string with each adjacent pair swapped
     */
    public String doLetterExchange(String str){
        final char[] chars = str.toCharArray();
        // i points at the second character of each pair; an odd-length tail
        // is left untouched, matching the original behavior.
        for (int i = 1; i < chars.length; i += 2) {
            final char first = chars[i - 1];
            chars[i - 1] = chars[i];
            chars[i] = first;
        }
        return new String(chars);
    }
}
class LettersExchangerAdapter implements Processor {
LettersExchanger lettersExchanger;
public LettersExchangerAdapter(LettersExchanger lettersExchanger) {
this.lettersExchanger = lettersExchanger;
}
public String name() { return lettersExchanger.name(); }
public String process(Object input) {
return lettersExchanger.doLetterExchange((String)input);
}
} | Doun2017/StudyJavaCode | Chapter9Interface/app/src/main/java/com/doun/chapter9interface/LettersExchanger.java | Java | apache-2.0 | 1,140 |
package io.dropwizard.auth;
import org.glassfish.hk2.api.InjectionResolver;
import org.glassfish.hk2.api.ServiceLocator;
import org.glassfish.hk2.api.TypeLiteral;
import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.server.internal.inject.AbstractContainerRequestValueFactory;
import org.glassfish.jersey.server.internal.inject.AbstractValueFactoryProvider;
import org.glassfish.jersey.server.internal.inject.MultivaluedParameterExtractorProvider;
import org.glassfish.jersey.server.internal.inject.ParamInjectionResolver;
import org.glassfish.jersey.server.model.Parameter;
import org.glassfish.jersey.server.spi.internal.ValueFactoryProvider;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.security.Principal;
/**
 * Value factory provider supporting {@link Principal} injection
 * by the {@link Auth} annotation.
 *
 * @param <T> the type of the principal
 */
@Singleton
public class AuthValueFactoryProvider<T extends Principal> extends AbstractValueFactoryProvider {
    /**
     * Class of the provided {@link Principal}; only parameters of exactly
     * this raw type are handled by this provider.
     */
    private final Class<T> principalClass;
    /**
     * {@link Principal} value factory provider injection constructor.
     *
     * @param mpep multivalued parameter extractor provider
     * @param injector injector instance
     * @param principalClassProvider provider of the principal class
     */
    @Inject
    public AuthValueFactoryProvider(MultivaluedParameterExtractorProvider mpep,
                                    ServiceLocator injector, PrincipalClassProvider<T> principalClassProvider) {
        super(mpep, injector, Parameter.Source.UNKNOWN);
        this.principalClass = principalClassProvider.clazz;
    }
    /**
     * Return a factory for the provided parameter. We only expect objects of
     * the type {@link T} being annotated with {@link Auth} annotation.
     *
     * @param parameter parameter that was annotated for being injected
     * @return the factory if annotated parameter matched type, {@code null}
     *         otherwise (so other providers may handle it)
     */
    @Override
    public AbstractContainerRequestValueFactory<?> createValueFactory(Parameter parameter) {
        // Handle only @Auth-annotated parameters of exactly the configured
        // principal class.
        if (!parameter.isAnnotationPresent(Auth.class) || !principalClass.equals(parameter.getRawType())) {
            return null;
        }
        return new AbstractContainerRequestValueFactory<Principal>() {
            /**
             * @return the {@link Principal} stored on the request's security
             *         context by the authentication filter; never {@code null}
             * @throws IllegalStateException if the request is unauthenticated
             *         (no principal was set)
             */
            public Principal provide() {
                final Principal principal = getContainerRequest().getSecurityContext().getUserPrincipal();
                if (principal == null) {
                    throw new IllegalStateException("Cannot inject a custom principal into unauthenticated request");
                }
                return principal;
            }
        };
    }
    /** Resolves @Auth-annotated parameters through this value factory provider. */
    @Singleton
    static class AuthInjectionResolver extends ParamInjectionResolver<Auth> {
        /**
         * Create new {@link Auth} annotation injection resolver.
         */
        AuthInjectionResolver() {
            super(AuthValueFactoryProvider.class);
        }
    }
    /** Carrier making the concrete principal class injectable via HK2. */
    @Singleton
    static class PrincipalClassProvider<T extends Principal> {
        private final Class<T> clazz;
        PrincipalClassProvider(Class<T> clazz) {
            this.clazz = clazz;
        }
    }
    /**
     * Injection binder for {@link AuthValueFactoryProvider} and {@link AuthInjectionResolver}.
     *
     * @param <T> the type of the principal
     */
    public static class Binder<T extends Principal> extends AbstractBinder {
        private final Class<T> principalClass;
        public Binder(Class<T> principalClass) {
            this.principalClass = principalClass;
        }
        @Override
        protected void configure() {
            // Register the principal class carrier, the value factory provider
            // and the @Auth injection resolver as singletons in the locator.
            bind(new PrincipalClassProvider<>(principalClass)).to(PrincipalClassProvider.class);
            bind(AuthValueFactoryProvider.class).to(ValueFactoryProvider.class).in(Singleton.class);
            bind(AuthInjectionResolver.class).to(new TypeLiteral<InjectionResolver<Auth>>() {
            }).in(Singleton.class);
        }
    }
}
| philandstuff/dropwizard | dropwizard-auth/src/main/java/io/dropwizard/auth/AuthValueFactoryProvider.java | Java | apache-2.0 | 4,226 |
<?php
/**
 * Database connection configuration, returned as a nested array of
 * connection profiles keyed by profile name.
 *
 * @author Dawnc
 * @date 2015-09-01
 */
return [
    "default" => [
        // Fully-qualified class implementing the DB driver abstraction.
        "driver" => 'wumashi\lib\Mysqli',
        "hostname" => "127.0.0.1",
        "username" => "root",
        // NOTE(review): credentials are hard-coded in a committed file -
        // consider moving them to environment-specific configuration.
        "password" => "123456",
        "database" => "item",
        "port" => 3306,
        "charset" => "UTF8",
    ]
];
| dawnco/wumashi | conf/local/db.conf.php | PHP | apache-2.0 | 323 |
package i5.las2peer.services.ocd.algorithms.utils;
import i5.las2peer.services.ocd.graphs.CustomGraph;
import java.util.Map;
import y.base.Node;
/**
 * Command interface for the listener rule used by the Speaker Listener
 * Label Propagation Algorithm (SLPA). Part of the command pattern:
 * concrete implementations decide which of the labels offered by the
 * speakers a listening node accepts.
 * @author Sebastian
 *
 */
public interface SlpaListenerRuleCommand {
    /**
     * Determines the label which the listener node will accept.
     * (Interface methods are implicitly {@code public abstract}; the
     * redundant modifiers were removed per Java style convention.)
     * @param graph The graph that the algorithm is executed on.
     * @param listener The listener node.
     * @param receivedLabels A mapping from each speaker to the label received from that speaker.
     * @return The accepted label.
     */
    int getLabel(CustomGraph graph, Node listener, Map<Node, Integer> receivedLabels);
}
| rwth-acis/REST-OCD-Services | rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/utils/SlpaListenerRuleCommand.java | Java | apache-2.0 | 762 |
/**
* Copyright © 2017 albahrani (https://github.com/albahrani)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.albahrani.aquacontrol.server;
import java.io.File;
import java.util.Objects;
import java.util.Optional;
import com.github.albahrani.aquacontrol.core.LightEnvironment;
import com.github.albahrani.aquacontrol.core.LightTaskDaemon;
import com.github.albahrani.aquacontrol.core.LightTimer;
import com.github.albahrani.aquacontrol.server.json.JSONPlan;
import com.github.albahrani.aquacontrol.server.rest.RESTServer;
import com.github.albahrani.dimmingplan.DimmingPlan;
/**
 * Central controller of the aquarium light server: wires together the REST
 * server, the periodic light timer, the dimming plan and its persistent
 * storage, and drives the start / pause / resume / shutdown lifecycle.
 */
public class LightServerController implements LightTaskDaemon {
    private RESTServer server;
    private LightTimer timer;
    private LightEnvironment lightEnvironment;
    private LightPlanStorage lightPlanStorage;
    // In-memory dimming plan executed by the timer; loaded from storage.
    private DimmingPlan lightPlan = null;
    // Creates the REST server (registering this controller as its daemon)
    // and the timer; environment and storage must be injected via setters
    // before start() is called.
    public LightServerController() {
        this.server = new RESTServer();
        this.server.setDaemon(this);
        this.timer = new LightTimer();
    }
    public void setLightEnvironment(LightEnvironment lightEnvironment) {
        Objects.requireNonNull(lightEnvironment);
        this.lightEnvironment = lightEnvironment;
    }
    public void setLightPlanStorage(LightPlanStorage lightPlanStorage) {
        Objects.requireNonNull(lightPlanStorage);
        this.lightPlanStorage = lightPlanStorage;
    }
    // Loads (or defaults) the dimming plan from the given file via storage.
    public void loadLightPlanFromFile(Optional<File> lightPlanFile) {
        this.lightPlan = this.lightPlanStorage.loadLightPlanFromFile(lightPlanFile);
    }
    // Replaces the stored JSON plan and persists it to the current plan file.
    // NOTE(review): the in-memory DimmingPlan (this.lightPlan) is NOT rebuilt
    // here - confirm callers invoke loadLightPlanFromFile() afterwards, or the
    // timer keeps executing the old plan.
    public void updateLightPlan(JSONPlan jsonPlan) {
        this.lightPlanStorage.setJsonLightPlan(jsonPlan);
        this.lightPlanStorage.storeLightPlanToFile(this.lightPlanStorage.getLightPlanFile());
    }
    // Package-private setters below exist for test injection.
    void setLightPlan(DimmingPlan lightPlan) {
        this.lightPlan = lightPlan;
    }
    @Override
    public LightEnvironment getLightEnvironment() {
        return this.lightEnvironment;
    }
    @Override
    public DimmingPlan getLightPlan() {
        return lightPlan;
    }
    void setTimer(LightTimer timer) {
        this.timer = timer;
    }
    void setServer(RESTServer server) {
        this.server = server;
    }
    // Starts the timer first, then the REST server.
    public void start() {
        System.out.println("Starting LightTimer");
        this.timer.start(this);
        System.out.println("Starting RESTServer");
        this.server.start();
    }
    // Restarts only the timer (REST server keeps running while paused).
    public void resume() {
        System.out.println("Resuming LightTimer");
        this.timer.start(this);
    }
    public void pause() {
        System.out.println("Pausing LightTimer");
        this.timer.stop();
    }
    // Stops server, timer and hardware environment, in that order.
    public void shutdown() {
        this.server.shutdown();
        this.timer.shutdown();
        this.lightEnvironment.shutdown();
    }
    // Pins a channel to a fixed value, overriding the plan's schedule.
    public void setForcedValue(String channelId, double channelValue) {
        this.lightPlan.channel(channelId).pin(channelValue);
    }
    // Releases a previously pinned channel back to plan control.
    public void clearForcedValue(String channelId) {
        this.lightPlan.channel(channelId).unpin();
    }
    public JSONPlan getJsonLightPlan() {
        return this.lightPlanStorage.getJsonLightPlan();
    }
}
| albahrani/aquacontrol-server | src/main/java/com/github/albahrani/aquacontrol/server/LightServerController.java | Java | apache-2.0 | 3,362 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A class to store named variables and a scope operator to manage sharing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections as collections_lib
import copy
import enum # pylint: disable=g-bad-import-order
import functools
import sys
import threading
import traceback
import six
from six import iteritems
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python import tf2
from tensorflow.python.eager import context
from tensorflow.python.eager import monitoring
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import deprecation
from tensorflow.python.util import function_utils
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.tf_export import tf_export
# Names exported from this module via `from ... import *`.
__all__ = [
    "AUTO_REUSE", "VariableScope", "get_variable_scope", "get_variable",
    "get_local_variable", "variable_scope", "variable_op_scope",
    "no_regularizer", "VariableSynchronization", "VariableAggregation"
]
# Process-wide gauge recording whether resource variables were enabled via
# enable_resource_variables() (consumed by TensorFlow's API usage monitoring).
_api_usage_gauge = monitoring.BoolGauge(
    "/tensorflow/api/resource_variables",
    "Whether variable_scope.enable_resource_variables() is called.")
class _PartitionInfo(object):
  """Holds partition info used by initializer functions.

  Describes one shard of a partitioned variable: the full combined shape of
  the variable and the offset of this shard within it, one entry per
  dimension.
  """
  def __init__(self, full_shape, var_offset):
    """Constructor.
    Args:
      full_shape: Tuple or list of `int` indicating the full combined shape of
        the partitioned variables.
      var_offset: Tuple or list of `int` specifying offset of this partition
        with respect to the full variable for each dimension.
    Raises:
      TypeError: If `full_shape` or `var_offset` is not a sequence.
      ValueError: If `full_shape` or `var_offset` differ in length. If
        `var_offset` exceeds `full_shape` in any dimension.
    """
    # NOTE(review): `collections.Sequence` (aliased here as collections_lib)
    # was moved to `collections.abc` in Python 3.3 and the alias was removed
    # in 3.10 - this check assumes an older Python; strings are excluded
    # explicitly because they are also sequences.
    if not isinstance(full_shape, collections_lib.Sequence) or isinstance(
        full_shape, six.string_types):
      raise TypeError(
          "`full_shape` must be a sequence (like tuple or list) instead of " +
          type(full_shape).__name__)
    if not isinstance(var_offset, collections_lib.Sequence) or isinstance(
        var_offset, six.string_types):
      raise TypeError(
          "`var_offset` must be a sequence (like tuple or list) instead of " +
          type(var_offset).__name__)
    if len(var_offset) != len(full_shape):
      raise ValueError(
          "Expected equal length, but `var_offset` is of length {} while "
          "full_shape is of length {}.".format(
              len(var_offset), len(full_shape)))
    # Every per-dimension offset must lie strictly inside the full shape.
    for i in xrange(len(full_shape)):
      offset = var_offset[i]
      shape = full_shape[i]
      if offset < 0 or offset >= shape:
        raise ValueError(
            "Expected 0 <= offset < shape but found offset={}, shape={} for "
            "var_offset={}, full_shape={}".format(offset, shape, var_offset,
                                                  full_shape))
    self._full_shape = full_shape
    self._var_offset = var_offset
  @property
  def full_shape(self):
    # Full combined shape of the partitioned variable.
    return self._full_shape
  @property
  def var_offset(self):
    # Per-dimension offset of this partition within the full variable.
    return self._var_offset
  def single_offset(self, shape):
    """Returns the offset when the variable is partitioned in at most one dim.
    Args:
      shape: Tuple or list of `int` indicating the shape of one specific
        variable partition.
    Returns:
      `int` representing the offset in the dimension along which the variable is
      partitioned. Returns 0 if the variable is not being partitioned.
    Raises:
      ValueError: Depending on self.single_slice_dim().
    """
    single_slice_dim = self.single_slice_dim(shape)
    # If this variable is not being partitioned at all, single_slice_dim() could
    # return None.
    if single_slice_dim is None:
      return 0
    return self.var_offset[single_slice_dim]
  def single_slice_dim(self, shape):
    """Returns the slice dim when the variable is partitioned only in one dim.
    Args:
      shape: Tuple or list of `int` indicating the shape of one specific
        variable partition.
    Returns:
      `int` representing the dimension that the variable is partitioned in, or
      `None` if the variable doesn't seem to be partitioned at all.
    Raises:
      TypeError: If `shape` is not a sequence.
      ValueError: If `shape` is not the same length as `self.full_shape`. If
        the variable is partitioned in more than one dimension.
    """
    if not isinstance(shape, collections_lib.Sequence) or isinstance(
        shape, six.string_types):
      raise TypeError(
          "`shape` must be a sequence (like tuple or list) instead of " +
          type(shape).__name__)
    if len(shape) != len(self.full_shape):
      raise ValueError(
          "Expected equal length, but received shape={} of length {} while "
          "self.full_shape={} is of length {}.".format(shape, len(shape),
                                                       self.full_shape,
                                                       len(self.full_shape)))
    # The partition must fit inside the full variable in every dimension.
    for i in xrange(len(shape)):
      if self.var_offset[i] + shape[i] > self.full_shape[i]:
        raise ValueError(
            "With self.var_offset={}, a partition of shape={} would exceed "
            "self.full_shape={} in dimension {}.".format(
                self.var_offset, shape, self.full_shape, i))
    # A dimension is "sliced" iff the partition is smaller than the full
    # shape there; more than one sliced dimension is an error.
    slice_dim = None
    for i in xrange(len(shape)):
      if shape[i] == self.full_shape[i]:
        continue
      if slice_dim is not None:
        raise ValueError(
            "Cannot use single_slice_dim() with shape={} and "
            "self.full_shape={} since slice dim could be either dimension {} "
            "or {}.".format(shape, self.full_shape, i, slice_dim))
      slice_dim = i
    return slice_dim
class _ReuseMode(enum.Enum):
  """Mode for variable access within a variable scope."""
  # Indicates that variables are to be fetched if they already exist or
  # otherwise created (the behavior exposed publicly as tf.AUTO_REUSE).
  AUTO_REUSE = 1
  # TODO(alive): For TensorFlow 2.0, Deprecate True/False/None API in favor of
  # enum values.
  # REUSE_FALSE = 2
  # REUSE_TRUE = 3
# TODO(apassos) remove these forwarding symbols.
# Forwarding aliases kept for backwards compatibility with code importing
# these names from this module instead of `variables`.
VariableSynchronization = variables.VariableSynchronization  # pylint: disable=invalid-name
VariableAggregation = variables.VariableAggregation  # pylint: disable=invalid-name
# Public sentinel exported as tf.compat.v1.AUTO_REUSE.
AUTO_REUSE = _ReuseMode.AUTO_REUSE
tf_export(v1=["AUTO_REUSE"]).export_constant(__name__, "AUTO_REUSE")
AUTO_REUSE.__doc__ = """
When passed in as the value for the `reuse` flag, AUTO_REUSE indicates that
get_variable() should create the requested variable if it doesn't exist or, if
it does exist, simply return it.
"""
# Module-wide default for `use_resource`; True when TF2 behavior is enabled.
_DEFAULT_USE_RESOURCE = tf2.enabled()
@tf_export(v1=["enable_resource_variables"])
def enable_resource_variables():
  """Creates resource variables by default.
  Resource variables are improved versions of TensorFlow variables with a
  well-defined memory model. Accessing a resource variable reads its value, and
  all ops which access a specific read value of the variable are guaranteed to
  see the same value for that tensor. Writes which happen after a read (by
  having a control or data dependency on the read) are guaranteed not to affect
  the value of the read tensor, and similarly writes which happen before a read
  are guaranteed to affect the value. No guarantees are made about unordered
  read/write pairs.
  Calling tf.enable_resource_variables() lets you opt-in to this TensorFlow 2.0
  feature.
  """
  global _DEFAULT_USE_RESOURCE
  _DEFAULT_USE_RESOURCE = True
  # Record the opt-in for TensorFlow's API usage monitoring.
  _api_usage_gauge.get_cell().set(True)
@tf_export(v1=["resource_variables_enabled"])
def resource_variables_enabled():
  """Returns `True` if resource variables are enabled.
  Resource variables are improved versions of TensorFlow variables with a
  well-defined memory model. Accessing a resource variable reads its value, and
  all ops which access a specific read value of the variable are guaranteed to
  see the same value for that tensor. Writes which happen after a read (by
  having a control or data dependency on the read) are guaranteed not to affect
  the value of the read tensor, and similarly writes which happen before a read
  are guaranteed to affect the value. No guarantees are made about unordered
  read/write pairs.
  Calling tf.enable_resource_variables() lets you opt-in to this TensorFlow 2.0
  feature.
  """
  global _DEFAULT_USE_RESOURCE
  # Read-only accessor for the module-level default set by
  # enable/disable_resource_variables().
  return _DEFAULT_USE_RESOURCE
@deprecation.deprecated(
    None, "non-resource variables are not supported in the long term")
@tf_export(v1=["disable_resource_variables"])
def disable_resource_variables():
  """Opts out of resource variables.
  If your code needs tf.disable_resource_variables() to be called to work
  properly please file a bug.
  """
  global _DEFAULT_USE_RESOURCE
  _DEFAULT_USE_RESOURCE = False
  # Record the opt-out for TensorFlow's API usage monitoring.
  _api_usage_gauge.get_cell().set(False)
class _VariableStore(object):
"""Variable store that carries a number of named Variables.
New variable names and new variables can be created; all stored
variables are initialized with the initializer passed to __init__.
Attributes:
vars: a dictionary with string names (same as passed in GetVar) as keys and
the corresponding TensorFlow Variables as values.
"""
  def __init__(self):
    """Create an empty variable store."""
    self._vars = {}  # A dictionary of the stored TensorFlow variables.
    self._partitioned_vars = {}  # A dict of the stored PartitionedVariables.
    # When True, eagerly-created variables are also retained in this store so
    # they can be reused (get_variable() forces reuse=AUTO_REUSE in that case).
    self._store_eager_variables = False
def get_variable(self,
name,
shape=None,
dtype=dtypes.float32,
initializer=None,
regularizer=None,
reuse=None,
trainable=None,
collections=None,
caching_device=None,
partitioner=None,
validate_shape=True,
use_resource=None,
custom_getter=None,
constraint=None,
synchronization=VariableSynchronization.AUTO,
aggregation=VariableAggregation.NONE):
"""Gets an existing variable with these parameters or create a new one.
If a variable with the given name is already stored, we return the stored
variable. Otherwise, we create a new one.
Set `reuse` to `True` when you only want to reuse existing Variables.
Set `reuse` to `False` when you only want to create new Variables.
Set `reuse` to None (the default) or tf.compat.v1.AUTO_REUSE when you want
variables to be created if they don't exist or returned if they do.
If initializer is `None` (the default), the default initializer passed in
the constructor is used. If that one is `None` too, we use a new
`glorot_uniform_initializer`. If initializer is a Tensor, we use
it as a value and derive the shape from the initializer.
If a partitioner is provided, a `PartitionedVariable` is returned.
Accessing this object as a `Tensor` returns the shards concatenated along
the partition axis.
Some useful partitioners are available. See, e.g.,
`variable_axis_size_partitioner` and `min_max_variable_partitioner`.
Args:
name: The name of the new or existing variable.
shape: Shape of the new or existing variable.
dtype: Type of the new or existing variable (defaults to `DT_FLOAT`).
initializer: Initializer for the variable.
regularizer: A (Tensor -> Tensor or None) function; the result of applying
it on a newly created variable will be added to the collection
GraphKeys.REGULARIZATION_LOSSES and can be used for regularization.
reuse: a Boolean, None, or tf.AUTO_REUSE. Controls reuse or creation of
variables. When eager execution is enabled this argument is always
forced to be False.
trainable: If `True` also add the variable to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). `trainable`
defaults to `True` unless `synchronization` is set to `ON_READ`.
collections: List of graph collections keys to add the `Variable` to.
Defaults to `[GraphKeys.GLOBAL_VARIABLES]` (see `tf.Variable`).
caching_device: Optional device string or function describing where the
Variable should be cached for reading. Defaults to the Variable's
device. If not `None`, caches on another device. Typical use is to
cache on the device where the Ops using the `Variable` reside, to
deduplicate copying through `Switch` and other conditional statements.
partitioner: Optional callable that accepts a fully defined `TensorShape`
and dtype of the `Variable` to be created, and returns a list of
partitions for each axis (currently only one axis can be partitioned).
validate_shape: If False, allows the variable to be initialized with a
value of unknown shape. If True, the default, the shape of initial_value
must be known.
use_resource: If False, creates a regular Variable. If True, creates
instead an experimental ResourceVariable which has well-defined
semantics. Defaults to False (will later change to True). When eager
execution is enabled this argument is always forced to be true.
custom_getter: Callable that takes as a first argument the true getter,
and allows overwriting the internal get_variable method. The signature
of `custom_getter` should match that of this method,
but the most future-proof version will allow for changes: `def
custom_getter(getter, *args, **kwargs)`. Direct access to
all `get_variable` parameters is also allowed: `def
custom_getter(getter, name, *args, **kwargs)`. A simple identity
custom getter that simply creates variables with modified names is:
```python
def custom_getter(getter, name, *args, **kwargs): return getter(name +
'_suffix', *args, **kwargs) ```
constraint: An optional projection function to be applied to the variable
after being updated by an `Optimizer` (e.g. used to implement norm
constraints or value constraints for layer weights). The function must
take as input the unprojected Tensor representing the value of the
variable and return the Tensor for the projected value (which must have
the same shape). Constraints are not safe to use when doing asynchronous
distributed training.
synchronization: Indicates when a distributed a variable will be
aggregated. Accepted values are constants defined in the class
`tf.VariableSynchronization`. By default the synchronization is set to
`AUTO` and the current `DistributionStrategy` chooses when to
synchronize. If `synchronization` is set to `ON_READ`, `trainable` must
not be set to `True`.
aggregation: Indicates how a distributed variable will be aggregated.
Accepted values are constants defined in the class
`tf.VariableAggregation`.
Returns:
The created or existing `Variable` (or `PartitionedVariable`, if a
partitioner was used).
Raises:
ValueError: when creating a new variable and shape is not declared,
when reusing a variable and specifying a conflicting shape,
or when violating reuse during variable creation.
RuntimeError: when eager execution is enabled and not called from an
EagerVariableStore.
"""
if custom_getter is not None and not callable(custom_getter):
raise ValueError("Passed a custom_getter which is not callable: %s" %
custom_getter)
with ops.init_scope():
if context.executing_eagerly():
# Variable creation and initialization takes place in `init_scope`s;
# as such, if an `init_scope` lifts us into the eager context, then we
# need to use `ResourceVariable`s.
use_resource = True
# Note that it's fine to reuse eager variables whose initialization was
# lifted from a function-building graph into the eager context (that's why
# the following clause is not wrapped in an `init_scope`); lifted variables
# are tracked by the graph's `VariableStore`.
if context.executing_eagerly():
if not self._store_eager_variables and reuse:
raise RuntimeError(
"When eager execution is enabled variable reuse is only supported"
" when an EagerVariableStore is active. See the documentation on"
" EagerVariableStore for example usage.")
if self._store_eager_variables:
reuse = AUTO_REUSE
# If a *_ref type is passed in an error would be triggered further down the
# stack. We prevent this using base_dtype to get a non-ref version of the
# type, before doing anything else. When _ref types are removed in favor of
# resources, this line can be removed.
try:
dtype = dtype.base_dtype
except AttributeError:
# .base_dtype not existing means that we will try and use the raw dtype
# which was passed in - this might be a NumPy type which is valid.
pass
# This is the main logic of get_variable. However, custom_getter
# may override this logic. So we save it as a callable and pass
# it to custom_getter.
# Note: the parameters of _true_getter, and their documentation, match
# *exactly* item-for-item with the docstring of this method.
def _true_getter( # pylint: disable=missing-docstring
name,
shape=None,
dtype=dtypes.float32,
initializer=None,
regularizer=None,
reuse=None,
trainable=None,
collections=None,
caching_device=None,
partitioner=None,
validate_shape=True,
use_resource=None,
constraint=None,
synchronization=VariableSynchronization.AUTO,
aggregation=VariableAggregation.NONE):
is_scalar = (
shape is not None and isinstance(shape, collections_lib.Sequence) and
not shape)
# Partitioned variable case
if partitioner is not None and not is_scalar:
if not callable(partitioner):
raise ValueError("Partitioner must be callable, but received: %s" %
partitioner)
with ops.name_scope(None):
return self._get_partitioned_variable(
name=name,
shape=shape,
dtype=dtype,
initializer=initializer,
regularizer=regularizer,
reuse=reuse,
trainable=trainable,
collections=collections,
caching_device=caching_device,
partitioner=partitioner,
validate_shape=validate_shape,
use_resource=use_resource,
constraint=constraint,
synchronization=synchronization,
aggregation=aggregation)
# Special case for partitioned variable to allow reuse without having to
# specify partitioner.
if (reuse is True and partitioner is None
and name in self._partitioned_vars):
return self._get_partitioned_variable(
name=name,
shape=shape,
dtype=dtype,
initializer=initializer,
regularizer=regularizer,
reuse=reuse,
trainable=trainable,
collections=collections,
caching_device=caching_device,
partitioner=None,
validate_shape=validate_shape,
use_resource=use_resource,
constraint=constraint,
synchronization=synchronization,
aggregation=aggregation)
# Single variable case
if "%s/part_0" % name in self._vars:
raise ValueError(
"No partitioner was provided, but a partitioned version of the "
"variable was found: %s/part_0. Perhaps a variable of the same "
"name was already created with partitioning?" % name)
return self._get_single_variable(
name=name,
shape=shape,
dtype=dtype,
initializer=initializer,
regularizer=regularizer,
reuse=reuse,
trainable=trainable,
collections=collections,
caching_device=caching_device,
validate_shape=validate_shape,
use_resource=use_resource,
constraint=constraint,
synchronization=synchronization,
aggregation=aggregation)
synchronization, aggregation, trainable = (
variables.validate_synchronization_aggregation_trainable(
synchronization, aggregation, trainable, name))
if custom_getter is not None:
# Handle backwards compatibility with getter arguments that were added
# to the API after users started writing custom getters.
custom_getter_kwargs = {
"getter": _true_getter,
"name": name,
"shape": shape,
"dtype": dtype,
"initializer": initializer,
"regularizer": regularizer,
"reuse": reuse,
"trainable": trainable,
"collections": collections,
"caching_device": caching_device,
"partitioner": partitioner,
"validate_shape": validate_shape,
"use_resource": use_resource,
"synchronization": synchronization,
"aggregation": aggregation,
}
# `fn_args` and `has_kwargs` can handle functions, `functools.partial`,
# `lambda`.
if ("constraint" in function_utils.fn_args(custom_getter) or
function_utils.has_kwargs(custom_getter)):
custom_getter_kwargs["constraint"] = constraint
return custom_getter(**custom_getter_kwargs)
else:
return _true_getter(
name,
shape=shape,
dtype=dtype,
initializer=initializer,
regularizer=regularizer,
reuse=reuse,
trainable=trainable,
collections=collections,
caching_device=caching_device,
partitioner=partitioner,
validate_shape=validate_shape,
use_resource=use_resource,
constraint=constraint,
synchronization=synchronization,
aggregation=aggregation)
  def _get_partitioned_variable(self,
                                name,
                                partitioner,
                                shape=None,
                                dtype=dtypes.float32,
                                initializer=None,
                                regularizer=None,
                                reuse=None,
                                trainable=None,
                                collections=None,
                                caching_device=None,
                                validate_shape=True,
                                use_resource=None,
                                constraint=None,
                                synchronization=VariableSynchronization.AUTO,
                                aggregation=VariableAggregation.NONE):
    """Gets or creates a sharded variable list with these parameters.

    The `partitioner` must be a callable that accepts a fully defined
    `TensorShape` and returns a sequence of integers (the `partitions`).
    These integers describe how to partition the given sharded `Variable`
    along the given dimension. That is, `partitions[1] = 3` means split
    the `Variable` into 3 shards along dimension 1. Currently, sharding along
    only one axis is supported.

    If the list of variables with the given name (prefix) is already stored,
    we return the stored variables. Otherwise, we create a new one.

    Set `reuse` to `True` when you only want to reuse existing Variables.
    Set `reuse` to `False` when you only want to create new Variables.
    Set `reuse` to None (the default) or tf.compat.v1.AUTO_REUSE when you want
    variables to be created if they don't exist or returned if they do.

    If initializer is `None` (the default), the default initializer passed in
    the constructor is used. If that one is `None` too, we use a new
    `glorot_uniform_initializer`. If initializer is a Tensor, we use
    it as a value and derive the shape from the initializer.

    If the initializer is a callable, then it will be called for each
    shard. Otherwise the initializer should match the shape of the entire
    sharded Variable, and it will be sliced accordingly for each shard.

    Some useful partitioners are available. See, e.g.,
    `variable_axis_size_partitioner` and `min_max_variable_partitioner`.

    Args:
      name: the name of the new or existing sharded variable.
      partitioner: Optional callable that accepts a fully defined `TensorShape`
        and `dtype` of the Variable to be created, and returns a list of
        partitions for each axis (currently only one axis can be partitioned).
      shape: shape of the new or existing sharded variable.
      dtype: type of the new or existing sharded variable (defaults to
        `DT_FLOAT`).
      initializer: initializer for the sharded variable.
      regularizer: a (Tensor -> Tensor or None) function; the result of applying
        it on a newly created variable will be added to the collection
        GraphKeys.REGULARIZATION_LOSSES and can be used for regularization.
      reuse: a Boolean, None, or tf.AUTO_REUSE. Controls reuse or creation of
        variables.
      trainable: If `True` also add the variable to the graph collection
        `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
      collections: List of graph collections keys to add the Variable to.
        Defaults to `[GraphKeys.GLOBAL_VARIABLES]` (see `tf.Variable`).
      caching_device: Optional device string or function describing where the
        Variable should be cached for reading. Defaults to the Variable's
        device. If not `None`, caches on another device. Typical use is to
        cache on the device where the Ops using the Variable reside, to
        deduplicate copying through `Switch` and other conditional statements.
      validate_shape: If False, allows the variable to be initialized with a
        value of unknown shape. If True, the default, the shape of initial_value
        must be known.
      use_resource: If False, creates a regular Variable. If True, creates an
        experimental ResourceVariable which has well-defined semantics. Defaults
        to False (will later change to True).
      constraint: An optional projection function to be applied to the variable
        after being updated by an `Optimizer` (e.g. used to implement norm
        constraints or value constraints for layer weights). The function must
        take as input the unprojected Tensor representing the value of the
        variable and return the Tensor for the projected value (which must have
        the same shape). Constraints are not safe to use when doing asynchronous
        distributed training.
      synchronization: Indicates when a distributed a variable will be
        aggregated. Accepted values are constants defined in the class
        `tf.VariableSynchronization`. By default the synchronization is set to
        `AUTO` and the current `DistributionStrategy` chooses when to
        synchronize. If `synchronization` is set to `ON_READ`, `trainable` must
        not be set to `True`.
      aggregation: Indicates how a distributed variable will be aggregated.
        Accepted values are constants defined in the class
        `tf.VariableAggregation`.

    Returns:
      A `PartitionedVariable` object.

    Raises:
      ValueError: when creating a new variable and shape is not declared,
        when reusing a variable and specifying a conflicting shape,
        when violating reuse during variable creation, or if an existing
        sharded variable exists for the given name but with different sharding.
    """
    # A Tensor initializer supplies both values and (part of) the shape.
    initializing_from_value = initializer is not None and isinstance(
        initializer, ops.Tensor)
    # Disallow mixing partitioned and unpartitioned variables of the same name.
    if name in self._vars:
      raise ValueError(
          "A partitioner was provided, but an unpartitioned version of the "
          "variable was found: %s. Perhaps a variable of the same name was "
          "already created without partitioning?" % name)

    shape = tensor_shape.as_shape(shape)
    if initializing_from_value:
      shape = shape.merge_with(initializer.get_shape())

    # Only compute a fresh partitioning when we may be creating shards (or a
    # partitioner was explicitly supplied); a pure-reuse call without a
    # partitioner falls back to the stored variable's partitioning.
    partitions = None
    if not reuse or partitioner:
      partitions = _call_partitioner(partitioner, shape, dtype)

    # Reuse path: validate that the request matches the stored variable.
    if name in self._partitioned_vars:
      if reuse is False:
        raise ValueError(
            "Partitioned variable with name %s already exists. Did you mean to "
            "set reuse=True or reuse=tf.AUTO_REUSE in VarScope?" % name)

      existing_var = self._partitioned_vars[name]
      if not shape.is_compatible_with(existing_var.get_shape()):
        raise ValueError(
            "Trying to reuse partitioned variable %s, but specified shape %s "
            "and found shape %s." % (name, shape, existing_var.get_shape()))
      if not dtype.is_compatible_with(existing_var.dtype):
        raise ValueError(
            "Trying to reuse partitioned variable %s, but specified dtype %s "
            "and found dtype %s." % (name, dtype.name, existing_var.dtype.name))

      # pylint: disable=protected-access
      if (partitions is not None and
          existing_var._get_partitions() != partitions):
        raise ValueError(
            "Trying to reuse partitioned variable %s, but specified partitions "
            "%s and found partitions %s." %
            (name, partitions, existing_var._get_partitions()))
      # pylint: enable=protected-access

      return existing_var

    # Creation path below; an explicit reuse=True means the caller expected the
    # variable to already exist.
    if reuse is True:
      raise ValueError("PartitionedVariable %s does not exist, or was not "
                       "created with tf.get_variable(). Did you mean to set "
                       "reuse=False or reuse=tf.AUTO_REUSE in VarScope?" % name)

    slice_dim, num_slices = _get_slice_dim_and_num_slices(partitions)

    # If shards with this prefix already exist, the requested partitioning must
    # reproduce exactly the same number of shards (no fewer, no extras).
    if "%s/part_0" % name in self._vars:
      if "%s/part_%d" % (name, num_slices - 1) not in self._vars:
        raise ValueError(
            "Partitioner returned a different partitioning than what was "
            "already found. Partitioner returned %d shards, and shard "
            "%s/part_0 was found, but %s/part_%d was not." %
            (num_slices, name, name, num_slices - 1))
      if "%s/part_%d" % (name, num_slices) in self._vars:
        raise ValueError(
            "Partitioner returned a different partitioning than what was "
            "already found. Partitioner returned %d shards, and shard "
            "%s/part_0 was found, but so was the extra shard %s/part_%d." %
            (num_slices, name, name, num_slices))

    # Create (or fetch) each shard as an ordinary single variable, recording
    # its save-slice metadata so checkpoints see one logical variable.
    vs = []
    for i, (var_offset, var_shape) in enumerate(
        _iter_slices(shape.as_list(), num_slices, slice_dim)):
      partition_info = _PartitionInfo(
          full_shape=shape.as_list(), var_offset=var_offset)
      var_full_name = "%s/part_%d" % (name, i)
      with ops.name_scope(var_full_name + "/PartitionedInitializer"):
        # Create the tensor to initialize the variable with default value.
        if initializer is None:
          init, initializing_from_value = self._get_default_initializer(
              name=name, shape=shape, dtype=dtype)
          if initializing_from_value:
            init_shape = None
          else:
            init_shape = var_shape
        elif callable(initializer):
          init = initializer
          init_shape = var_shape
        elif isinstance(initializer, ops.Tensor):
          # Non-callable Tensor initializer: slice out this shard's piece.
          init = array_ops.slice(initializer, var_offset, var_shape)
          # Use the dtype of the given tensor.
          dtype = init.dtype.base_dtype
          init_shape = None
        else:
          init = ops.convert_to_tensor(initializer, dtype=dtype)
          init = array_ops.slice(init, var_offset, var_shape)
          init_shape = None

      with ops.name_scope(None):
        var = self._get_single_variable(
            name=var_full_name,
            shape=init_shape,
            dtype=dtype,
            initializer=init,
            partition_info=partition_info,
            regularizer=regularizer,
            reuse=reuse,
            trainable=trainable,
            collections=collections,
            caching_device=caching_device,
            validate_shape=validate_shape,
            use_resource=use_resource,
            constraint=constraint,
            synchronization=synchronization,
            aggregation=aggregation)

        # pylint: disable=protected-access
        var._set_save_slice_info(
            variables.Variable.SaveSliceInfo(name, shape.as_list(), var_offset,
                                             var_shape))
        vs.append(var)
        # pylint: enable=protected-access

    partitioned_var = variables.PartitionedVariable(
        name=name,
        shape=shape,
        dtype=dtype,
        variable_list=vs,
        partitions=partitions)
    # In eager mode, only remember the variable when an EagerVariableStore is
    # active (otherwise references would keep the shards alive indefinitely).
    if not context.executing_eagerly() or self._store_eager_variables:
      self._partitioned_vars[name] = partitioned_var
    return partitioned_var
  def _get_single_variable(self,
                           name,
                           shape=None,
                           dtype=dtypes.float32,
                           initializer=None,
                           regularizer=None,
                           partition_info=None,
                           reuse=None,
                           trainable=None,
                           collections=None,
                           caching_device=None,
                           validate_shape=True,
                           use_resource=None,
                           constraint=None,
                           synchronization=VariableSynchronization.AUTO,
                           aggregation=VariableAggregation.NONE):
    """Get or create a single Variable (e.g. a shard or entire variable).

    See the documentation of get_variable above (ignore partitioning components)
    for details.

    Args:
      name: see get_variable.
      shape: see get_variable.
      dtype: see get_variable.
      initializer: see get_variable.
      regularizer: see get_variable.
      partition_info: _PartitionInfo object.
      reuse: see get_variable.
      trainable: see get_variable.
      collections: see get_variable.
      caching_device: see get_variable.
      validate_shape: see get_variable.
      use_resource: see get_variable.
      constraint: see get_variable.
      synchronization: see get_variable.
      aggregation: see get_variable.

    Returns:
      A Variable. See documentation of get_variable above.

    Raises:
      ValueError: See documentation of get_variable above.
    """
    # Set to true if initializer is a constant.
    initializing_from_value = False
    if initializer is not None and not callable(initializer):
      initializing_from_value = True
    # A constant initializer already determines the shape; forbid both.
    if shape is not None and initializing_from_value:
      raise ValueError("If initializer is a constant, do not specify shape.")

    dtype = dtypes.as_dtype(dtype)
    shape = tensor_shape.as_shape(shape)

    if name in self._vars:
      # Here we handle the case when returning an existing variable.
      if reuse is False:
        var = self._vars[name]
        err_msg = ("Variable %s already exists, disallowed."
                   " Did you mean to set reuse=True or "
                   "reuse=tf.AUTO_REUSE in VarScope?" % name)
        # ResourceVariables don't have an op associated with so no traceback
        if isinstance(var, resource_variable_ops.ResourceVariable):
          raise ValueError(err_msg)
        tb = var.op.traceback[::-1]
        # Throw away internal tf entries and only take a few lines. In some
        # cases the traceback can be longer (e.g. if someone uses factory
        # functions to create variables) so we take more than needed in the
        # default case.
        tb = [x for x in tb if "tensorflow/python" not in x[0]][:5]
        raise ValueError("%s Originally defined at:\n\n%s" %
                         (err_msg, "".join(traceback.format_list(tb))))
      found_var = self._vars[name]
      # Shape/dtype compatibility checks before handing back the stored var.
      if not shape.is_compatible_with(found_var.get_shape()):
        raise ValueError("Trying to share variable %s, but specified shape %s"
                         " and found shape %s." %
                         (name, shape, found_var.get_shape()))
      if not dtype.is_compatible_with(found_var.dtype):
        dtype_str = dtype.name
        found_type_str = found_var.dtype.name
        raise ValueError("Trying to share variable %s, but specified dtype %s"
                         " and found dtype %s." %
                         (name, dtype_str, found_type_str))
      return found_var

    # The code below handles only the case of creating a new variable.
    if reuse is True:
      raise ValueError("Variable %s does not exist, or was not created with "
                       "tf.get_variable(). Did you mean to set "
                       "reuse=tf.AUTO_REUSE in VarScope?" % name)

    # Create the tensor to initialize the variable with default value.
    if initializer is None:
      initializer, initializing_from_value = self._get_default_initializer(
          name=name, shape=shape, dtype=dtype)
    # Enter an init scope when creating the initializer.
    with ops.init_scope():
      if initializing_from_value:
        init_val = initializer
        variable_dtype = None
      else:
        # Instantiate initializer if provided initializer is a type object.
        if tf_inspect.isclass(initializer):
          initializer = initializer()
        if shape is not None and shape.is_fully_defined():
          # Defer the initializer call so it runs inside the variable's own
          # initialization machinery.
          init_val = lambda: initializer(  # pylint: disable=g-long-lambda
              shape.as_list(),
              dtype=dtype,
              partition_info=partition_info)
          variable_dtype = dtype.base_dtype
        elif len(tf_inspect.getargspec(initializer).args) == len(
            tf_inspect.getargspec(initializer).defaults or []):
          # Initializer is callable with no required args; use it directly.
          init_val = initializer
          variable_dtype = None
        else:
          raise ValueError("The initializer passed is not valid. It should "
                           "be a callable with no arguments and the "
                           "shape should not be provided or an instance of "
                           "`tf.keras.initializers.*' and `shape` should be "
                           "fully defined.")

    # Create the variable.
    if use_resource is None:
      # Set the default value if unspecified.
      use_resource = _DEFAULT_USE_RESOURCE
    v = variables.VariableV1(
        initial_value=init_val,
        name=name,
        trainable=trainable,
        collections=collections,
        caching_device=caching_device,
        dtype=variable_dtype,
        validate_shape=validate_shape,
        constraint=constraint,
        use_resource=use_resource,
        synchronization=synchronization,
        aggregation=aggregation)
    if context.executing_eagerly() and self._store_eager_variables:
      # Under an EagerVariableStore, mimic graph-mode collection bookkeeping.
      if collections:
        ops.add_to_collections(collections, v)
      else:
        ops.add_to_collection(ops.GraphKeys.GLOBAL_VARIABLES, v)
      if trainable:
        ops.add_to_collection(ops.GraphKeys.TRAINABLE_VARIABLES, v)

    if not context.executing_eagerly() or self._store_eager_variables:
      # In eager mode we do not want to keep default references to Variable
      # objects as this will prevent their memory from being released.
      self._vars[name] = v
    logging.vlog(1, "Created variable %s with shape %s and init %s", v.name,
                 format(shape), initializer)

    # Run the regularizer if requested and save the resulting loss.
    if regularizer:
      with ops.colocate_with(v):
        with ops.name_scope(name + "/Regularizer/"):
          with ops.init_scope():
            loss = regularizer(v)
        if loss is not None:
          if context.executing_eagerly():
            v_name = "v_%s" % type(v)
            loss_name = "loss_%s" % type(loss)
          else:
            v_name = v.name
            loss_name = loss.name
          logging.vlog(
              1, "Applied regularizer to %s and added the result %s "
              "to REGULARIZATION_LOSSES.", v_name, loss_name)
          ops.add_to_collection(ops.GraphKeys.REGULARIZATION_LOSSES, loss)
    return v
# Initialize variable when no initializer provided
def _get_default_initializer(self, name, shape=None, dtype=dtypes.float32):
"""Provide a default initializer and a corresponding value.
Args:
name: see get_variable.
shape: see get_variable.
dtype: see get_variable.
Returns:
initializer and initializing_from_value. See get_variable above.
Raises:
ValueError: When giving unsupported dtype.
"""
del shape
# If dtype is DT_FLOAT, provide a uniform unit scaling initializer
if dtype.is_floating:
initializer = init_ops.glorot_uniform_initializer()
initializing_from_value = False
# If dtype is DT_INT/DT_UINT, provide a default value `zero`
# If dtype is DT_BOOL, provide a default value `FALSE`
elif (dtype.is_integer or dtype.is_unsigned or dtype.is_bool or
dtype == dtypes.string):
initializer = init_ops.zeros_initializer()
initializing_from_value = False
# NOTES:Do we need to support for handling DT_STRING and DT_COMPLEX here?
else:
raise ValueError("An initializer for variable %s of %s is required" %
(name, dtype.base_dtype))
return initializer, initializing_from_value
# To stop regularization, pass this function as the `regularizer` argument.
@tf_export(v1=["no_regularizer"])
def no_regularizer(_):
  """Use this function to prevent regularization of variables.

  Always returns `None`, so no loss is ever added to
  `GraphKeys.REGULARIZATION_LOSSES` for the variable.
  """
  return None
# TODO(alive): support caching devices and partitioned variables in Eager mode.
@tf_export(v1=["VariableScope"])
class VariableScope(object):
  """Variable scope object to carry defaults to provide to `get_variable`.

  Many of the arguments we need for `get_variable` in a variable store are most
  easily handled with a context. This object is used for the defaults.

  Attributes:
    name: name of the current scope, used as prefix in get_variable.
    initializer: default initializer passed to get_variable.
    regularizer: default regularizer passed to get_variable.
    reuse: Boolean, None, or tf.compat.v1.AUTO_REUSE, setting the reuse in
      get_variable. When eager execution is enabled this argument is always
      forced to be False.
    caching_device: string, callable, or None: the caching device passed to
      get_variable.
    partitioner: callable or `None`: the partitioner passed to `get_variable`.
    custom_getter: default custom getter passed to get_variable.
    name_scope: The name passed to `tf.name_scope`.
    dtype: default type passed to get_variable (defaults to DT_FLOAT).
    use_resource: if False, create a normal Variable; if True create an
      experimental ResourceVariable with well-defined semantics. Defaults to
      False (will later change to True). When eager execution is enabled this
      argument is always forced to be True.
    constraint: An optional projection function to be applied to the variable
      after being updated by an `Optimizer` (e.g. used to implement norm
      constraints or value constraints for layer weights). The function must
      take as input the unprojected Tensor representing the value of the
      variable and return the Tensor for the projected value (which must have
      the same shape). Constraints are not safe to use when doing asynchronous
      distributed training.
  """

  def __init__(self,
               reuse,
               name="",
               initializer=None,
               regularizer=None,
               caching_device=None,
               partitioner=None,
               custom_getter=None,
               name_scope="",
               dtype=dtypes.float32,
               use_resource=None,
               constraint=None):
    """Creates a new VariableScope with the given properties."""
    self._name = name
    self._initializer = initializer
    self._regularizer = regularizer
    self._reuse = reuse
    self._caching_device = caching_device
    self._partitioner = partitioner
    self._custom_getter = custom_getter
    self._name_scope = name_scope
    self._dtype = dtype
    self._use_resource = use_resource
    self._constraint = constraint
    if context.executing_eagerly():
      # Under eager execution, caching devices are unsupported and variables
      # are always resource variables with AUTO_REUSE semantics.
      if self._caching_device is not None:
        raise NotImplementedError("Caching devices is not yet supported "
                                  "when eager execution is enabled.")
      self._reuse = AUTO_REUSE
      self._use_resource = True

  # Read-only accessors for the scope's stored defaults.
  @property
  def name(self):
    return self._name

  @property
  def original_name_scope(self):
    return self._name_scope

  @property
  def reuse(self):
    return self._reuse

  @property
  def initializer(self):
    return self._initializer

  @property
  def dtype(self):
    return self._dtype

  @property
  def use_resource(self):
    return self._use_resource

  @property
  def regularizer(self):
    return self._regularizer

  @property
  def caching_device(self):
    return self._caching_device

  @property
  def partitioner(self):
    return self._partitioner

  @property
  def custom_getter(self):
    return self._custom_getter

  @property
  def constraint(self):
    return self._constraint

  def reuse_variables(self):
    """Reuse variables in this scope."""
    self._reuse = True

  def set_initializer(self, initializer):
    """Set initializer for this scope."""
    self._initializer = initializer

  def set_dtype(self, dtype):
    """Set data type for this scope."""
    self._dtype = dtype

  def set_use_resource(self, use_resource):
    """Sets whether to use ResourceVariables for this scope."""
    if context.executing_eagerly() and not use_resource:
      # Eager execution only supports resource variables.
      raise ValueError("When eager execution is enabled, "
                       "use_resource cannot be set to false.")
    self._use_resource = use_resource

  def set_regularizer(self, regularizer):
    """Set regularizer for this scope."""
    self._regularizer = regularizer

  def set_caching_device(self, caching_device):
    """Set caching_device for this scope."""
    if context.executing_eagerly():
      raise NotImplementedError("Caching devices are not yet supported "
                                "when eager execution is enabled.")
    self._caching_device = caching_device

  def set_partitioner(self, partitioner):
    """Set partitioner for this scope."""
    self._partitioner = partitioner

  def set_custom_getter(self, custom_getter):
    """Set custom getter for this scope."""
    self._custom_getter = custom_getter

  def get_collection(self, name):
    """Get this scope's variables."""
    scope = self._name + "/" if self._name else ""
    return ops.get_collection(name, scope)

  def trainable_variables(self):
    """Get this scope's trainable variables."""
    return self.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)

  def global_variables(self):
    """Get this scope's global variables."""
    return self.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)

  def local_variables(self):
    """Get this scope's local variables."""
    return self.get_collection(ops.GraphKeys.LOCAL_VARIABLES)

  def get_variable(self,
                   var_store,
                   name,
                   shape=None,
                   dtype=None,
                   initializer=None,
                   regularizer=None,
                   reuse=None,
                   trainable=None,
                   collections=None,
                   caching_device=None,
                   partitioner=None,
                   validate_shape=True,
                   use_resource=None,
                   custom_getter=None,
                   constraint=None,
                   synchronization=VariableSynchronization.AUTO,
                   aggregation=VariableAggregation.NONE):
    """Gets an existing variable with this name or create a new one."""
    # Fill in unspecified arguments from the scope's stored defaults.
    if regularizer is None:
      regularizer = self._regularizer
    if caching_device is None:
      caching_device = self._caching_device
    if partitioner is None:
      partitioner = self._partitioner
    if custom_getter is None:
      custom_getter = self._custom_getter
    if context.executing_eagerly():
      # Eager execution forces fresh resource variables (no reuse).
      reuse = False
      use_resource = True
    else:
      if reuse is None:
        reuse = self._reuse
      if use_resource is None:
        use_resource = self._use_resource

    full_name = self.name + "/" + name if self.name else name
    # Variable names only depend on variable_scope (full_name here),
    # not name_scope, so we reset it below for the time of variable creation.
    with ops.name_scope(None):
      # Check that `initializer` dtype and `dtype` are consistent before
      # replacing them with defaults.
      if (dtype is not None and initializer is not None and
          not callable(initializer)):
        init_dtype = ops.convert_to_tensor(initializer).dtype.base_dtype
        if init_dtype != dtype:
          raise ValueError("Initializer type '%s' and explicit dtype '%s' "
                           "don't match." % (init_dtype, dtype))
      if initializer is None:
        initializer = self._initializer
      if constraint is None:
        constraint = self._constraint
      if dtype is None:
        dtype = self._dtype
      return var_store.get_variable(
          full_name,
          shape=shape,
          dtype=dtype,
          initializer=initializer,
          regularizer=regularizer,
          reuse=reuse,
          trainable=trainable,
          collections=collections,
          caching_device=caching_device,
          partitioner=partitioner,
          validate_shape=validate_shape,
          use_resource=use_resource,
          custom_getter=custom_getter,
          constraint=constraint,
          synchronization=synchronization,
          aggregation=aggregation)

  def _get_partitioned_variable(self,
                                var_store,
                                name,
                                shape=None,
                                dtype=None,
                                initializer=None,
                                regularizer=None,
                                trainable=None,
                                collections=None,
                                caching_device=None,
                                partitioner=None,
                                validate_shape=True,
                                use_resource=None,
                                constraint=None,
                                synchronization=VariableSynchronization.AUTO,
                                aggregation=VariableAggregation.NONE):
    """Gets an existing variable with this name or create a new one."""
    # Fill in unspecified arguments from the scope's stored defaults.
    if initializer is None:
      initializer = self._initializer
    if regularizer is None:
      regularizer = self._regularizer
    if constraint is None:
      constraint = self._constraint
    if caching_device is None:
      caching_device = self._caching_device
    if partitioner is None:
      partitioner = self._partitioner
    if dtype is None:
      dtype = self._dtype
    if use_resource is None:
      use_resource = self._use_resource

    if self._custom_getter is not None:
      raise ValueError(
          "Private access to _get_partitioned_variable is not allowed when "
          "a custom getter is set. Current custom getter: %s. "
          "It is likely that you're using create_partitioned_variables. "
          "If so, consider instead using get_variable with a non-empty "
          "partitioner parameter instead." % self._custom_getter)

    if partitioner is None:
      raise ValueError("No partitioner was specified")

    # This allows the variable scope name to be used as the variable name if
    # this function is invoked with an empty name arg, for backward
    # compatibility with create_partitioned_variables().
    full_name_list = []
    if self.name:
      full_name_list.append(self.name)
    if name:
      full_name_list.append(name)
    full_name = "/".join(full_name_list)

    # Variable names only depend on variable_scope (full_name here),
    # not name_scope, so we reset it below for the time of variable creation.
    with ops.name_scope(None):
      # pylint: disable=protected-access
      return var_store._get_partitioned_variable(
          full_name,
          shape=shape,
          dtype=dtype,
          initializer=initializer,
          regularizer=regularizer,
          reuse=self.reuse,
          trainable=trainable,
          collections=collections,
          caching_device=caching_device,
          partitioner=partitioner,
          validate_shape=validate_shape,
          use_resource=use_resource,
          constraint=constraint,
          synchronization=synchronization,
          aggregation=aggregation)
      # pylint: enable=protected-access
# Collection keys under which the default variable store and the variable
# scope store are stashed (see `_get_default_variable_store` and
# `get_variable_scope_store` below). Tuples are used, presumably to avoid
# colliding with string-named user collections — TODO confirm.
_VARSTORE_KEY = ("__variable_store",)
_VARSCOPESTORE_KEY = ("__varscope",)
class _VariableScopeStore(threading.local):
  """A thread local store for the current variable scope and scope counts."""

  def __init__(self):
    super(_VariableScopeStore, self).__init__()
    self.current_scope = VariableScope(False)
    self.variable_scopes_count = {}

  def open_variable_scope(self, scope_name):
    """Increments the open-count for `scope_name` (starting it at 1)."""
    counts = self.variable_scopes_count
    counts[scope_name] = counts.get(scope_name, 0) + 1

  def close_variable_subscopes(self, scope_name):
    """Resets counts for every subscope of `scope_name` (all scopes if None)."""
    for key in list(self.variable_scopes_count):
      if scope_name is None or key.startswith(scope_name + "/"):
        self.variable_scopes_count[key] = 0

  def variable_scope_count(self, scope_name):
    """Returns the current open-count for `scope_name` (0 if never opened)."""
    return self.variable_scopes_count.get(scope_name, 0)
def get_variable_scope_store():
  """Returns the variable scope store for current thread."""
  collected = ops.get_collection(_VARSCOPESTORE_KEY)
  if collected:
    # An instance was already stashed in the collection; reuse it.
    return collected[0]
  # First access on this thread/graph: create and register a fresh store.
  scope_store = _VariableScopeStore()
  ops.add_to_collection(_VARSCOPESTORE_KEY, scope_store)
  return scope_store
@tf_export(v1=["get_variable_scope"])
def get_variable_scope():
  """Returns the current (thread-local) variable scope."""
  return get_variable_scope_store().current_scope
def _get_default_variable_store():
  """Returns the default `_VariableStore` from the graph collection.

  Creates and registers one on first use.
  """
  existing = ops.get_collection(_VARSTORE_KEY)
  if existing:
    return existing[0]
  default_store = _VariableStore()
  ops.add_to_collection(_VARSTORE_KEY, default_store)
  return default_store
@tf_contextlib.contextmanager
def with_variable_store(store):
  """Context manager that temporarily installs `store` as the variable store.

  The previous contents of the `_VARSTORE_KEY` collection are restored on
  exit, even if the body raises.
  """
  store_collection = ops.get_collection_ref(_VARSTORE_KEY)
  previous = store_collection[:]
  store_collection[:] = [store]
  try:
    yield
  finally:
    store_collection[:] = previous
class EagerVariableStore(object):
  """Wrapper allowing functional layers to be used with eager execution.

  When eager execution is enabled Variables get deleted when they go out of
  scope, and are not stored in global collections by default. A lot of code
  (mostly the functional layers in tf.layers) assumes that variables are kept in
  a global list.

  EagerVariableStore can be used in conjunction with this code to make it
  eager-friendly. For example, to create a dense layer, use:

  ```
  container = tfe.EagerVariableStore()
  for input in dataset_iterator:
    with container.as_default():
      x = tf.compat.v1.layers.dense(input, name="l1")
  print(container.variables)  # Should print the variables used in the layer.
  ```
  """

  def __init__(self, store=None):
    """Wraps an existing eager `_VariableStore`, or creates a fresh one."""
    if store is not None:
      if not store._store_eager_variables:  # pylint: disable=protected-access
        raise ValueError("Cannot construct EagerVariableStore from a "
                         "VariableStore object that does not hold eager "
                         "variables.")
      self._store = store
    else:
      self._store = _VariableStore()
    self._store._store_eager_variables = True  # pylint: disable=protected-access

  def as_default(self):
    # Returns a context manager installing this store as the default.
    return with_variable_store(self._store)

  def variables(self):
    # All stored variables, sorted by name for determinism.
    return sorted(self._store._vars.values(), key=lambda x: x.name)  # pylint: disable=protected-access

  def trainable_variables(self):
    # pylint: disable=protected-access
    return sorted([x for x in self._store._vars.values() if x.trainable],
                  key=lambda x: x.name)
    # pylint: enable=protected-access

  def non_trainable_variables(self):
    # pylint: disable=protected-access
    return sorted([x for x in self._store._vars.values() if not x.trainable],
                  key=lambda x: x.name)
    # pylint: enable=protected-access

  def copy(self):
    """Copy this variable store and all of its contents.

    Variables contained in this store will be copied over to the new variable
    store, meaning that they can be modified without affecting the variables in
    this store.

    Returns:
      A new EagerVariableStore instance containing copied variables.
    """
    # pylint: disable=protected-access
    new_store = EagerVariableStore()
    for key, var in iteritems(self._store._vars):
      # Strip device out of variable name.
      try:
        index = var.name.index(":")
      except ValueError:
        stripped_var_name = var.name
      else:
        stripped_var_name = var.name[:index]
      # Create new variable with same value, name, and "trainable" flag.
      new_var = resource_variable_ops.ResourceVariable(
          var.read_value(), name=stripped_var_name, trainable=var.trainable)
      new_store._store._vars[key] = new_var
    return new_store
    # pylint: enable=protected-access
# The argument list for get_variable must match arguments to get_local_variable.
# So, if you are updating the arguments, also update arguments to
# get_local_variable below.
@tf_export(v1=["get_variable"])
def get_variable(name,
                 shape=None,
                 dtype=None,
                 initializer=None,
                 regularizer=None,
                 trainable=None,
                 collections=None,
                 caching_device=None,
                 partitioner=None,
                 validate_shape=True,
                 use_resource=None,
                 custom_getter=None,
                 constraint=None,
                 synchronization=VariableSynchronization.AUTO,
                 aggregation=VariableAggregation.NONE):
  # Full documentation is attached below via `get_variable_or_local_docstring`.
  # This body just forwards every argument to the current variable scope,
  # backed by the default (graph-keyed) variable store.
  current_scope = get_variable_scope()
  default_store = _get_default_variable_store()
  return current_scope.get_variable(
      default_store,
      name,
      shape=shape,
      dtype=dtype,
      initializer=initializer,
      regularizer=regularizer,
      trainable=trainable,
      collections=collections,
      caching_device=caching_device,
      partitioner=partitioner,
      validate_shape=validate_shape,
      use_resource=use_resource,
      custom_getter=custom_getter,
      constraint=constraint,
      synchronization=synchronization,
      aggregation=aggregation)
get_variable_or_local_docstring = ("""%s
%sThis function prefixes the name with the current variable scope
and performs reuse checks. See the
[Variable Scope How To](https://tensorflow.org/guide/variables)
for an extensive description of how reusing works. Here is a basic example:
```python
def foo():
with tf.variable_scope("foo", reuse=tf.AUTO_REUSE):
v = tf.get_variable("v", [1])
return v
v1 = foo() # Creates v.
v2 = foo() # Gets the same, existing v.
assert v1 == v2
```
If initializer is `None` (the default), the default initializer passed in
the variable scope will be used. If that one is `None` too, a
`glorot_uniform_initializer` will be used. The initializer can also be
a Tensor, in which case the variable is initialized to this value and shape.
Similarly, if the regularizer is `None` (the default), the default regularizer
passed in the variable scope will be used (if that is `None` too,
then by default no regularization is performed).
If a partitioner is provided, a `PartitionedVariable` is returned.
Accessing this object as a `Tensor` returns the shards concatenated along
the partition axis.
Some useful partitioners are available. See, e.g.,
`variable_axis_size_partitioner` and `min_max_variable_partitioner`.
Args:
name: The name of the new or existing variable.
shape: Shape of the new or existing variable.
dtype: Type of the new or existing variable (defaults to `DT_FLOAT`).
initializer: Initializer for the variable if one is created. Can either be
an initializer object or a Tensor. If it's a Tensor, its shape must be known
unless validate_shape is False.
regularizer: A (Tensor -> Tensor or None) function; the result of
applying it on a newly created variable will be added to the collection
`tf.GraphKeys.REGULARIZATION_LOSSES` and can be used for regularization.
%scollections: List of graph collections keys to add the Variable to.
Defaults to `[%s]` (see `tf.Variable`).
caching_device: Optional device string or function describing where the
Variable should be cached for reading. Defaults to the Variable's
device. If not `None`, caches on another device. Typical use is to
cache on the device where the Ops using the Variable reside, to
deduplicate copying through `Switch` and other conditional statements.
partitioner: Optional callable that accepts a fully defined `TensorShape`
and `dtype` of the Variable to be created, and returns a list of
partitions for each axis (currently only one axis can be partitioned).
validate_shape: If False, allows the variable to be initialized with a
value of unknown shape. If True, the default, the shape of initial_value
must be known. For this to be used the initializer must be a Tensor and
not an initializer object.
use_resource: If False, creates a regular Variable. If true, creates an
experimental ResourceVariable instead with well-defined semantics.
Defaults to False (will later change to True). When eager execution is
enabled this argument is always forced to be True.
custom_getter: Callable that takes as a first argument the true getter, and
allows overwriting the internal get_variable method.
The signature of `custom_getter` should match that of this method,
but the most future-proof version will allow for changes:
`def custom_getter(getter, *args, **kwargs)`. Direct access to
all `get_variable` parameters is also allowed:
`def custom_getter(getter, name, *args, **kwargs)`. A simple identity
custom getter that simply creates variables with modified names is:
```python
def custom_getter(getter, name, *args, **kwargs):
return getter(name + '_suffix', *args, **kwargs)
```
constraint: An optional projection function to be applied to the variable
after being updated by an `Optimizer` (e.g. used to implement norm
constraints or value constraints for layer weights). The function must
take as input the unprojected Tensor representing the value of the
variable and return the Tensor for the projected value
(which must have the same shape). Constraints are not safe to
use when doing asynchronous distributed training.
synchronization: Indicates when a distributed a variable will be
aggregated. Accepted values are constants defined in the class
`tf.VariableSynchronization`. By default the synchronization is set to
`AUTO` and the current `DistributionStrategy` chooses
when to synchronize. If `synchronization` is set to `ON_READ`,
`trainable` must not be set to `True`.
aggregation: Indicates how a distributed variable will be aggregated.
Accepted values are constants defined in the class
`tf.VariableAggregation`.
Returns:
The created or existing `Variable` (or `PartitionedVariable`, if a
partitioner was used).
Raises:
ValueError: when creating a new variable and shape is not declared,
when violating reuse during variable creation, or when `initializer` dtype
and `dtype` don't match. Reuse is set inside `variable_scope`.
""")
get_variable.__doc__ = get_variable_or_local_docstring % (
"Gets an existing variable with these parameters or create a new one.", "",
"trainable: If `True` also add the variable to the graph collection\n"
" `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).\n ",
"GraphKeys.GLOBAL_VARIABLES")
# The argument list for get_local_variable must match arguments to get_variable.
# So, if you are updating the arguments, also update arguments to get_variable.
@tf_export(v1=["get_local_variable"])
def get_local_variable(  # pylint: disable=missing-docstring
    name,
    shape=None,
    dtype=None,
    initializer=None,
    regularizer=None,
    trainable=False,  # pylint: disable=unused-argument
    collections=None,
    caching_device=None,
    partitioner=None,
    validate_shape=True,
    use_resource=None,
    custom_getter=None,
    constraint=None,
    synchronization=VariableSynchronization.AUTO,
    aggregation=VariableAggregation.NONE):
  # Full documentation is attached below via `get_variable_or_local_docstring`.
  #
  # Build a new collections list rather than appending to the caller's value:
  # the previous `collections += [...]` mutated a caller-owned list in place
  # (a side effect visible after the call) and raised TypeError for tuple
  # inputs.  `list(...)` accepts any iterable and leaves the original intact.
  if collections:
    collections = list(collections) + [ops.GraphKeys.LOCAL_VARIABLES]
  else:
    collections = [ops.GraphKeys.LOCAL_VARIABLES]
  # Local variables are never trainable: the `trainable` parameter exists only
  # so the signature mirrors get_variable, and is deliberately forced to False.
  return get_variable(
      name,
      shape=shape,
      dtype=dtype,
      initializer=initializer,
      regularizer=regularizer,
      trainable=False,
      collections=collections,
      caching_device=caching_device,
      partitioner=partitioner,
      validate_shape=validate_shape,
      use_resource=use_resource,
      synchronization=synchronization,
      aggregation=aggregation,
      custom_getter=custom_getter,
      constraint=constraint)
# Instantiate the shared template for `get_local_variable`: the extra Args
# slot is empty because `trainable` is fixed to False for local variables.
get_local_variable.__doc__ = get_variable_or_local_docstring % (
    "Gets an existing *local* variable or creates a new one.",
    "Behavior is the same as in `get_variable`, except that variables are\n"
    "added to the `LOCAL_VARIABLES` collection and `trainable` is set to\n"
    "`False`.\n", "", "GraphKeys.LOCAL_VARIABLES")
def _get_partitioned_variable(name,
                              shape=None,
                              dtype=None,
                              initializer=None,
                              regularizer=None,
                              trainable=True,
                              collections=None,
                              caching_device=None,
                              partitioner=None,
                              validate_shape=True,
                              use_resource=None,
                              constraint=None,
                              synchronization=VariableSynchronization.AUTO,
                              aggregation=VariableAggregation.NONE):
  """Gets or creates a sharded variable list with these parameters.

  The `partitioner` must be a callable that accepts a fully defined
  `TensorShape` and returns a sequence of integers (the `partitions`)
  describing how to shard the variable along one axis; e.g.
  `partitions[1] = 3` splits the variable into 3 shards along dimension 1
  (only one axis may be partitioned). If a variable list with the given name
  (prefix) already exists, it is returned; otherwise a new one is created.

  Initializer semantics match `get_variable`: `None` falls back to the
  scope's default, then to `glorot_uniform_initializer`; a Tensor initializer
  supplies both value and shape. A callable initializer is invoked once per
  shard; any other initializer must match the full sharded shape and is
  sliced per shard.

  Args:
    name: The name of the new or existing variable.
    shape: Shape of the new or existing variable.
    dtype: Type of the new or existing variable (defaults to `DT_FLOAT`).
    initializer: Initializer for the variable if one is created.
    regularizer: A (Tensor -> Tensor or None) function; its result on newly
      created variables is added to GraphKeys.REGULARIZATION_LOSSES.
    trainable: If `True` also add the variable to the graph collection
      `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
    collections: List of graph collections keys to add the Variable to.
      Defaults to `[GraphKeys.GLOBAL_VARIABLES]` (see `tf.Variable`).
    caching_device: Optional device string or function describing where the
      Variable should be cached for reading. Defaults to the Variable's
      device; if not `None`, caches on another device.
    partitioner: Callable that accepts a fully defined `TensorShape` and
      `dtype` of the Variable to be created, and returns a list of partitions
      for each axis (currently only one axis can be partitioned).
    validate_shape: If False, allows initialization from a value of unknown
      shape. If True, the default, the shape of initial_value must be known.
    use_resource: If False, creates a regular Variable. If True, creates an
      experimental ResourceVariable instead. Defaults to False.
    constraint: Optional projection function applied to the variable after
      updates by an `Optimizer`; maps the unprojected Tensor to a Tensor of
      the same shape. Not safe with asynchronous distributed training.
    synchronization: When a distributed variable will be aggregated; a
      constant from `tf.VariableSynchronization`. If set to `ON_READ`,
      `trainable` must not be `True`.
    aggregation: How a distributed variable will be aggregated; a constant
      from `tf.VariableAggregation`.

  Returns:
    A tuple `(shards, partitions)` where `shards` is the list of `Variable`
    shards and `partitions` is the output of the partitioner on the input
    shape.

  Raises:
    ValueError: when creating a new variable and shape is not declared, when
      violating reuse during variable creation (reuse is set inside
      `variable_scope`), or when the current scope has a custom getter.
  """
  # pylint: disable=protected-access
  current_scope = get_variable_scope()
  # Custom getters only wrap the public get_variable path; this private entry
  # point would silently bypass them, so refuse to proceed.
  if current_scope.custom_getter is not None:
    raise ValueError(
        "Private access to _get_partitioned_variable is not allowed when "
        "a custom getter is set. Current custom getter: %s. "
        "It is likely that you're using create_partitioned_variables. "
        "If so, consider instead using get_variable with a non-empty "
        "partitioner parameter instead." % current_scope.custom_getter)
  # Delegate to the scope so that scope-level defaults (initializer, reuse,
  # partitioner, ...) are applied consistently with `get_variable`.
  return current_scope._get_partitioned_variable(
      _get_default_variable_store(),
      name,
      shape=shape,
      dtype=dtype,
      initializer=initializer,
      regularizer=regularizer,
      trainable=trainable,
      collections=collections,
      caching_device=caching_device,
      partitioner=partitioner,
      validate_shape=validate_shape,
      use_resource=use_resource,
      constraint=constraint,
      synchronization=synchronization,
      aggregation=aggregation)
  # pylint: enable=protected-access
# Named like a function for compatibility with the previous
# @tf_contextlib.contextmanager definition.
class _pure_variable_scope(object):  # pylint: disable=invalid-name
  """A context for the variable_scope, see `variable_scope` for docs.

  Unlike `variable_scope`, this context manipulates only the variable-scope
  store: it does not open a name scope and does not switch the default graph.
  """

  def __init__(self,
               name_or_scope,
               reuse=None,
               initializer=None,
               regularizer=None,
               caching_device=None,
               partitioner=None,
               custom_getter=None,
               old_name_scope=None,
               dtype=dtypes.float32,
               use_resource=None,
               constraint=None):
    """Creates a context for the variable_scope, see `variable_scope` for docs.

    Note: this does not create a name scope.

    Args:
      name_or_scope: `string` or `VariableScope`: the scope to open.
      reuse: `True` or None, or tf.compat.v1.AUTO_REUSE; if `None`, we inherit
        the parent scope's reuse flag.
      initializer: default initializer for variables within this scope.
      regularizer: default regularizer for variables within this scope.
      caching_device: default caching device for variables within this scope.
      partitioner: default partitioner for variables within this scope.
      custom_getter: default custom getter for variables within this scope.
      old_name_scope: the original name scope when re-entering a variable
        scope.
      dtype: type of the variables within this scope (defaults to `DT_FLOAT`).
      use_resource: If False, variables in this scope will be regular
        Variables. If True, experimental ResourceVariables will be created
        instead, with well-defined semantics. Defaults to False (will later
        change to True).
      constraint: An optional projection function to be applied to the
        variable after being updated by an `Optimizer` (e.g. used to implement
        norm constraints or value constraints for layer weights). The function
        must take as input the unprojected Tensor representing the value of
        the variable and return the Tensor for the projected value (which must
        have the same shape). Constraints are not safe to use when doing
        asynchronous distributed training.
    """
    self._name_or_scope = name_or_scope
    self._reuse = reuse
    self._initializer = initializer
    self._regularizer = regularizer
    self._caching_device = caching_device
    self._partitioner = partitioner
    self._custom_getter = custom_getter
    self._old_name_scope = old_name_scope
    self._dtype = dtype
    self._use_resource = use_resource
    self._constraint = constraint
    self._var_store = _get_default_variable_store()
    self._var_scope_store = get_variable_scope_store()
    self._last_variable_scope_object = None
    if isinstance(self._name_or_scope, VariableScope):
      self._new_name = self._name_or_scope.name
      name_scope = self._name_or_scope._name_scope  # pylint: disable=protected-access
      # Handler for the case when we jump to a shared scope.  We create a new
      # VariableScope (self._var_scope_object) that contains a copy of the
      # provided shared scope, possibly with changed reuse and initializer, if
      # the user requested this.
      variable_scope_object = VariableScope(
          self._name_or_scope.reuse if not self._reuse else self._reuse,
          name=self._new_name,
          initializer=self._name_or_scope.initializer,
          regularizer=self._name_or_scope.regularizer,
          caching_device=self._name_or_scope.caching_device,
          partitioner=self._name_or_scope.partitioner,
          dtype=self._name_or_scope.dtype,
          custom_getter=self._name_or_scope.custom_getter,
          name_scope=name_scope,
          use_resource=self._name_or_scope.use_resource,
          constraint=self._constraint)
      # Explicit arguments passed to this context override the values copied
      # from the shared scope.
      if self._initializer is not None:
        variable_scope_object.set_initializer(self._initializer)
      if self._regularizer is not None:
        variable_scope_object.set_regularizer(self._regularizer)
      if self._caching_device is not None:
        variable_scope_object.set_caching_device(self._caching_device)
      if self._partitioner is not None:
        variable_scope_object.set_partitioner(self._partitioner)
      if self._custom_getter is not None:
        # Chain the new custom getter in front of the shared scope's one so
        # both run (new getter outermost).
        variable_scope_object.set_custom_getter(
            _maybe_wrap_custom_getter(self._custom_getter,
                                      self._name_or_scope.custom_getter))
      if self._dtype is not None:
        variable_scope_object.set_dtype(self._dtype)
      if self._use_resource is not None:
        variable_scope_object.set_use_resource(self._use_resource)
      # Cached here (not in __enter__) because the shared-scope object does
      # not depend on the scope that is current at entry time.
      self._cached_variable_scope_object = variable_scope_object

  def __enter__(self):
    """Begins the scope block.

    Returns:
      A VariableScope.

    Raises:
      ValueError: when trying to reuse within a create scope, or create within
        a reuse scope, or if reuse is not `None` or `True`.
      TypeError: when the types of some arguments are not appropriate.
    """
    self._old = self._var_scope_store.current_scope
    if isinstance(self._name_or_scope, VariableScope):
      self._var_scope_store.open_variable_scope(self._new_name)
      # Snapshot the per-name open counts so __exit__ can restore them when
      # jumping out of a re-entered (shared) scope.
      self._old_subscopes = copy.copy(
          self._var_scope_store.variable_scopes_count)
      variable_scope_object = self._cached_variable_scope_object
    else:
      # Handler for the case when we just prolong current variable scope.
      # VariableScope with name extended by the provided one, and inherited
      # reuse and initializer (except if the user provided values to set).
      self._new_name = (
          self._old.name + "/" +
          self._name_or_scope if self._old.name else self._name_or_scope)
      self._reuse = (self._reuse or
                     self._old.reuse)  # Re-using is inherited by sub-scopes.
      if self._old_name_scope is None:
        name_scope = self._name_or_scope
      else:
        name_scope = self._old_name_scope
      variable_scope_object = VariableScope(
          self._reuse,
          name=self._new_name,
          initializer=self._old.initializer,
          regularizer=self._old.regularizer,
          caching_device=self._old.caching_device,
          partitioner=self._old.partitioner,
          dtype=self._old.dtype,
          use_resource=self._old.use_resource,
          custom_getter=self._old.custom_getter,
          name_scope=name_scope,
          constraint=self._constraint)
      # Explicit arguments passed to this context override the values
      # inherited from the enclosing scope.
      if self._initializer is not None:
        variable_scope_object.set_initializer(self._initializer)
      if self._regularizer is not None:
        variable_scope_object.set_regularizer(self._regularizer)
      if self._caching_device is not None:
        variable_scope_object.set_caching_device(self._caching_device)
      if self._partitioner is not None:
        variable_scope_object.set_partitioner(self._partitioner)
      if self._custom_getter is not None:
        variable_scope_object.set_custom_getter(
            _maybe_wrap_custom_getter(self._custom_getter,
                                      self._old.custom_getter))
      if self._dtype is not None:
        variable_scope_object.set_dtype(self._dtype)
      if self._use_resource is not None:
        variable_scope_object.set_use_resource(self._use_resource)
      self._var_scope_store.open_variable_scope(self._new_name)
    self._var_scope_store.current_scope = variable_scope_object
    # Remember what we installed so __exit__ can detect improper nesting.
    self._last_variable_scope_object = variable_scope_object
    return variable_scope_object

  def __exit__(self, type_arg, value_arg, traceback_arg):
    # Exiting a scope that is not the innermost one indicates out-of-order
    # __enter__/__exit__ calls (e.g. manually driven context managers).
    if (self._var_scope_store.current_scope is
        not self._last_variable_scope_object):
      raise RuntimeError("Improper nesting of variable_scope.")
    # If jumping out from a non-prolonged scope, restore counts.
    if isinstance(self._name_or_scope, VariableScope):
      self._var_scope_store.variable_scopes_count = self._old_subscopes
    else:
      self._var_scope_store.close_variable_subscopes(self._new_name)
    self._var_scope_store.current_scope = self._old
def _maybe_wrap_custom_getter(custom_getter, old_getter):
"""Wrap a call to a custom_getter to use the old_getter internally."""
if old_getter is None:
return custom_getter
# The new custom_getter should call the old one
def wrapped_custom_getter(getter, *args, **kwargs):
# Call:
# custom_getter(
# lambda: old_getter(true_getter, ...), *args, **kwargs)
# which means custom_getter will call old_getter, which
# will call the true_getter, perform any intermediate
# processing, and return the results to the current
# getter, which will also perform additional processing.
return custom_getter(functools.partial(old_getter, getter), *args, **kwargs)
return wrapped_custom_getter
def _get_unique_variable_scope(prefix):
  """Get a name with the given prefix unique in the current variable scope."""
  var_scope_store = get_variable_scope_store()
  current_scope = get_variable_scope()
  # Open-scope counts are tracked under fully-qualified paths, so build the
  # qualified candidate from the enclosing scope's name (if any).
  if current_scope.name:
    full_name = current_scope.name + "/" + prefix
  else:
    full_name = prefix
  if var_scope_store.variable_scope_count(full_name) == 0:
    return prefix
  # The bare prefix is taken: probe "<prefix>_1", "<prefix>_2", ... until a
  # free suffix is found.  Probing uses the qualified path, while the return
  # value stays relative to the current scope, matching how callers open
  # sub-scopes.
  idx = 1
  while var_scope_store.variable_scope_count("%s_%d" % (full_name, idx)) > 0:
    idx += 1
  return "%s_%d" % (prefix, idx)
# Named like a function for backwards compatibility with the
# @tf_contextlib.contextmanager version, which was switched to a class to avoid
# some object creation overhead.
@tf_export(v1=["variable_scope"]) # pylint: disable=invalid-name
class variable_scope(object):
"""A context manager for defining ops that creates variables (layers).
This context manager validates that the (optional) `values` are from the same
graph, ensures that graph is the default graph, and pushes a name scope and a
variable scope.
If `name_or_scope` is not None, it is used as is. If `name_or_scope` is None,
then `default_name` is used. In that case, if the same name has been
previously used in the same scope, it will be made unique by appending `_N`
to it.
Variable scope allows you to create new variables and to share already created
ones while providing checks to not create or share by accident. For details,
see the [Variable Scope How To](https://tensorflow.org/guide/variables), here
we present only a few basic examples.
Simple example of how to create a new variable:
```python
with tf.compat.v1.variable_scope("foo"):
with tf.compat.v1.variable_scope("bar"):
v = tf.compat.v1.get_variable("v", [1])
assert v.name == "foo/bar/v:0"
```
Simple example of how to reenter a premade variable scope safely:
```python
with tf.compat.v1.variable_scope("foo") as vs:
pass
# Re-enter the variable scope.
with tf.compat.v1.variable_scope(vs,
auxiliary_name_scope=False) as vs1:
# Restore the original name_scope.
with tf.name_scope(vs1.original_name_scope):
v = tf.compat.v1.get_variable("v", [1])
assert v.name == "foo/v:0"
c = tf.constant([1], name="c")
assert c.name == "foo/c:0"
```
Basic example of sharing a variable AUTO_REUSE:
```python
def foo():
with tf.compat.v1.variable_scope("foo", reuse=tf.compat.v1.AUTO_REUSE):
v = tf.compat.v1.get_variable("v", [1])
return v
v1 = foo() # Creates v.
v2 = foo() # Gets the same, existing v.
assert v1 == v2
```
Basic example of sharing a variable with reuse=True:
```python
with tf.compat.v1.variable_scope("foo"):
v = tf.compat.v1.get_variable("v", [1])
with tf.compat.v1.variable_scope("foo", reuse=True):
v1 = tf.compat.v1.get_variable("v", [1])
assert v1 == v
```
Sharing a variable by capturing a scope and setting reuse:
```python
with tf.compat.v1.variable_scope("foo") as scope:
v = tf.compat.v1.get_variable("v", [1])
scope.reuse_variables()
v1 = tf.compat.v1.get_variable("v", [1])
assert v1 == v
```
To prevent accidental sharing of variables, we raise an exception when getting
an existing variable in a non-reusing scope.
```python
with tf.compat.v1.variable_scope("foo"):
v = tf.compat.v1.get_variable("v", [1])
v1 = tf.compat.v1.get_variable("v", [1])
# Raises ValueError("... v already exists ...").
```
Similarly, we raise an exception when trying to get a variable that does not
exist in reuse mode.
```python
with tf.compat.v1.variable_scope("foo", reuse=True):
v = tf.compat.v1.get_variable("v", [1])
# Raises ValueError("... v does not exists ...").
```
Note that the `reuse` flag is inherited: if we open a reusing scope, then all
its sub-scopes become reusing as well.
A note about name scoping: Setting `reuse` does not impact the naming of other
ops such as mult. See related discussion on
[github#6189](https://github.com/tensorflow/tensorflow/issues/6189)
Note that up to and including version 1.0, it was allowed (though explicitly
discouraged) to pass False to the reuse argument, yielding undocumented
behaviour slightly different from None. Starting at 1.1.0 passing None and
False as reuse has exactly the same effect.
A note about using variable scopes in multi-threaded environment: Variable
scopes are thread local, so one thread will not see another thread's current
scope. Also, when using `default_name`, unique scopes names are also generated
only on a per thread basis. If the same name was used within a different
thread, that doesn't prevent a new thread from creating the same scope.
However, the underlying variable store is shared across threads (within the
same graph). As such, if another thread tries to create a new variable with
the same name as a variable created by a previous thread, it will fail unless
reuse is True.
Further, each thread starts with an empty variable scope. So if you wish to
preserve name prefixes from a scope from the main thread, you should capture
the main thread's scope and re-enter it in each thread. For e.g.
```
main_thread_scope = variable_scope.get_variable_scope()
# Thread's target function:
def thread_target_fn(captured_scope):
with variable_scope.variable_scope(captured_scope):
# .... regular code for this thread
thread = threading.Thread(target=thread_target_fn, args=(main_thread_scope,))
```
"""
def __init__(self,
name_or_scope,
default_name=None,
values=None,
initializer=None,
regularizer=None,
caching_device=None,
partitioner=None,
custom_getter=None,
reuse=None,
dtype=None,
use_resource=None,
constraint=None,
auxiliary_name_scope=True):
"""Initialize the context manager.
Args:
name_or_scope: `string` or `VariableScope`: the scope to open.
default_name: The default name to use if the `name_or_scope` argument is
`None`, this name will be uniquified. If name_or_scope is provided it
won't be used and therefore it is not required and can be None.
values: The list of `Tensor` arguments that are passed to the op function.
initializer: default initializer for variables within this scope.
regularizer: default regularizer for variables within this scope.
caching_device: default caching device for variables within this scope.
partitioner: default partitioner for variables within this scope.
custom_getter: default custom getter for variables within this scope.
reuse: `True`, None, or tf.compat.v1.AUTO_REUSE; if `True`, we go into
reuse mode for this scope as well as all sub-scopes; if
tf.compat.v1.AUTO_REUSE, we create variables if they do not exist, and
return them otherwise; if None, we inherit the parent scope's reuse
flag. When eager execution is enabled, new variables are always created
unless an EagerVariableStore or template is currently active.
dtype: type of variables created in this scope (defaults to the type in
the passed scope, or inherited from parent scope).
use_resource: If False, all variables will be regular Variables. If True,
experimental ResourceVariables with well-defined semantics will be used
instead. Defaults to False (will later change to True). When eager
execution is enabled this argument is always forced to be True.
constraint: An optional projection function to be applied to the variable
after being updated by an `Optimizer` (e.g. used to implement norm
constraints or value constraints for layer weights). The function must
take as input the unprojected Tensor representing the value of the
variable and return the Tensor for the projected value (which must have
the same shape). Constraints are not safe to use when doing asynchronous
distributed training.
auxiliary_name_scope: If `True`, we create an auxiliary name scope with
the scope. If `False`, we don't create it. Note that the argument is not
inherited, and it only takes effect for once when creating. You should
only use it for re-entering a premade variable scope.
Returns:
A scope that can be captured and reused.
Raises:
ValueError: when trying to reuse within a create scope, or create within
a reuse scope.
TypeError: when the types of some arguments are not appropriate.
"""
self._name_or_scope = name_or_scope
self._default_name = default_name
self._values = values
self._initializer = initializer
self._regularizer = regularizer
self._caching_device = caching_device
self._partitioner = partitioner
self._custom_getter = custom_getter
self._reuse = reuse
self._dtype = dtype
self._use_resource = use_resource
self._constraint = constraint
if self._default_name is None and self._name_or_scope is None:
raise TypeError("If default_name is None then name_or_scope is required")
if self._reuse is False:
# We don't allow non-inheriting scopes, False = None here.
self._reuse = None
if not (self._reuse is True
or self._reuse is None
or self._reuse is AUTO_REUSE):
raise ValueError("The reuse parameter must be True or False or None.")
if self._values is None:
self._values = []
self._in_graph_mode = not context.executing_eagerly()
if self._in_graph_mode:
self._graph = ops._get_graph_from_inputs(self._values) # pylint: disable=protected-access
self._cached_pure_variable_scope = None
self._current_name_scope = None
if not isinstance(auxiliary_name_scope, bool):
raise TypeError("The auxiliary_name_scope must be `True` or `False`, "
"while get {}".format(auxiliary_name_scope))
self._auxiliary_name_scope = auxiliary_name_scope
def __enter__(self):
# If the default graph is building a function, then we should not replace it
# with the cached graph.
if ops.get_default_graph().building_function:
self._building_function = True
else:
self._building_function = False
if self._in_graph_mode and not self._building_function:
self._graph_context_manager = self._graph.as_default()
self._graph_context_manager.__enter__()
if self._cached_pure_variable_scope is not None:
# Fast path for re-entering variable_scopes. We've held on to the pure
# variable scope from a previous successful __enter__, so we avoid some
# overhead by re-using that object.
if self._current_name_scope is not None:
self._current_name_scope.__enter__()
return self._cached_pure_variable_scope.__enter__()
try:
return self._enter_scope_uncached()
finally:
if (self._in_graph_mode and not self._building_function and
self._graph_context_manager is not None):
self._graph_context_manager.__exit__(*sys.exc_info())
  def _enter_scope_uncached(self):
    """Enters the context manager when there is no cached scope yet.

    Builds the name scope (or re-enters the current one when
    `auxiliary_name_scope` is False) and the pure variable scope, then caches
    both so later entries can take the fast path in `__enter__`.

    Returns:
      The entered variable scope.

    Raises:
      TypeError: A wrong type is passed as `scope` at __init__().
      ValueError: `reuse` is incorrectly set at __init__().
    """
    if self._auxiliary_name_scope:
      # Create a new name scope later
      current_name_scope = None
    else:
      # Reenter the current name scope
      name_scope = ops.get_name_scope()
      if name_scope:
        # Hack to reenter: a trailing "/" makes ops.name_scope treat the
        # string as an absolute (existing) scope instead of creating a new one.
        name_scope += "/"
        current_name_scope = ops.name_scope(name_scope)
      else:
        # Root scope: name_scope is "" here.
        current_name_scope = ops.name_scope(name_scope)
    # IMPORTANT: Only assign to self._cached_pure_variable_scope and
    # self._current_name_scope after successful __enter__() calls.
    if self._name_or_scope is not None:
      if not isinstance(self._name_or_scope,
                        (VariableScope,) + six.string_types):
        raise TypeError("VariableScope: name_or_scope must be a string or "
                        "VariableScope.")
      if isinstance(self._name_or_scope, six.string_types):
        name_scope = self._name_or_scope
      else:
        # For a VariableScope object, use only the last path component as the
        # name scope to (re)enter.
        name_scope = self._name_or_scope.name.split("/")[-1]
      if name_scope or current_name_scope:
        current_name_scope = current_name_scope or ops.name_scope(name_scope)
        try:
          current_name_scope_name = current_name_scope.__enter__()
        except:
          # Undo the partial entry before propagating the error.
          current_name_scope.__exit__(*sys.exc_info())
          raise
        self._current_name_scope = current_name_scope
        if isinstance(self._name_or_scope, six.string_types):
          old_name_scope = current_name_scope_name
        else:
          old_name_scope = self._name_or_scope.original_name_scope
        pure_variable_scope = _pure_variable_scope(
            self._name_or_scope,
            reuse=self._reuse,
            initializer=self._initializer,
            regularizer=self._regularizer,
            caching_device=self._caching_device,
            partitioner=self._partitioner,
            custom_getter=self._custom_getter,
            old_name_scope=old_name_scope,
            dtype=self._dtype,
            use_resource=self._use_resource,
            constraint=self._constraint)
        try:
          entered_pure_variable_scope = pure_variable_scope.__enter__()
        except:
          pure_variable_scope.__exit__(*sys.exc_info())
          raise
        # Cache only after a fully successful entry (see IMPORTANT note above).
        self._cached_pure_variable_scope = pure_variable_scope
        return entered_pure_variable_scope
      else:
        self._current_name_scope = None
        # This can only happen if someone is entering the root variable scope.
        pure_variable_scope = _pure_variable_scope(
            self._name_or_scope,
            reuse=self._reuse,
            initializer=self._initializer,
            regularizer=self._regularizer,
            caching_device=self._caching_device,
            partitioner=self._partitioner,
            custom_getter=self._custom_getter,
            dtype=self._dtype,
            use_resource=self._use_resource,
            constraint=self._constraint)
        try:
          entered_pure_variable_scope = pure_variable_scope.__enter__()
        except:
          pure_variable_scope.__exit__(*sys.exc_info())
          raise
        self._cached_pure_variable_scope = pure_variable_scope
        return entered_pure_variable_scope
    else:  # Here name_or_scope is None. Using default name, but made unique.
      if self._reuse:
        raise ValueError("reuse=True cannot be used without a name_or_scope")
      current_name_scope = current_name_scope or ops.name_scope(
          self._default_name)
      try:
        current_name_scope_name = current_name_scope.__enter__()
      except:
        current_name_scope.__exit__(*sys.exc_info())
        raise
      self._current_name_scope = current_name_scope
      # Uniquify the default name so repeated entries get distinct scopes.
      unique_default_name = _get_unique_variable_scope(self._default_name)
      pure_variable_scope = _pure_variable_scope(
          unique_default_name,
          initializer=self._initializer,
          regularizer=self._regularizer,
          caching_device=self._caching_device,
          partitioner=self._partitioner,
          custom_getter=self._custom_getter,
          old_name_scope=current_name_scope_name,
          dtype=self._dtype,
          use_resource=self._use_resource,
          constraint=self._constraint)
      try:
        entered_pure_variable_scope = pure_variable_scope.__enter__()
      except:
        pure_variable_scope.__exit__(*sys.exc_info())
        raise
      self._cached_pure_variable_scope = pure_variable_scope
      return entered_pure_variable_scope
def __exit__(self, type_arg, value_arg, traceback_arg):
self._cached_pure_variable_scope.__exit__(type_arg, value_arg,
traceback_arg)
if self._current_name_scope:
self._current_name_scope.__exit__(type_arg, value_arg, traceback_arg)
if self._in_graph_mode and not self._building_function:
self._graph_context_manager.__exit__(type_arg, value_arg, traceback_arg)
# pylint: disable=g-doc-return-or-yield
@tf_export(v1=["variable_op_scope"])
@tf_contextlib.contextmanager
def variable_op_scope(values,
                      name_or_scope,
                      default_name=None,
                      initializer=None,
                      regularizer=None,
                      caching_device=None,
                      partitioner=None,
                      custom_getter=None,
                      reuse=None,
                      dtype=None,
                      use_resource=None,
                      constraint=None):
  """Deprecated: context manager for defining an op that creates variables.

  Kept only for backwards compatibility: logs a deprecation warning and
  forwards every argument to `variable_scope` (note the different argument
  order), yielding the resulting scope.
  """
  logging.warn("tf.variable_op_scope(values, name, default_name) is deprecated,"
               " use tf.variable_scope(name, default_name, values)")
  with variable_scope(
      name_or_scope,
      default_name=default_name,
      values=values,
      initializer=initializer,
      regularizer=regularizer,
      caching_device=caching_device,
      partitioner=partitioner,
      custom_getter=custom_getter,
      reuse=reuse,
      dtype=dtype,
      use_resource=use_resource,
      constraint=constraint) as scope:
    yield scope
def _call_partitioner(partitioner, shape, dtype):
  """Call partitioner validating its inputs/output.

  Args:
    partitioner: a function mapping `Tensor` shape and dtype to a list of
      partitions.
    shape: shape of the `Tensor` to partition, must be fully defined and have
      at least one dimension.
    dtype: dtype of the elements in the `Tensor`.

  Returns:
    A list with elements >= 1 and at most one element > 1. The index of that
    element (if any) corresponds to the partitioning axis; if every element
    is 1 the variable is not partitioned.
  """
  if not shape.is_fully_defined():
    raise ValueError("Shape of a new partitioned variable must be "
                     "fully defined, but instead was %s." % (shape,))
  if shape.ndims < 1:
    raise ValueError("A partitioned Variable must have rank at least 1, "
                     "shape: %s" % shape)
  # NOTE(review): collections_lib.Sequence -- confirm this alias resolves to
  # collections.abc.Sequence; the collections-module alias is deprecated in
  # Python 3.3+ and removed in 3.10.
  slicing = partitioner(shape=shape, dtype=dtype)
  if not isinstance(slicing, collections_lib.Sequence):
    raise ValueError("Partitioner must return a sequence, but saw: %s" %
                     slicing)
  if len(slicing) != shape.ndims:
    raise ValueError(
        "Partitioner returned a partition list that does not match the "
        "Variable's rank: %s vs. %s" % (slicing, shape))
  if any(p < 1 for p in slicing):
    raise ValueError("Partitioner returned zero partitions for some axes: %s" %
                     slicing)
  if sum(p > 1 for p in slicing) > 1:
    raise ValueError("Can only slice a variable along one dimension: "
                     "shape: %s, partitioning: %s" % (shape, slicing))
  return slicing
# TODO(slebedev): could be inlined, but
# `_VariableStore._get_partitioned_variable` is too complex even
# without this logic.
def _get_slice_dim_and_num_slices(slicing):
"""Get slicing dimension and number of slices from the partitioner output."""
for slice_dim, num_slices in enumerate(slicing):
if num_slices > 1:
break
else:
# Degenerate case: no partitioning applied.
slice_dim = 0
num_slices = 1
return slice_dim, num_slices
def _iter_slices(full_shape, num_slices, slice_dim):
"""Slices a given a shape along the specified dimension."""
num_slices_with_excess = full_shape[slice_dim] % num_slices
offset = [0] * len(full_shape)
min_slice_len = full_shape[slice_dim] // num_slices
for i in xrange(num_slices):
shape = full_shape[:]
shape[slice_dim] = min_slice_len + bool(i < num_slices_with_excess)
yield offset[:], shape
offset[slice_dim] += shape[slice_dim]
def default_variable_creator(next_creator=None, **kwargs):
  """Default variable creator.

  Terminal creator in the variable-creator chain (V1 semantics): builds a
  `ResourceVariable` when resource variables are requested -- explicitly via
  `use_resource`, via the enclosing variable scope, via the module default,
  or because eager execution is enabled -- and a `RefVariable` otherwise.
  """
  assert next_creator is None
  initial_value = kwargs.get("initial_value", None)
  trainable = kwargs.get("trainable", None)
  collections = kwargs.get("collections", None)
  validate_shape = kwargs.get("validate_shape", True)
  caching_device = kwargs.get("caching_device", None)
  name = kwargs.get("name", None)
  variable_def = kwargs.get("variable_def", None)
  dtype = kwargs.get("dtype", None)
  expected_shape = kwargs.get("expected_shape", None)
  import_scope = kwargs.get("import_scope", None)
  constraint = kwargs.get("constraint", None)
  use_resource = kwargs.get("use_resource", None)
  synchronization = kwargs.get("synchronization", None)
  aggregation = kwargs.get("aggregation", None)
  shape = kwargs.get("shape", None)
  if use_resource is None:
    # Fall back to the enclosing variable scope's preference.
    use_resource = get_variable_scope().use_resource
  if use_resource is None:
    use_resource = _DEFAULT_USE_RESOURCE
  # Eager execution always uses resource variables.
  use_resource = use_resource or context.executing_eagerly()
  if use_resource:
    distribute_strategy = kwargs.get("distribute_strategy", None)
    return resource_variable_ops.ResourceVariable(
        initial_value=initial_value,
        trainable=trainable,
        collections=collections,
        validate_shape=validate_shape,
        caching_device=caching_device,
        name=name,
        dtype=dtype,
        constraint=constraint,
        variable_def=variable_def,
        import_scope=import_scope,
        distribute_strategy=distribute_strategy,
        synchronization=synchronization,
        aggregation=aggregation,
        shape=shape)
  else:
    # Note: RefVariable takes expected_shape, which ResourceVariable does not.
    return variables.RefVariable(
        initial_value=initial_value,
        trainable=trainable,
        collections=collections,
        validate_shape=validate_shape,
        caching_device=caching_device,
        name=name,
        dtype=dtype,
        constraint=constraint,
        variable_def=variable_def,
        expected_shape=expected_shape,
        import_scope=import_scope,
        synchronization=synchronization,
        aggregation=aggregation,
        shape=shape)
def default_variable_creator_v2(next_creator=None, **kwargs):
  """Default variable creator (V2): builds a `ResourceVariable` from kwargs.

  This is the terminal creator in the variable-creator chain, so
  `next_creator` must be None.
  """
  assert next_creator is None
  passthrough = (
      "initial_value", "trainable", "caching_device", "name", "variable_def",
      "dtype", "import_scope", "constraint", "distribute_strategy",
      "synchronization", "aggregation", "shape")
  options = {key: kwargs.get(key, None) for key in passthrough}
  # validate_shape is the only option whose default is not None.
  options["validate_shape"] = kwargs.get("validate_shape", True)
  return resource_variable_ops.ResourceVariable(**options)
# Register the default creators on the `variables` module so variable
# construction can dispatch to them; presumably placed here rather than in
# variables.py to avoid a circular import -- TODO confirm.
variables.default_variable_creator = default_variable_creator
variables.default_variable_creator_v2 = default_variable_creator_v2
def _make_getter(captured_getter, captured_previous):
"""Gets around capturing loop variables in python being broken."""
return lambda **kwargs: captured_getter(captured_previous, **kwargs)
# TODO(apassos) remove forwarding symbol
# Module-level alias: `variable` forwards to the V1 Variable class.
variable = variables.VariableV1
@tf_export(v1=["variable_creator_scope"])
@tf_contextlib.contextmanager
def variable_creator_scope_v1(variable_creator):
  """Scope which defines a variable creation function to be used by variable().

  variable_creator is expected to be a function with the following signature:

  ```
    def variable_creator(next_creator, **kwargs)
  ```

  The creator is supposed to eventually call the next_creator to create a
  variable if it does want to create a variable and not call Variable or
  ResourceVariable directly. This helps make creators composable. A creator may
  choose to create multiple variables, return already existing variables, or
  simply register that a variable was created and defer to the next creators in
  line. Creators can also modify the keyword arguments seen by the next
  creators.

  Custom getters in the variable scope will eventually resolve down to these
  custom creators when they do create variables.

  The valid keyword arguments in kwargs are:

    initial_value: A `Tensor`, or Python object convertible to a `Tensor`,
      which is the initial value for the Variable. The initial value must have
      a shape specified unless `validate_shape` is set to False. Can also be a
      callable with no argument that returns the initial value when called. In
      that case, `dtype` must be specified. (Note that initializer functions
      from init_ops.py must first be bound to a shape before being used here.)
    trainable: If `True`, the default, also adds the variable to the graph
      collection `GraphKeys.TRAINABLE_VARIABLES`. This collection is used as
      the default list of variables to use by the `Optimizer` classes.
      `trainable` defaults to `True` unless `synchronization` is
      set to `ON_READ`.
    collections: List of graph collections keys. The new variable is added to
      these collections. Defaults to `[GraphKeys.GLOBAL_VARIABLES]`.
    validate_shape: If `False`, allows the variable to be initialized with a
      value of unknown shape. If `True`, the default, the shape of
      `initial_value` must be known.
    caching_device: Optional device string describing where the Variable
      should be cached for reading. Defaults to the Variable's device.
      If not `None`, caches on another device. Typical use is to cache
      on the device where the Ops using the Variable reside, to deduplicate
      copying through `Switch` and other conditional statements.
    name: Optional name for the variable. Defaults to `'Variable'` and gets
      uniquified automatically.
    dtype: If set, initial_value will be converted to the given type.
      If `None`, either the datatype will be kept (if `initial_value` is
      a Tensor), or `convert_to_tensor` will decide.
    constraint: A constraint function to be applied to the variable after
      updates by some algorithms.
    use_resource: if True, a ResourceVariable is always created.
    synchronization: Indicates when a distributed variable will be
      aggregated. Accepted values are constants defined in the class
      `tf.VariableSynchronization`. By default the synchronization is set to
      `AUTO` and the current `DistributionStrategy` chooses
      when to synchronize. If `synchronization` is set to `ON_READ`,
      `trainable` must not be set to `True`.
    aggregation: Indicates how a distributed variable will be aggregated.
      Accepted values are constants defined in the class
      `tf.VariableAggregation`.

  This set may grow over time, so it's important the signature of creators is as
  mentioned above.

  Args:
    variable_creator: the passed creator

  Yields:
    A scope in which the creator is active
  """
  # The creator stack lives on the graph; the private accessor is the only
  # entry point for installing a creator.
  with ops.get_default_graph()._variable_creator_scope(variable_creator):  # pylint: disable=protected-access
    yield
# Note: only the docstrings differ between this and v1.
@tf_export("variable_creator_scope", v1=[])
@tf_contextlib.contextmanager
def variable_creator_scope(variable_creator):
  """Scope which defines a variable creation function to be used by variable().

  variable_creator is expected to be a function with the following signature:

  ```
    def variable_creator(next_creator, **kwargs)
  ```

  The creator is supposed to eventually call the next_creator to create a
  variable if it does want to create a variable and not call Variable or
  ResourceVariable directly. This helps make creators composable. A creator may
  choose to create multiple variables, return already existing variables, or
  simply register that a variable was created and defer to the next creators in
  line. Creators can also modify the keyword arguments seen by the next
  creators.

  Custom getters in the variable scope will eventually resolve down to these
  custom creators when they do create variables.

  The valid keyword arguments in kwargs are:

    initial_value: A `Tensor`, or Python object convertible to a `Tensor`,
      which is the initial value for the Variable. The initial value must have
      a shape specified unless `validate_shape` is set to False. Can also be a
      callable with no argument that returns the initial value when called. In
      that case, `dtype` must be specified. (Note that initializer functions
      from init_ops.py must first be bound to a shape before being used here.)
    trainable: If `True`, the default, GradientTapes automatically watch
      uses of this Variable.
    validate_shape: If `False`, allows the variable to be initialized with a
      value of unknown shape. If `True`, the default, the shape of
      `initial_value` must be known.
    caching_device: Optional device string describing where the Variable
      should be cached for reading. Defaults to the Variable's device.
      If not `None`, caches on another device. Typical use is to cache
      on the device where the Ops using the Variable reside, to deduplicate
      copying through `Switch` and other conditional statements.
    name: Optional name for the variable. Defaults to `'Variable'` and gets
      uniquified automatically.
    dtype: If set, initial_value will be converted to the given type.
      If `None`, either the datatype will be kept (if `initial_value` is
      a Tensor), or `convert_to_tensor` will decide.
    constraint: A constraint function to be applied to the variable after
      updates by some algorithms.
    synchronization: Indicates when a distributed variable will be
      aggregated. Accepted values are constants defined in the class
      `tf.VariableSynchronization`. By default the synchronization is set to
      `AUTO` and the current `DistributionStrategy` chooses
      when to synchronize. If `synchronization` is set to `ON_READ`,
      `trainable` must not be set to `True`.
    aggregation: Indicates how a distributed variable will be aggregated.
      Accepted values are constants defined in the class
      `tf.VariableAggregation`.

  This set may grow over time, so it's important the signature of creators is as
  mentioned above.

  Args:
    variable_creator: the passed creator

  Yields:
    A scope in which the creator is active
  """
  # The creator stack lives on the graph; the private accessor is the only
  # entry point for installing a creator.
  with ops.get_default_graph()._variable_creator_scope(variable_creator):  # pylint: disable=protected-access
    yield
| ghchinoy/tensorflow | tensorflow/python/ops/variable_scope.py | Python | apache-2.0 | 113,647 |
package cherry.fundamental.db.gen.query;
import static com.querydsl.core.types.PathMetadataFactory.*;
import com.querydsl.core.types.dsl.*;
import com.querydsl.core.types.PathMetadata;
import javax.annotation.Generated;
import com.querydsl.core.types.Path;
import com.querydsl.sql.ColumnMetadata;
import java.sql.Types;
/**
 * QVerifyDatetime is a Querydsl query type for BVerifyDatetime.
 *
 * <p>Generated metamodel for the {@code PUBLIC.VERIFY_DATETIME} table
 * (see the {@code @Generated} annotation); regenerate rather than hand-edit.
 */
@Generated("com.querydsl.sql.codegen.MetaDataSerializer")
public class QVerifyDatetime extends com.querydsl.sql.RelationalPathBase<BVerifyDatetime> {
    private static final long serialVersionUID = 533014528;
    /** Shared default instance bound to the VERIFY_DATETIME table. */
    public static final QVerifyDatetime verifyDatetime = new QVerifyDatetime("VERIFY_DATETIME");
    // Column paths: DT (DATE), DTM (TIMESTAMP), ID (BIGINT, PK), TM (TIME).
    public final DatePath<java.time.LocalDate> dt = createDate("dt", java.time.LocalDate.class);
    public final DateTimePath<java.time.LocalDateTime> dtm = createDateTime("dtm", java.time.LocalDateTime.class);
    public final NumberPath<Long> id = createNumber("id", Long.class);
    public final TimePath<java.time.LocalTime> tm = createTime("tm", java.time.LocalTime.class);
    public final com.querydsl.sql.PrimaryKey<BVerifyDatetime> verifyDatetimePkc = createPrimaryKey(id);
    public QVerifyDatetime(String variable) {
        super(BVerifyDatetime.class, forVariable(variable), "PUBLIC", "VERIFY_DATETIME");
        addMetadata();
    }
    public QVerifyDatetime(String variable, String schema, String table) {
        super(BVerifyDatetime.class, forVariable(variable), schema, table);
        addMetadata();
    }
    public QVerifyDatetime(Path<? extends BVerifyDatetime> path) {
        super(path.getType(), path.getMetadata(), "PUBLIC", "VERIFY_DATETIME");
        addMetadata();
    }
    public QVerifyDatetime(PathMetadata metadata) {
        super(BVerifyDatetime.class, metadata, "PUBLIC", "VERIFY_DATETIME");
        addMetadata();
    }
    /** Registers JDBC column metadata (name, index, SQL type, size) for each path. */
    public void addMetadata() {
        addMetadata(dt, ColumnMetadata.named("DT").withIndex(2).ofType(Types.DATE).withSize(8));
        addMetadata(dtm, ColumnMetadata.named("DTM").withIndex(4).ofType(Types.TIMESTAMP).withSize(23).withDigits(10));
        addMetadata(id, ColumnMetadata.named("ID").withIndex(1).ofType(Types.BIGINT).withSize(19).notNull());
        addMetadata(tm, ColumnMetadata.named("TM").withIndex(3).ofType(Types.TIME).withSize(6));
    }
}
| agwlvssainokuni/springapp2 | corelib/fundamental/src/testGenerated/java/cherry/fundamental/db/gen/query/QVerifyDatetime.java | Java | apache-2.0 | 2,348 |
package org.docksidestage.sqlserver.dbflute.cbean.cq.ciq;
import java.util.Map;
import org.dbflute.cbean.*;
import org.dbflute.cbean.ckey.*;
import org.dbflute.cbean.coption.ConditionOption;
import org.dbflute.cbean.cvalue.ConditionValue;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.exception.IllegalConditionBeanOperationException;
import org.docksidestage.sqlserver.dbflute.cbean.*;
import org.docksidestage.sqlserver.dbflute.cbean.cq.bs.*;
import org.docksidestage.sqlserver.dbflute.cbean.cq.*;
/**
 * The condition-query for in-line of SUMMARY_WITHDRAWAL.
 *
 * <p>Generated by DBFlute. Wraps an outer condition-query so that conditions
 * are registered as in-line (inline-view) WHERE clauses; union, ScalarCondition
 * and InScopeRelation-on-OnClause are unsupported in this context.
 *
 * @author DBFlute(AutoGenerator)
 */
public class SummaryWithdrawalCIQ extends AbstractBsSummaryWithdrawalCQ {
    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // Outer condition-query whose condition values this in-line query delegates to.
    protected BsSummaryWithdrawalCQ _myCQ;
    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    public SummaryWithdrawalCIQ(ConditionQuery referrerQuery, SqlClause sqlClause
                        , String aliasName, int nestLevel, BsSummaryWithdrawalCQ myCQ) {
        super(referrerQuery, sqlClause, aliasName, nestLevel);
        _myCQ = myCQ;
        _foreignPropertyName = _myCQ.xgetForeignPropertyName(); // accept foreign property name
        _relationPath = _myCQ.xgetRelationPath(); // accept relation path
        _inline = true;
    }
    // ===================================================================================
    //                                                             Override about Register
    //                                                             =======================
    // Union is meaningless inside an inline view, so reject it outright.
    protected void reflectRelationOnUnionQuery(ConditionQuery bq, ConditionQuery uq)
    { throw new IllegalConditionBeanOperationException("InlineView cannot use Union: " + bq + " : " + uq); }
    // Redirect condition registration to the in-line WHERE clause variants.
    @Override
    protected void setupConditionValueAndRegisterWhereClause(ConditionKey k, Object v, ConditionValue cv, String col)
    { regIQ(k, v, cv, col); }
    @Override
    protected void setupConditionValueAndRegisterWhereClause(ConditionKey k, Object v, ConditionValue cv, String col, ConditionOption op)
    { regIQ(k, v, cv, col, op); }
    @Override
    protected void registerWhereClause(String wc)
    { registerInlineWhereClause(wc); }
    @Override
    protected boolean isInScopeRelationSuppressLocalAliasName() {
        if (_onClause) { throw new IllegalConditionBeanOperationException("InScopeRelation on OnClause is unsupported."); }
        return true;
    }
    // ===================================================================================
    //                                                                Override about Query
    //                                                                ====================
    // Condition values are shared with the outer query (_myCQ), one per column.
    protected ConditionValue xgetCValueMemberId() { return _myCQ.xdfgetMemberId(); }
    protected ConditionValue xgetCValueMemberName() { return _myCQ.xdfgetMemberName(); }
    protected ConditionValue xgetCValueWithdrawalReasonCode() { return _myCQ.xdfgetWithdrawalReasonCode(); }
    protected ConditionValue xgetCValueWithdrawalReasonText() { return _myCQ.xdfgetWithdrawalReasonText(); }
    protected ConditionValue xgetCValueWithdrawalReasonInputText() { return _myCQ.xdfgetWithdrawalReasonInputText(); }
    protected ConditionValue xgetCValueWithdrawalDatetime() { return _myCQ.xdfgetWithdrawalDatetime(); }
    protected ConditionValue xgetCValueMemberStatusCode() { return _myCQ.xdfgetMemberStatusCode(); }
    protected ConditionValue xgetCValueMemberStatusName() { return _myCQ.xdfgetMemberStatusName(); }
    protected ConditionValue xgetCValueMaxPurchasePrice() { return _myCQ.xdfgetMaxPurchasePrice(); }
    protected Map<String, Object> xfindFixedConditionDynamicParameterMap(String pp) { return null; }
    public String keepScalarCondition(SummaryWithdrawalCQ sq)
    { throwIICBOE("ScalarCondition"); return null; }
    protected void throwIICBOE(String name)
    { throw new IllegalConditionBeanOperationException(name + " at InlineView is unsupported."); }
    // ===================================================================================
    //                                                                       Very Internal
    //                                                                       =============
    // very internal (for suppressing warn about 'Not Use Import')
    protected String xinCB() { return SummaryWithdrawalCB.class.getName(); }
    protected String xinCQ() { return SummaryWithdrawalCQ.class.getName(); }
}
| dbflute-test/dbflute-test-dbms-sqlserver | src/main/java/org/docksidestage/sqlserver/dbflute/cbean/cq/ciq/SummaryWithdrawalCIQ.java | Java | apache-2.0 | 4,946 |
package ch11holding;
import java.util.*;
/**
* <pre>
* Output:
* 0
* 1
* 2
* 0
* 1
* 2
* </pre>
*/
public class D02_ApplesAndOrangesWithGenerics {
public static void main(String[] args) {
ArrayList<Apple> apples = new ArrayList<Apple>();
for (int i = 0; i < 3; i++)
apples.add(new Apple());
// Compile-time error:
// apples.add(new Orange());
for (int i = 0; i < apples.size(); i++)
System.out.println(apples.get(i).id());
// Using foreach:
for (Apple c : apples)
System.out.println(c.id());
}
} | deguo/tij4 | src/main/java/ch11holding/D02_ApplesAndOrangesWithGenerics.java | Java | apache-2.0 | 532 |
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package certs
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/x509"
"crypto/x509/pkix"
"encoding/pem"
"errors"
"fmt"
"io/ioutil"
"math/big"
"os"
"time"
log "github.com/golang/glog"
cpb "github.com/google/fleetspeak/fleetspeak/src/config/proto/fleetspeak_config"
)
// GetTrustedCert returns the trusted certificate associated with cfg, creating
// it if necessary. If available, priv is the private key associated with cert.
//
// When cfg.TrustedCertFile does not exist yet, a self-signed CA certificate
// and key are generated via makeCACert before loading.
func GetTrustedCert(cfg *cpb.Config) (cert *x509.Certificate, priv interface{}, pem []byte, err error) {
	if cfg.TrustedCertFile == "" {
		return nil, nil, nil, errors.New("trusted_cert_file not set")
	}
	if _, err := os.Stat(cfg.TrustedCertFile); err != nil {
		if os.IsNotExist(err) {
			// Attempt to create a CA certificate.
			if err := makeCACert(cfg); err != nil {
				return nil, nil, nil, err
			}
		} else {
			// Any stat error other than "not exist" (e.g. permissions) is fatal.
			return nil, nil, nil, fmt.Errorf("unable to stat trusted_cert_file [%s]: %v", cfg.TrustedCertFile, err)
		}
	}
	return getTrustedCert(cfg)
}
// makeCACert generates a self-signed CA certificate with a fresh ECDSA P-256
// key and writes both, PEM encoded, to cfg.TrustedCertFile and
// cfg.TrustedCertKeyFile.
func makeCACert(cfg *cpb.Config) error {
	if cfg.TrustedCertKeyFile == "" {
		return errors.New("unable to create a CA cert: trusted_cert_key_file not set")
	}
	privKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	if err != nil {
		return fmt.Errorf("unable to create a CA cert: key generation failed: %v", err)
	}
	// Random serial number in [0, 2^128).
	serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
	serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
	if err != nil {
		return fmt.Errorf("unable to create a CA cert: serial number generation failed: %v", err)
	}
	tmpl := x509.Certificate{
		Version:      1,
		SerialNumber: serialNumber,
		Subject:      pkix.Name{CommonName: fmt.Sprintf("%s Fleetspeak CA", cfg.ConfigurationName)},
		NotBefore:    time.Now(),
		NotAfter:     time.Now().Add(10 * 24 * 365 * time.Hour), // 10 years from now (365-day years; leap days ignored)
		KeyUsage:     x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign,
		IsCA:         true,
		BasicConstraintsValid: true,
	}
	// Template and parent are the same certificate, so the result is self-signed.
	cert, err := x509.CreateCertificate(rand.Reader, &tmpl, &tmpl, privKey.Public(), privKey)
	if err != nil {
		return fmt.Errorf("unable to create a CA cert: %v", err)
	}
	certPEM := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: cert})
	key, err := x509.MarshalECPrivateKey(privKey)
	if err != nil {
		return fmt.Errorf("unable to create CA cert: failed to marshal private key: %v", err)
	}
	keyPEM := pem.EncodeToMemory(&pem.Block{Type: "EC PRIVATE KEY", Bytes: key})
	if err := ioutil.WriteFile(cfg.TrustedCertFile, certPEM, 0644); err != nil {
		return fmt.Errorf("failed to write CA cert file [%s]: %v", cfg.TrustedCertFile, err)
	}
	// The key file is written 0600 so only the owner can read the CA key.
	if err := ioutil.WriteFile(cfg.TrustedCertKeyFile, keyPEM, 0600); err != nil {
		return fmt.Errorf("failed to write CA key file [%s]: %v", cfg.TrustedCertKeyFile, err)
	}
	return nil
}
// getTrustedCert loads the trusted CA certificate from cfg.TrustedCertFile
// and, when cfg.TrustedCertKeyFile is readable, its private key. A missing or
// unreadable key file is not an error: the certificate is returned with a nil
// key (server certificate creation is then disabled).
//
// Supported key PEM block types: "RSA PRIVATE KEY" (PKCS#1),
// "EC PRIVATE KEY" (SEC 1) and "PRIVATE KEY" (PKCS#8).
func getTrustedCert(cfg *cpb.Config) (cert *x509.Certificate, priv interface{}, certPEM []byte, err error) {
	// Read and validate the cert.
	certPEM, err = ioutil.ReadFile(cfg.TrustedCertFile)
	if err != nil {
		return nil, nil, nil, fmt.Errorf("unable to read trusted certificate file [%s]: %v", cfg.TrustedCertFile, err)
	}
	certBlock, _ := pem.Decode(certPEM)
	if certBlock == nil || certBlock.Type != "CERTIFICATE" {
		return nil, nil, nil, fmt.Errorf("trusted certificate file [%s] does not appear to contain a PEM format certificate", cfg.TrustedCertFile)
	}
	cert, err = x509.ParseCertificate(certBlock.Bytes)
	if err != nil {
		return nil, nil, nil, fmt.Errorf("unable to parse trusted certificate file [%s]: %v", cfg.TrustedCertFile, err)
	}
	// Read the key file, if present.
	keyPEM, err := ioutil.ReadFile(cfg.TrustedCertKeyFile)
	if err != nil {
		log.Infof("unable to read the trusted certificate key file [%s], server certificate creation disabled: %v", cfg.TrustedCertKeyFile, err)
		return cert, nil, certPEM, nil
	}
	keyBlock, _ := pem.Decode(keyPEM)
	if keyBlock == nil {
		return nil, nil, nil, fmt.Errorf("trusted certificate key file [%s] does not appear to contain a PEM format key", cfg.TrustedCertKeyFile)
	}
	switch keyBlock.Type {
	case "RSA PRIVATE KEY":
		priv, err = x509.ParsePKCS1PrivateKey(keyBlock.Bytes)
		if err != nil {
			return nil, nil, nil, fmt.Errorf("unable to parse RSA key in certificate key file [%s]: %v", cfg.TrustedCertKeyFile, err)
		}
		return cert, priv, certPEM, nil
	case "EC PRIVATE KEY":
		priv, err = x509.ParseECPrivateKey(keyBlock.Bytes)
		if err != nil {
			return nil, nil, nil, fmt.Errorf("unable to parse EC key in certificate key file [%s]: %v", cfg.TrustedCertKeyFile, err)
		}
		return cert, priv, certPEM, nil
	case "PRIVATE KEY":
		// PKCS#8 wrapper; may contain an RSA, ECDSA or Ed25519 key.
		priv, err = x509.ParsePKCS8PrivateKey(keyBlock.Bytes)
		if err != nil {
			return nil, nil, nil, fmt.Errorf("unable to parse PKCS#8 key in certificate key file [%s]: %v", cfg.TrustedCertKeyFile, err)
		}
		return cert, priv, certPEM, nil
	default:
		// Fixed typo ("unsupport") and removed the unreachable trailing return
		// that previously followed this switch.
		return nil, nil, nil, fmt.Errorf("unsupported PEM block type [%s] in certificate key file [%s]", keyBlock.Type, cfg.TrustedCertKeyFile)
	}
}
| google/fleetspeak | fleetspeak/src/config/certs/trusted.go | GO | apache-2.0 | 5,442 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.tree;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static java.util.Objects.requireNonNull;
/**
 * AST node representing a {@code value LIKE pattern [ESCAPE escape]} predicate.
 */
public class LikePredicate
        extends Expression
{
    private final Expression value;
    private final Expression pattern;
    private final Expression escape;

    public LikePredicate(Expression value, Expression pattern, Expression escape)
    {
        this(Optional.empty(), value, pattern, escape);
    }

    public LikePredicate(NodeLocation location, Expression value, Expression pattern, Expression escape)
    {
        this(Optional.of(location), value, pattern, escape);
    }

    // TODO cleanup LikePredicate so that escape is always passed using Optional
    public LikePredicate(Expression value, Expression pattern, Optional<Expression> escape)
    {
        this(Optional.empty(), value, pattern,
                requireNonNull(escape, "escape is null").orElse(null));
    }

    private LikePredicate(Optional<NodeLocation> location, Expression value, Expression pattern, Expression escape)
    {
        super(location);
        this.value = requireNonNull(value, "value is null");
        this.pattern = requireNonNull(pattern, "pattern is null");
        // May be null when the predicate has no ESCAPE clause.
        this.escape = escape;
    }

    /** Returns the expression being matched. */
    public Expression getValue()
    {
        return value;
    }

    /** Returns the LIKE pattern expression. */
    public Expression getPattern()
    {
        return pattern;
    }

    /** Returns the ESCAPE expression, or {@code null} when absent. */
    public Expression getEscape()
    {
        return escape;
    }

    @Override
    public <R, C> R accept(AstVisitor<R, C> visitor, C context)
    {
        return visitor.visitLikePredicate(this, context);
    }

    @Override
    public List<Node> getChildren()
    {
        ImmutableList.Builder<Node> children = ImmutableList.builder();
        children.add(value);
        children.add(pattern);
        if (escape != null) {
            children.add(escape);
        }
        return children.build();
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        // Exact-class comparison (not instanceof) to match the tree's
        // equality convention.
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        LikePredicate other = (LikePredicate) o;
        return Objects.equals(value, other.value)
                && Objects.equals(pattern, other.pattern)
                && Objects.equals(escape, other.escape);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(value, pattern, escape);
    }
}
| yuananf/presto | presto-parser/src/main/java/com/facebook/presto/sql/tree/LikePredicate.java | Java | apache-2.0 | 3,122 |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.dashclock;
import com.google.android.apps.dashclock.api.DashClockExtension;
import android.app.Service;
import android.appwidget.AppWidgetManager;
import android.content.ComponentName;
import android.content.Intent;
import android.os.Handler;
import android.os.IBinder;
import android.text.TextUtils;
import static com.google.android.apps.dashclock.LogUtils.LOGD;
/**
* The primary service for DashClock. This service is in charge of updating widget UI (see {@link
* #ACTION_UPDATE_WIDGETS}) and updating extension data via an internal instance of {@link
* ExtensionHost} (see {@link #ACTION_UPDATE_EXTENSIONS}).
*/
public class DashClockService extends Service implements ExtensionManager.OnChangeListener {
    private static final String TAG = LogUtils.makeLogTag(DashClockService.class);
    /**
     * Intent action for updating widget views. If {@link #EXTRA_APPWIDGET_ID} is provided, updates
     * only that widget. Otherwise, updates all widgets.
     */
    public static final String ACTION_UPDATE_WIDGETS =
            "com.google.android.apps.dashclock.action.UPDATE_WIDGETS";
    public static final String EXTRA_APPWIDGET_ID =
            "com.google.android.apps.dashclock.extra.APPWIDGET_ID";
    /**
     * Intent action for telling extensions to update their data. If {@link #EXTRA_COMPONENT_NAME}
     * is provided, updates only that extension. Otherwise, updates all active extensions. Also
     * optional is {@link #EXTRA_UPDATE_REASON} (see {@link DashClockExtension} for update reasons).
     */
    public static final String ACTION_UPDATE_EXTENSIONS =
            "com.google.android.apps.dashclock.action.UPDATE_EXTENSIONS";
    public static final String EXTRA_COMPONENT_NAME =
            "com.google.android.apps.dashclock.extra.COMPONENT_NAME";
    public static final String EXTRA_UPDATE_REASON =
            "com.google.android.apps.dashclock.extra.UPDATE_REASON";
    // Process-wide singleton tracking extensions; this service registers as a
    // change listener so it can re-render widgets when extension data changes.
    private ExtensionManager mExtensionManager;
    // Dispatches update operations to extension services (see ExtensionHost).
    private ExtensionHost mExtensionHost;
    // Handler used to debounce widget re-renders; created on the service's
    // main thread, so posted runnables execute there as well.
    private Handler mUpdateHandler = new Handler();
    @Override
    public void onCreate() {
        super.onCreate();
        // Register for extension change callbacks and set up the host that
        // talks to extension services.
        mExtensionManager = ExtensionManager.getInstance(this);
        mExtensionManager.addOnChangeListener(this);
        mExtensionHost = new ExtensionHost(this);
    }
    @Override
    public void onDestroy() {
        super.onDestroy();
        // Drop any pending debounced updates and tear down listeners/host to
        // avoid leaking this service instance.
        mUpdateHandler.removeCallbacksAndMessages(null);
        mExtensionManager.removeOnChangeListener(this);
        mExtensionHost.destroy();
    }
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (intent != null) {
            // Route the command to the matching handler; unknown actions are
            // silently ignored.
            String action = intent.getAction();
            if (ACTION_UPDATE_WIDGETS.equals(action)) {
                handleUpdateWidgets(intent);
            } else if (ACTION_UPDATE_EXTENSIONS.equals(action)) {
                handleUpdateExtensions(intent);
            }
        }
        // START_STICKY: ask the system to recreate the service if it is killed.
        return START_STICKY;
    }
    @Override
    public void onExtensionsChanged() {
        // Debounce: coalesce bursts of extension changes into a single
        // "update all widgets" pass 2 seconds after the last notification.
        mUpdateHandler.removeCallbacks(mUpdateAllWidgetsRunnable);
        mUpdateHandler.postDelayed(mUpdateAllWidgetsRunnable, 2000);
    }
    // Runs with an empty intent, i.e. no EXTRA_APPWIDGET_ID, which updates
    // every widget instance.
    private Runnable mUpdateAllWidgetsRunnable = new Runnable() {
        @Override
        public void run() {
            handleUpdateWidgets(new Intent());
        }
    };
    /**
     * Updates a widget's UI.
     */
    private void handleUpdateWidgets(Intent intent) {
        AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(this);
        // Either update all app widgets, or only those which were requested.
        int appWidgetIds[];
        if (intent.hasExtra(EXTRA_APPWIDGET_ID)) {
            // Note: falls back to -1 if the extra is present but not an int.
            appWidgetIds = new int[]{intent.getIntExtra(EXTRA_APPWIDGET_ID, -1)};
        } else {
            appWidgetIds = appWidgetManager.getAppWidgetIds(new ComponentName(
                    this, WidgetProvider.class));
        }
        // Log the affected widget IDs for debugging before rendering.
        StringBuilder sb = new StringBuilder();
        for (int appWidgetId : appWidgetIds) {
            sb.append(appWidgetId).append(" ");
        }
        LOGD(TAG, "Rendering widgets with appWidgetId(s): " + sb);
        WidgetRenderer.renderWidgets(this, appWidgetIds);
    }
    /**
     * Asks extensions to provide data updates.
     */
    private void handleUpdateExtensions(Intent intent) {
        int reason = intent.getIntExtra(EXTRA_UPDATE_REASON,
                DashClockExtension.UPDATE_REASON_UNKNOWN);
        // Either update all extensions, or only the requested one.
        String updateExtension = intent.getStringExtra(EXTRA_COMPONENT_NAME);
        if (!TextUtils.isEmpty(updateExtension)) {
            ComponentName cn = ComponentName.unflattenFromString(updateExtension);
            mExtensionHost.execute(cn, ExtensionHost.UPDATE_OPERATIONS.get(reason));
        } else {
            for (ComponentName cn : mExtensionManager.getActiveExtensionNames()) {
                mExtensionHost.execute(cn, ExtensionHost.UPDATE_OPERATIONS.get(reason));
            }
        }
    }
    @Override
    public IBinder onBind(Intent intent) {
        // Binding is not supported; clients interact via start commands only.
        return null;
    }
}
| mnm9994u/Stock_DashClock | main/src/com/google/android/apps/dashclock/DashClockService.java | Java | apache-2.0 | 5,742 |
/**
* Copyright 2008 Marvin Herman Froeder
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*
*/
package org.sonatype.flexmojos.tests.concept;
import org.testng.annotations.Test;
/**
 * Concept test exercising the "advancedflex-example" sample project to verify
 * that flexmojos builds an advanced FlexUnit configuration successfully.
 */
public class AdvancedFlexTest
    extends AbstractConceptTest
{
    @Test
    public void testAdvancedFlexUnitExample()
        throws Exception
    {
        // Runs the standard concept-project build for the named sample; the
        // helper fails the test if the Maven build fails.
        standardConceptTester( "advancedflex-example" );
    }
}
| edward-yakop/flexmojos | flexmojos-testing/flexmojos-test-harness/src/test/java/org/sonatype/flexmojos/tests/concept/AdvancedFlexTest.java | Java | apache-2.0 | 907 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.framework;
import java.util.*;
import org.apache.felix.framework.util.Util;
import org.apache.felix.framework.util.VersionRange;
import org.osgi.framework.Bundle;
import org.osgi.framework.Constants;
import org.osgi.framework.Version;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.framework.wiring.BundleRevisions;
import org.osgi.framework.wiring.BundleWire;
import org.osgi.framework.wiring.FrameworkWiring;
import org.osgi.service.packageadmin.ExportedPackage;
import org.osgi.service.packageadmin.PackageAdmin;
import org.osgi.service.packageadmin.RequiredBundle;
/**
 * Felix's implementation of the (legacy) OSGi {@link PackageAdmin} service.
 * All operations delegate to the owning {@link Felix} framework instance and
 * to the modern {@code org.osgi.framework.wiring} APIs. Raw types are used
 * throughout to match the pre-generics PackageAdmin API surface.
 */
public class PackageAdminImpl implements PackageAdmin
{
    // Orders ExportedPackage objects by version, highest version first.
    private static final Comparator COMPARATOR = new Comparator() {
        public int compare(Object o1, Object o2)
        {
            // Reverse arguments to sort in descending order.
            return ((ExportedPackage) o2).getVersion().compareTo(
                ((ExportedPackage) o1).getVersion());
        }
    };

    // The framework instance this service delegates to.
    private Felix m_felix = null;

    PackageAdminImpl(Felix felix)
    {
        m_felix = felix;
    }

    /**
     * Returns the bundle associated with this class if the class was
     * loaded from a bundle, otherwise returns null.
     *
     * @param clazz the class for which to determine its associated bundle.
     * @return the bundle associated with the specified class, otherwise null.
     **/
    public Bundle getBundle(Class clazz)
    {
        return m_felix.getBundle(clazz);
    }

    /**
     * Returns all bundles that have a specified symbolic name and whose
     * version is in the specified version range. If no version range is
     * specified, then all bundles with the specified symbolic name are
     * returned. The array is sorted in descending version order.
     *
     * @param symbolicName the target symbolic name.
     * @param versionRange the target version range.
     * @return an array of matching bundles sorted in descending version order.
     **/
    public Bundle[] getBundles(String symbolicName, String versionRange)
    {
        VersionRange vr = (versionRange == null) ? null : VersionRange.parse(versionRange);
        Bundle[] bundles = m_felix.getBundles();
        List list = new ArrayList();
        for (int i = 0; (bundles != null) && (i < bundles.length); i++)
        {
            String sym = bundles[i].getSymbolicName();
            if ((sym != null) && sym.equals(symbolicName))
            {
                // Match the version of the bundle's current revision.
                Version v = bundles[i].adapt(BundleRevision.class).getVersion();
                if ((vr == null) || vr.isInRange(v))
                {
                    list.add(bundles[i]);
                }
            }
        }
        if (list.isEmpty())
        {
            // PackageAdmin contract: no matches yields null, not an empty array.
            return null;
        }
        bundles = (Bundle[]) list.toArray(new Bundle[list.size()]);
        Arrays.sort(bundles,new Comparator() {
            public int compare(Object o1, Object o2)
            {
                Version v1 = ((Bundle) o1).adapt(BundleRevision.class).getVersion();
                Version v2 = ((Bundle) o2).adapt(BundleRevision.class).getVersion();
                // Compare in reverse order to get descending sort.
                return v2.compareTo(v1);
            }
        });
        return bundles;
    }

    /**
     * Returns {@link PackageAdmin#BUNDLE_TYPE_FRAGMENT} if the bundle's
     * manifest declares a {@code Fragment-Host} header, otherwise 0.
     *
     * @param bundle the bundle to classify.
     * @return the bundle type bit mask.
     **/
    public int getBundleType(Bundle bundle)
    {
        Map headerMap = ((BundleRevisionImpl)
            bundle.adapt(BundleRevision.class)).getHeaders();
        if (headerMap.containsKey(Constants.FRAGMENT_HOST))
        {
            return PackageAdmin.BUNDLE_TYPE_FRAGMENT;
        }
        return 0;
    }

    /**
     * Returns the exported package associated with the specified
     * package name. If there are more than one version of the package
     * being exported, then the highest version is returned.
     *
     * @param name the name of the exported package to find.
     * @return the exported package or null if no matching package was found.
     **/
    public ExportedPackage getExportedPackage(String name)
    {
        // Get all versions of the exported package.
        ExportedPackage[] pkgs = m_felix.getExportedPackages(name);
        // If there are no versions exported, then return null.
        if ((pkgs == null) || (pkgs.length == 0))
        {
            return null;
        }
        // Sort the exported versions.
        Arrays.sort(pkgs, COMPARATOR);
        // Return the highest version.
        return pkgs[0];
    }

    /**
     * Returns all versions of the named exported package, or null if the
     * package is not exported by any bundle.
     *
     * @param name the name of the exported package to find.
     * @return all matching exported packages, or null when there are none.
     **/
    public ExportedPackage[] getExportedPackages(String name)
    {
        ExportedPackage[] pkgs = m_felix.getExportedPackages(name);
        return ((pkgs == null) || pkgs.length == 0) ? null : pkgs;
    }

    /**
     * Returns the packages exported by the specified bundle.
     *
     * @param bundle the bundle whose exported packages are to be returned.
     * @return an array of packages exported by the bundle or null if the
     *         bundle does not export any packages.
     **/
    public ExportedPackage[] getExportedPackages(Bundle bundle)
    {
        return m_felix.getExportedPackages(bundle);
    }

    /**
     * Returns the fragments attached to the specified bundle across all of
     * its revisions, or null if the bundle is itself a fragment or has no
     * attached fragments.
     *
     * @param bundle the (host) bundle whose fragments are requested.
     * @return the attached fragment bundles, or null.
     **/
    public Bundle[] getFragments(Bundle bundle)
    {
        // If the bundle is not a fragment, then return its fragments.
        if ((getBundleType(bundle) & BUNDLE_TYPE_FRAGMENT) == 0)
        {
            List<Bundle> list = new ArrayList<Bundle>();
            // Iterate through revisions
            for (BundleRevision revision : bundle.adapt(BundleRevisions.class).getRevisions())
            {
                // Get attached fragments.
                // A null wiring means the revision is unresolved, so it
                // cannot have attached fragments.
                if (revision.getWiring() != null)
                {
                    List<BundleRevision> fragments =
                        Util.getFragments(revision.getWiring());
                    for (int i = 0; i < fragments.size(); i++)
                    {
                        Bundle b = fragments.get(i).getBundle();
                        if (b != null)
                        {
                            list.add(b);
                        }
                    }
                }
            }
            // Convert list to an array.
            return (list.isEmpty())
                ? null
                : (Bundle[]) list.toArray(new Bundle[list.size()]);
        }
        return null;
    }

    /**
     * Returns the host bundles of the specified fragment bundle, or null if
     * the bundle is not a fragment or is not attached to any host.
     *
     * @param bundle the fragment bundle whose hosts are requested.
     * @return the host bundles, or null.
     **/
    public Bundle[] getHosts(Bundle bundle)
    {
        // If the bundle is a fragment, return its hosts
        if ((getBundleType(bundle) & BUNDLE_TYPE_FRAGMENT) != 0)
        {
            List<Bundle> list = new ArrayList<Bundle>();
            // Iterate through revisions
            for (BundleRevision revision : bundle.adapt(BundleRevisions.class).getRevisions())
            {
                // Get hosts
                // NOTE(review): getRequiredWires(null) returns all required
                // wires; for a fragment revision these appear to be its host
                // wires — confirm against the wiring implementation.
                if (revision.getWiring() != null)
                {
                    List<BundleWire> hostWires = revision.getWiring().getRequiredWires(null);
                    for (int i = 0; (hostWires != null) && (i < hostWires.size()); i++)
                    {
                        Bundle b = hostWires.get(i).getProviderWiring().getBundle();
                        if (b != null)
                        {
                            list.add(b);
                        }
                    }
                }
            }
            // Convert list to an array.
            return (list.isEmpty())
                ? null
                : (Bundle[]) list.toArray(new Bundle[list.size()]);
        }
        return null;
    }

    /**
     * Returns a RequiredBundle view of every installed bundle matching the
     * given symbolic name; a null symbolic name matches all bundles.
     *
     * @param symbolicName the symbolic name to match, or null for all.
     * @return the matching required bundles, or null when there are none.
     **/
    public RequiredBundle[] getRequiredBundles(String symbolicName)
    {
        List list = new ArrayList();
        for (Bundle bundle : m_felix.getBundles())
        {
            if ((symbolicName == null)
                || (symbolicName.equals(bundle.getSymbolicName())))
            {
                list.add(new RequiredBundleImpl(m_felix, (BundleImpl) bundle));
            }
        }
        return (list.isEmpty())
            ? null
            : (RequiredBundle[]) list.toArray(new RequiredBundle[list.size()]);
    }

    /**
     * The OSGi specification states that refreshing packages is
     * asynchronous; this method simply notifies the package admin
     * thread to do a refresh.
     * @param bundles array of bundles to refresh or <tt>null</tt> to refresh
     *                any bundles in need of refreshing.
     **/
    public void refreshPackages(Bundle[] bundles)
        throws SecurityException
    {
        List<Bundle> list = (bundles == null)
            ? null
            : Arrays.asList(bundles);
        m_felix.adapt(FrameworkWiring.class).refreshBundles(list);
    }

    /**
     * Attempts to resolve the specified bundles (all unresolved bundles when
     * null) by delegating to {@link FrameworkWiring#resolveBundles}.
     *
     * @param bundles the bundles to resolve, or null for all.
     * @return true if all specified bundles could be resolved.
     **/
    public boolean resolveBundles(Bundle[] bundles)
    {
        List<Bundle> list = (bundles == null)
            ? null
            : Arrays.asList(bundles);
        return m_felix.adapt(FrameworkWiring.class).resolveBundles(list);
    }
}
// Copyright 2016-2022 The Libsacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package define
import (
"net/http"
"github.com/sacloud/libsacloud/v2/internal/define/names"
"github.com/sacloud/libsacloud/v2/internal/define/ops"
"github.com/sacloud/libsacloud/v2/internal/dsl"
"github.com/sacloud/libsacloud/v2/internal/dsl/meta"
"github.com/sacloud/libsacloud/v2/sacloud/naked"
"github.com/sacloud/libsacloud/v2/sacloud/types"
)
// Resource name and API path for the ESME service. ESME is exposed through
// the generic "commonserviceitem" endpoint rather than a dedicated path.
const (
	esmeAPIName     = "ESME"
	esmeAPIPathName = "commonserviceitem"
)
// esmeAPI is the code-generation DSL definition of the ESME (SMS sending /
// two-factor auth) API. Besides the standard CRUD operations it defines two
// SMS-sending operations — one where the one-time password is generated
// server-side and one where the caller supplies it — plus a log listing.
var esmeAPI = &dsl.Resource{
	Name:       esmeAPIName,
	PathName:   esmeAPIPathName,
	PathSuffix: dsl.CloudAPISuffix,
	IsGlobal:   true, // ESME is a global (zone-independent) resource
	Operations: dsl.Operations{
		// find
		ops.FindCommonServiceItem(esmeAPIName, esmeNakedType, findParameter, esmeView),
		// create
		ops.CreateCommonServiceItem(esmeAPIName, esmeNakedType, esmeCreateParam, esmeView),
		// read
		ops.ReadCommonServiceItem(esmeAPIName, esmeNakedType, esmeView),
		// update
		ops.UpdateCommonServiceItem(esmeAPIName, esmeNakedType, esmeUpdateParam, esmeView),
		// delete
		ops.Delete(esmeAPIName),
		// SendMessageWithGeneratedOTP: PUT {id}/esme/2fa/otp — the OTP is
		// generated on the server and returned in the result.
		{
			ResourceName: esmeAPIName,
			Name:         "SendMessageWithGeneratedOTP",
			PathFormat:   dsl.IDAndSuffixPathFormat("esme/2fa/otp"),
			Method:       http.MethodPut,
			RequestEnvelope: dsl.RequestEnvelope(&dsl.EnvelopePayloadDesc{
				Type: meta.Static(naked.ESMESendSMSRequest{}),
				Name: esmeAPIName,
			}),
			Arguments: dsl.Arguments{
				dsl.ArgumentID,
				dsl.MappableArgument("param", esmeSendMessageWithGeneratedOTPParam, "ESME"),
			},
			ResponseEnvelope: dsl.ResponseEnvelope(&dsl.EnvelopePayloadDesc{
				Type: meta.Static(naked.ESMESendSMSResponse{}),
				Name: esmeAPIName,
			}),
			Results: dsl.Results{
				{
					SourceField: esmeAPIName,
					DestField:   esmeSendMessageResult.Name,
					Model:       esmeSendMessageResult,
				},
			},
		},
		// SendMessageWithInputtedOTP: PUT {id}/esme/2fa — the caller provides
		// the OTP in the request parameters.
		{
			ResourceName: esmeAPIName,
			Name:         "SendMessageWithInputtedOTP",
			PathFormat:   dsl.IDAndSuffixPathFormat("esme/2fa"),
			Method:       http.MethodPut,
			RequestEnvelope: dsl.RequestEnvelope(&dsl.EnvelopePayloadDesc{
				Type: meta.Static(naked.ESMESendSMSRequest{}),
				Name: esmeAPIName,
			}),
			Arguments: dsl.Arguments{
				dsl.ArgumentID,
				dsl.MappableArgument("param", esmeSendMessageWithInputtedOTPParam, "ESME"),
			},
			ResponseEnvelope: dsl.ResponseEnvelope(&dsl.EnvelopePayloadDesc{
				Type: meta.Static(naked.ESMESendSMSResponse{}),
				Name: esmeAPIName,
			}),
			Results: dsl.Results{
				{
					SourceField: esmeAPIName,
					DestField:   esmeSendMessageResult.Name,
					Model:       esmeSendMessageResult,
				},
			},
		},
		// Logs: GET {id}/esme/logs — returns the send history for the resource.
		{
			ResourceName: esmeAPIName,
			Name:         "Logs",
			PathFormat:   dsl.IDAndSuffixPathFormat("esme/logs"),
			Method:       http.MethodGet,
			Arguments: dsl.Arguments{
				dsl.ArgumentID,
			},
			ResponseEnvelope: dsl.ResponseEnvelope(&dsl.EnvelopePayloadDesc{
				Name: "ESME",
				Type: meta.Static(&naked.ESMELogs{}),
			}),
			Results: dsl.Results{
				{
					SourceField: "ESME.Logs",
					DestField:   "Logs",
					IsPlural:    true,
					Model:       esmeLogsModel,
				},
			},
		},
	},
}
var (
	// esmeNakedType is the raw (wire-format) representation of an ESME.
	esmeNakedType = meta.Static(naked.ESME{})

	// esmeView is the user-facing model returned by find/read operations.
	esmeView = &dsl.Model{
		Name:      esmeAPIName,
		NakedType: esmeNakedType,
		Fields: []*dsl.FieldDesc{
			fields.ID(),
			fields.Name(),
			fields.Description(),
			fields.Tags(),
			fields.Availability(),
			fields.IconID(),
			fields.CreatedAt(),
			fields.ModifiedAt(),
		},
	}

	// esmeCreateParam is the request model for Create. The provider class is
	// fixed to "esme" so the common-service-item endpoint routes correctly.
	esmeCreateParam = &dsl.Model{
		Name:      names.CreateParameterName(esmeAPIName),
		NakedType: esmeNakedType,
		ConstFields: []*dsl.ConstFieldDesc{
			{
				Name: "Class",
				Type: meta.TypeString,
				Tags: &dsl.FieldTags{
					MapConv: "Provider.Class",
				},
				Value: `"esme"`,
			},
		},
		Fields: []*dsl.FieldDesc{
			// common fields
			fields.Name(),
			fields.Description(),
			fields.Tags(),
			fields.IconID(),
		},
	}

	// esmeUpdateParam is the request model for Update.
	esmeUpdateParam = &dsl.Model{
		Name:      names.UpdateParameterName(esmeAPIName),
		NakedType: esmeNakedType,
		Fields: []*dsl.FieldDesc{
			// common fields
			fields.Name(),
			fields.Description(),
			fields.Tags(),
			fields.IconID(),
		},
	}

	// esmeSendMessageWithGeneratedOTPParam asks the server to generate the OTP
	// (OTPOperation is pinned to the "generate" value).
	esmeSendMessageWithGeneratedOTPParam = &dsl.Model{
		Name:      "ESMESendMessageWithGeneratedOTPRequest",
		NakedType: meta.Static(naked.ESMESendSMSRequest{}),
		ConstFields: []*dsl.ConstFieldDesc{
			{
				Name:  "OTPOperation",
				Type:  meta.Static(types.EOTPOperation("")),
				Value: `"` + types.OTPOperations.Generate.String() + `"`,
			},
		},
		Fields: []*dsl.FieldDesc{
			// common fields
			fields.Def("Destination", meta.TypeString), // destination number, international format starting with 81
			fields.Def("Sender", meta.TypeString),
			fields.Def("DomainName", meta.TypeString),
		},
	}

	// esmeSendMessageWithInputtedOTPParam carries a caller-supplied OTP
	// (OTPOperation is pinned to the "input" value).
	esmeSendMessageWithInputtedOTPParam = &dsl.Model{
		Name:      "ESMESendMessageWithInputtedOTPRequest",
		NakedType: meta.Static(naked.ESMESendSMSRequest{}),
		ConstFields: []*dsl.ConstFieldDesc{
			{
				Name:  "OTPOperation",
				Type:  meta.Static(types.EOTPOperation("")),
				Value: `"` + types.OTPOperations.Input.String() + `"`,
			},
		},
		Fields: []*dsl.FieldDesc{
			// common fields
			fields.Def("Destination", meta.TypeString), // destination number, international format starting with 81
			fields.Def("Sender", meta.TypeString),
			fields.Def("DomainName", meta.TypeString),
			fields.Def("OTP", meta.TypeString),
		},
	}

	// esmeSendMessageResult is the response model for both send operations.
	esmeSendMessageResult = &dsl.Model{
		Name:      "ESMESendMessageResult",
		NakedType: meta.Static(naked.ESMESendSMSResponse{}),
		Fields: []*dsl.FieldDesc{
			// common fields
			fields.Def("MessageID", meta.TypeString),
			fields.Def("Status", meta.TypeString), // TODO: should be a type in the types package, but kept as string because some values are undocumented
			fields.Def("OTP", meta.TypeString),
		},
	}

	// esmeLogsModel is one entry of the send-history list returned by Logs.
	esmeLogsModel = &dsl.Model{
		Name:      esmeAPIName + "Logs",
		NakedType: meta.Static(naked.ESMELog{}),
		IsArray:   true,
		Fields: []*dsl.FieldDesc{
			fields.Def("MessageID", meta.TypeString),
			fields.Def("Status", meta.TypeString), // TODO: should be a type in the types package, but kept as string because some values are undocumented
			fields.Def("OTP", meta.TypeString),
			fields.Def("Destination", meta.TypeString),
			fields.Def("SentAt", meta.TypeTime),
			fields.Def("DoneAt", meta.TypeTime),
			fields.Def("RetryCount", meta.TypeInt),
		},
	}
)
| sacloud/libsacloud | v2/internal/define/esme.go | GO | apache-2.0 | 6,874 |
/*
* Copyright 2022 Apollo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.ctrip.framework.apollo.biz.service;
import com.ctrip.framework.apollo.biz.entity.NamespaceLock;
import com.ctrip.framework.apollo.biz.repository.NamespaceLockRepository;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
 * Service managing per-namespace edit locks. A lock row keyed by namespace id
 * is inserted to acquire the lock and deleted to release it; acquisition
 * relies on the repository save failing when a lock row already exists.
 */
@Service
public class NamespaceLockService {
  private final NamespaceLockRepository namespaceLockRepository;

  public NamespaceLockService(final NamespaceLockRepository namespaceLockRepository) {
    this.namespaceLockRepository = namespaceLockRepository;
  }

  /** Returns the lock held on the given namespace, or null if none exists. */
  public NamespaceLock findLock(Long namespaceId){
    return namespaceLockRepository.findByNamespaceId(namespaceId);
  }

  /**
   * Attempts to persist the given lock.
   * NOTE(review): presumably a unique constraint on namespaceId makes this
   * throw when the namespace is already locked — confirm against the schema.
   */
  @Transactional
  public NamespaceLock tryLock(NamespaceLock lock){
    return namespaceLockRepository.save(lock);
  }

  /** Releases the lock on the given namespace by deleting its lock row. */
  @Transactional
  public void unlock(Long namespaceId){
    namespaceLockRepository.deleteByNamespaceId(namespaceId);
  }
}
| nobodyiam/apollo | apollo-biz/src/main/java/com/ctrip/framework/apollo/biz/service/NamespaceLockService.java | Java | apache-2.0 | 1,527 |
package regsrc
import (
"errors"
"fmt"
"regexp"
"strings"
"github.com/hashicorp/terraform-plugin-sdk/internal/svchost"
)
var (
	// ErrInvalidModuleSource is returned when a string cannot be interpreted
	// as a registry module source address.
	ErrInvalidModuleSource = errors.New("not a valid registry module source")

	// nameSubRe is the sub-expression that matches a valid module namespace or
	// name. It's strictly a super-set of what GitHub allows for user/org and
	// repo names respectively, but more restrictive than our original repo-name
	// regex which allowed periods but could cause ambiguity with hostname
	// prefixes. It does not anchor the start or end so it can be composed into
	// more complex RegExps below. Alphanumeric with - and _ allowed in non
	// leading or trailing positions. Max length 64 chars. (GitHub username is
	// 38 max.)
	nameSubRe = "[0-9A-Za-z](?:[0-9A-Za-z-_]{0,62}[0-9A-Za-z])?"

	// providerSubRe is the sub-expression that matches a valid provider. It
	// does not anchor the start or end so it can be composed into more complex
	// RegExps below. Only lowercase chars and digits are supported in practice.
	// Max length 64 chars.
	providerSubRe = "[0-9a-z]{1,64}"

	// moduleSourceRe is a regular expression that matches the basic
	// namespace/name/provider[//...] format for registry sources. It assumes
	// any FriendlyHost prefix has already been removed if present.
	moduleSourceRe = regexp.MustCompile(
		fmt.Sprintf("^(%s)\\/(%s)\\/(%s)(?:\\/\\/(.*))?$",
			nameSubRe, nameSubRe, providerSubRe))

	// NameRe is a regular expression defining the format allowed for namespace
	// or name fields in module registry implementations.
	NameRe = regexp.MustCompile("^" + nameSubRe + "$")

	// ProviderRe is a regular expression defining the format allowed for
	// provider fields in module registry implementations.
	ProviderRe = regexp.MustCompile("^" + providerSubRe + "$")

	// these hostnames are not allowed as registry sources, because they are
	// already special case module sources in terraform.
	disallowed = map[string]bool{
		"github.com":    true,
		"bitbucket.org": true,
	}
)
// Module describes a Terraform Registry Module source.
type Module struct {
	// RawHost is the friendly host prefix if one was present. It might be nil
	// if the original source had no host prefix which implies
	// PublicRegistryHost but is distinct from having an actual pointer to
	// PublicRegistryHost since it encodes the fact the original string didn't
	// include a host prefix at all which is significant for recovering actual
	// input not just normalized form. Most callers should access it with Host()
	// which will return public registry host instance if it's nil.
	RawHost *FriendlyHost

	// RawNamespace, RawName and RawProvider are the three mandatory path
	// segments of a registry source: namespace/name/provider.
	RawNamespace string
	RawName      string
	RawProvider  string

	// RawSubmodule is the optional "//path" suffix identifying a submodule
	// inside the module package, stored without the leading slashes.
	RawSubmodule string
}
// NewModule construct a new module source from separate parts. Pass empty
// string if host or submodule are not needed.
//
// Returns ErrInvalidModuleSource if a host is given but is syntactically
// invalid or is one of the hostnames (e.g. github.com, bitbucket.org) that
// terraform already treats as special-cased module sources.
func NewModule(host, namespace, name, provider, submodule string) (*Module, error) {
	m := &Module{
		RawNamespace: namespace,
		RawName:      name,
		RawProvider:  provider,
		RawSubmodule: submodule,
	}
	if host != "" {
		h := NewFriendlyHost(host)
		if h != nil {
			// Reject hosts that fail validation or collide with terraform's
			// built-in special source prefixes.
			if !h.Valid() || disallowed[h.Display()] {
				return nil, ErrInvalidModuleSource
			}
		}
		m.RawHost = h
	}
	return m, nil
}
// ParseModuleSource attempts to parse source as a Terraform registry module
// source. If the string is not found to be in a valid format,
// ErrInvalidModuleSource is returned. Note that this can only be used on
// "input" strings, e.g. either ones supplied by the user or potentially
// normalised but in Display form (unicode). It will fail to parse a source with
// a punycoded domain since this is not permitted input from a user. If you have
// an already normalized string internally, you can compare it without parsing
// by comparing with the normalized version of the subject with the normal
// string equality operator.
func ParseModuleSource(source string) (*Module, error) {
	// See if there is a friendly host prefix.
	host, rest := ParseFriendlyHost(source)
	if host != nil {
		// A present-but-invalid host, or one of the special-cased hostnames
		// (github.com, bitbucket.org), is rejected outright.
		if !host.Valid() || disallowed[host.Display()] {
			return nil, ErrInvalidModuleSource
		}
	}
	matches := moduleSourceRe.FindStringSubmatch(rest)
	if len(matches) < 4 {
		return nil, ErrInvalidModuleSource
	}
	// matches[1..3] are namespace, name and provider; matches[4] is the
	// optional //submodule suffix, which may be the empty string.
	m := &Module{
		RawHost:      host,
		RawNamespace: matches[1],
		RawName:      matches[2],
		RawProvider:  matches[3],
	}
	if len(matches) == 5 {
		m.RawSubmodule = matches[4]
	}
	return m, nil
}
// Display returns the source formatted for display to the user in CLI or web
// output.
func (m *Module) Display() string {
	// Uses the host's display (unicode) form; the public registry host is
	// collapsed to no prefix at all by normalizedHostPrefix.
	return m.formatWithPrefix(m.normalizedHostPrefix(m.Host().Display()), false)
}
// Normalized returns the source formatted for internal reference or comparison.
func (m *Module) Normalized() string {
	// Uses the host's normalized form and lower-cases the remainder
	// (preserveCase=false in formatWithPrefix).
	return m.formatWithPrefix(m.normalizedHostPrefix(m.Host().Normalized()), false)
}
// String returns the source formatted as the user originally typed it assuming
// it was parsed from user input.
func (m *Module) String() string {
	// Preserve the host exactly as entered — no normalization, no case
	// folding — and only add the separator when a host was present at all.
	prefix := ""
	if m.RawHost != nil {
		prefix = m.RawHost.String() + "/"
	}
	return m.formatWithPrefix(prefix, true)
}
// Equal compares the module source against another instance taking
// normalization into account.
func (m *Module) Equal(other *Module) bool {
	// Both sides are reduced to their normalized string form first, so
	// case and host-representation differences do not affect equality.
	return m.Normalized() == other.Normalized()
}
// Host returns the FriendlyHost object describing which registry this module is
// in. If the original source string had not host component this will return the
// PublicRegistryHost.
func (m *Module) Host() *FriendlyHost {
	if m.RawHost == nil {
		return PublicRegistryHost
	}
	return m.RawHost
}
// normalizedHostPrefix returns "host/" for any non-default registry, and the
// empty string for the public registry (which is implied and never printed).
func (m *Module) normalizedHostPrefix(host string) string {
	if !m.Host().Equal(PublicRegistryHost) {
		return host + "/"
	}
	return ""
}
// formatWithPrefix assembles "<hostPrefix>namespace/name/provider[//submodule]".
// When preserveCase is false the whole result is lower-cased, which is the
// normalized form used for comparisons.
func (m *Module) formatWithPrefix(hostPrefix string, preserveCase bool) string {
	sub := ""
	if m.RawSubmodule != "" {
		sub = "//" + m.RawSubmodule
	}
	formatted := hostPrefix + m.RawNamespace + "/" + m.RawName + "/" + m.RawProvider + sub
	if preserveCase {
		return formatted
	}
	// lower case by default
	return strings.ToLower(formatted)
}
// Module returns just the registry ID of the module, without a hostname or
// suffix.
func (m *Module) Module() string {
	return fmt.Sprintf("%s/%s/%s", m.RawNamespace, m.RawName, m.RawProvider)
}
// SvcHost returns the svchost.Hostname for this module. Since FriendlyHost may
// contain an invalid hostname, this also returns an error indicating if it
// could be converted to a svchost.Hostname. If no host is specified, the
// default PublicRegistryHost is returned.
func (m *Module) SvcHost() (svchost.Hostname, error) {
	if m.RawHost == nil {
		return svchost.ForComparison(PublicRegistryHost.Raw)
	}
	return svchost.ForComparison(m.RawHost.Raw)
}
| yamamoto-febc/terraform-provider-sakuracloud | vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/module.go | GO | apache-2.0 | 6,900 |
package co.mindie.cindy.async.manager;
import co.mindie.cindy.async.task.AsyncResult;
import co.mindie.cindy.async.task.AsyncResultTask;
import java.lang.reflect.Method;
/**
* Created by simoncorsin on 20/11/14.
*/
/**
 * Wraps a reflected {@link Method} and executes it asynchronously through an
 * {@link AsyncTaskManager}, exposing the eventual outcome as an
 * {@link AsyncResult}.
 */
public class AsyncRunner {

	private final Method method;

	public AsyncRunner(Method method) {
		this.method = method;
	}

	/**
	 * Schedules an asynchronous invocation of the wrapped method on the given
	 * target instance with the given arguments.
	 *
	 * @param asyncTaskManager the manager that will execute the task
	 * @param instance         the object to invoke the method on
	 * @param params           the arguments to pass to the method
	 * @return a handle to the invocation's eventual result
	 */
	public AsyncResult<?> run(AsyncTaskManager asyncTaskManager, Object instance, Object[] params) {
		AsyncResultTask<Object> task = new AsyncResultTask<Object>() {
			@Override
			protected Object doRun() throws Throwable {
				// Reflectively invoke the target method when the task executes.
				return method.invoke(instance, params);
			}
		};
		asyncTaskManager.enqueueTask(task);
		return task;
	}
}
| mindie/Cindy | src/main/java/co/mindie/cindy/async/manager/AsyncRunner.java | Java | apache-2.0 | 954 |
<?php
/**
* Bitcoin Status Page - Stats
*
* @category File
* @package BitcoinStatus
* @author Craig Watson <craig@cwatson.org>
* @license https://www.apache.org/licenses/LICENSE-2.0 Apache License, Version 2.0
* @link https://github.com/craigwatson/bitcoind-status
*/
// Abort early when the caller did not say which statistic to render.
if (!isset($_GET['stat'])) {
    die('Need to pass stat');
}

require_once './php/config.php';

header('Content-Type: application/javascript');

// Resolve the requested statistic to its data file and chart columns.
// $prefixes/$postfixes wrap each cell so that epoch timestamps become
// JavaScript Date objects in the emitted literal.
switch ($_GET['stat']) {
case 'connection':
    $data_file  = $config['stats_file'];
    $min_points = $config['peercount_min_data_points'];
    $headers    = array('Date', 'Connections');
    $prefixes   = array('new Date(', '');
    $postfixes  = array('*1000)', '');
    break;
case 'peer':
    $data_file  = $config['peercount_file'];
    $min_points = $config['peercount_min_data_points'];
    $headers    = array('Date', 'Other', 'Classic', 'BitCoinJ', 'Core', 'Unlimited');
    $prefixes   = array('new Date(', '', '', '', '', '');
    $postfixes  = array('*1000)', '', '', '', '', '');
    // Each user-configured extra node contributes one additional plain column.
    foreach ($config['peercount_extra_nodes'] as $node_label) {
        $headers[]   = $node_label;
        $prefixes[]  = '';
        $postfixes[] = '';
    }
    break;
default:
    die('Invalid value passed to stat');
}

// Load previously collected data points, defaulting to an empty set.
if (is_file($data_file)) {
    $data = json_decode(file_get_contents($data_file), true);
} else {
    $data = array();
}

// Emit the dataset as a JavaScript array-of-arrays literal.
echo "var " . $_GET['stat'] . "ChartData = [\n";

// Header row: single-quoted column titles.
$quoted_headers = array();
foreach ($headers as $header) {
    $quoted_headers[] = "'" . $header . "'";
}
echo "\t[" . implode(",", $quoted_headers) . "],\n";

// Data rows: each cell wrapped in its column's prefix/postfix, rows
// separated by ",\n" with a trailing newline after the final row.
$rows = array();
foreach ($data as $row) {
    $cells  = array();
    $column = 0;
    foreach ($row as $cell) {
        $cells[] = $prefixes[$column] . $cell . $postfixes[$column];
        $column++;
    }
    $rows[] = "\t[" . implode(",", $cells) . "]";
}
if (count($rows) > 0) {
    echo implode(",\n", $rows) . "\n";
}

// Finish output
echo "]";
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.domain.variable.inverserelation;
import static org.mockito.Mockito.mock;
import static org.optaplanner.core.impl.testdata.util.PlannerAssert.assertCollectionContainsExactly;
import java.util.Arrays;
import org.junit.jupiter.api.Test;
import org.optaplanner.core.impl.domain.entity.descriptor.EntityDescriptor;
import org.optaplanner.core.impl.domain.solution.descriptor.SolutionDescriptor;
import org.optaplanner.core.impl.domain.variable.descriptor.ShadowVariableDescriptor;
import org.optaplanner.core.impl.score.director.ScoreDirector;
import org.optaplanner.core.impl.testdata.domain.shadow.inverserelation.TestdataInverseRelationEntity;
import org.optaplanner.core.impl.testdata.domain.shadow.inverserelation.TestdataInverseRelationSolution;
import org.optaplanner.core.impl.testdata.domain.shadow.inverserelation.TestdataInverseRelationValue;
public class CollectionInverseVariableListenerTest {

    @Test
    public void normal() {
        // The listener under test keeps each value's "entities" shadow
        // collection in sync with the genuine "value" variable on the entities.
        ScoreDirector scoreDirector = mock(ScoreDirector.class);
        SolutionDescriptor solutionDescriptor = TestdataInverseRelationSolution.buildSolutionDescriptor();
        EntityDescriptor entityDescriptor =
                solutionDescriptor.findEntityDescriptorOrFail(TestdataInverseRelationEntity.class);
        EntityDescriptor shadowEntityDescriptor =
                solutionDescriptor.findEntityDescriptorOrFail(TestdataInverseRelationValue.class);
        ShadowVariableDescriptor entitiesVariableDescriptor =
                shadowEntityDescriptor.getShadowVariableDescriptor("entities");
        CollectionInverseVariableListener listener = new CollectionInverseVariableListener(
                (InverseRelationShadowVariableDescriptor) entitiesVariableDescriptor,
                entityDescriptor.getGenuineVariableDescriptor("value"));

        // Fixture: entities a/b point at value 1, entities c/d at value 3.
        TestdataInverseRelationValue value1 = new TestdataInverseRelationValue("1");
        TestdataInverseRelationValue value2 = new TestdataInverseRelationValue("2");
        TestdataInverseRelationValue value3 = new TestdataInverseRelationValue("3");
        TestdataInverseRelationEntity entityA = new TestdataInverseRelationEntity("a", value1);
        TestdataInverseRelationEntity entityB = new TestdataInverseRelationEntity("b", value1);
        TestdataInverseRelationEntity entityC = new TestdataInverseRelationEntity("c", value3);
        TestdataInverseRelationEntity entityD = new TestdataInverseRelationEntity("d", value3);

        TestdataInverseRelationSolution solution = new TestdataInverseRelationSolution("solution");
        solution.setEntityList(Arrays.asList(entityA, entityB, entityC, entityD));
        solution.setValueList(Arrays.asList(value1, value2, value3));

        // Sanity check of the initial shadow state from the constructor wiring.
        assertCollectionContainsExactly(value1.getEntities(), entityA, entityB);
        assertCollectionContainsExactly(value2.getEntities());
        assertCollectionContainsExactly(value3.getEntities(), entityC, entityD);

        // Move entity "c" from value3 to value2, notifying the listener around
        // the change exactly like a score director would.
        listener.beforeVariableChanged(scoreDirector, entityC);
        entityC.setValue(value2);
        listener.afterVariableChanged(scoreDirector, entityC);

        assertCollectionContainsExactly(value1.getEntities(), entityA, entityB);
        assertCollectionContainsExactly(value2.getEntities(), entityC);
        assertCollectionContainsExactly(value3.getEntities(), entityD);
    }
}
| ge0ffrey/optaplanner | optaplanner-core/src/test/java/org/optaplanner/core/impl/domain/variable/inverserelation/CollectionInverseVariableListenerTest.java | Java | apache-2.0 | 3,820 |
#include <bits/stdc++.h>
// Greatest common divisor via the iterative Euclidean algorithm.
// gcd(a, 0) == a, matching the recursive formulation.
template<typename T> T gcd(T a, T b) {
    while (b != 0) {
        T r = a % b;
        a = b;
        b = r;
    }
    return a;
}
// Least common multiple.
// Divide before multiplying: a / gcd(a, b) * b avoids the intermediate
// overflow that the previous a * b / gcd(a, b) suffered for large operands.
// Assumes a and b are non-negative and not both zero.
template<typename T> T lcm(T a, T b) {
    return a / gcd(a, b) * b;
}
// In-place minimum: lower a to b when b is smaller.
template<typename T> void chmin(T& a, T b) { if (a > b) a = b; }
// In-place maximum: raise a to b when b is larger.
template<typename T> void chmax(T& a, T b) { if (a < b) a = b; }
// Read a single int from stdin.
// NOTE(review): the scanf return value is ignored, so the result is
// indeterminate if the read fails — acceptable for contest input only.
int in() { int x; scanf("%d", &x); return x; }
using namespace std;
#ifdef ONLINE_JUDGE
#define debug(args...)
#else
#define debug(args...) fprintf(stderr,args)
#endif
typedef long long Int;
typedef unsigned long long uInt;
typedef unsigned uint;
int Z, U;
// Build a binary string containing exactly Z zeros and U ones such that it
// has no "00" substring and no "111" substring, or print -1 when impossible.
//
// The previous body was a half-finished refactor that could not compile:
// it applied std::string operations (substr, operator[], operator+=) to a
// std::list<int> and performed pointer arithmetic (it + 1) on a list
// iterator. Its own final validation, however, defines the contract above.
//
// Construction: the Z zeros split the string into Z + 1 groups of ones.
// Each interior group (between two zeros) needs at least one '1' to avoid
// "00", and every group can hold at most two '1's to avoid "111". Hence a
// solution exists iff max(0, Z - 1) <= U <= 2 * (Z + 1).
int main(void) {
    int Z, U;
    cin >> Z >> U;
    long long minOnes = max(0, Z - 1);
    long long maxOnes = 2LL * (Z + 1);
    if (U < minOnes || U > maxOnes) {
        cout << -1 << endl;
        return 0;
    }
    // ones[i] = number of '1's placed in group i (group i precedes zero i).
    vector<int> ones(Z + 1, 0);
    int remaining = U;
    // Mandatory single '1' in each interior group, so no two zeros touch.
    for (int i = 1; i < Z; i++) {
        ones[i] = 1;
        remaining -= 1;
    }
    // Greedily top groups up to their capacity of two with the leftovers.
    for (int i = 0; i <= Z && remaining > 0; i++) {
        int add = min(2 - ones[i], remaining);
        ones[i] += add;
        remaining -= add;
    }
    string ans;
    for (int i = 0; i <= Z; i++) {
        ans.append(ones[i], '1');
        if (i < Z) {
            ans.push_back('0');
        }
    }
    cout << ans << endl;
    return 0;
}
| aajjbb/contest-files | Codeforces/Team.cpp | C++ | apache-2.0 | 1,823 |
/* Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
/* eslint-env serviceworker, browser */
import {newResponse, Proxy, skipWaitingAndClaim} from "../proxy";
const CACHE = "MYCACHE";
skipWaitingAndClaim(self);
self.addEventListener("fetch", (event: FetchEvent) => {
  // Rewrite every response so it carries an explicit freshness policy
  // before the proxy caches or serves it.
  const transformResponse = (req: Request, res: Response) =>
    newResponse(res, (headers) => {
      // 30s of freshness, plus 30s of serving stale content on errors.
      headers.set("cache-control", "max-age=30, stale-if-error=30");
      headers.set("date", new Date().toUTCString());
      return headers;
    });
  const proxy = new Proxy(CACHE, null, transformResponse);
  event.respondWith(proxy.fetch(event.request));
});
| ithinkihaveacat/sw-proxy | examples/stale-if-error.ts | TypeScript | apache-2.0 | 1,220 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.organizations.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Contains a list of child entities, either OUs or accounts.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/organizations-2016-11-28/Child" target="_top">AWS API
* Documentation</a>
*/
// NOTE: code-generated by the AWS Java SDK generator (see @Generated below);
// regeneration will overwrite hand edits, so changes belong in the generator.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Child implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The unique identifier (ID) of this child entity.
     * </p>
     * <p>
     * The <a href="http://wikipedia.org/wiki/regex">regex pattern</a> for a child ID string requires one of the
     * following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * Account: a string that consists of exactly 12 digits.
     * </p>
     * </li>
     * <li>
     * <p>
     * Organizational unit (OU): a string that begins with "ou-" followed by from 4 to 32 lower-case letters or digits
     * (the ID of the root that contains the OU) followed by a second "-" dash and from 8 to 32 additional lower-case
     * letters or digits.
     * </p>
     * </li>
     * </ul>
     */
    private String id;
    /**
     * <p>
     * The type of this child entity.
     * </p>
     */
    private String type;

    /**
     * <p>
     * The unique identifier (ID) of this child entity.
     * </p>
     * <p>
     * The <a href="http://wikipedia.org/wiki/regex">regex pattern</a> for a child ID string requires one of the
     * following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * Account: a string that consists of exactly 12 digits.
     * </p>
     * </li>
     * <li>
     * <p>
     * Organizational unit (OU): a string that begins with "ou-" followed by from 4 to 32 lower-case letters or digits
     * (the ID of the root that contains the OU) followed by a second "-" dash and from 8 to 32 additional lower-case
     * letters or digits.
     * </p>
     * </li>
     * </ul>
     *
     * @param id
     *        The unique identifier (ID) of this child entity.</p>
     *        <p>
     *        The <a href="http://wikipedia.org/wiki/regex">regex pattern</a> for a child ID string requires one of the
     *        following:
     *        </p>
     *        <ul>
     *        <li>
     *        <p>
     *        Account: a string that consists of exactly 12 digits.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        Organizational unit (OU): a string that begins with "ou-" followed by from 4 to 32 lower-case letters or
     *        digits (the ID of the root that contains the OU) followed by a second "-" dash and from 8 to 32 additional
     *        lower-case letters or digits.
     *        </p>
     *        </li>
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * <p>
     * The unique identifier (ID) of this child entity.
     * </p>
     * <p>
     * The <a href="http://wikipedia.org/wiki/regex">regex pattern</a> for a child ID string requires one of the
     * following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * Account: a string that consists of exactly 12 digits.
     * </p>
     * </li>
     * <li>
     * <p>
     * Organizational unit (OU): a string that begins with "ou-" followed by from 4 to 32 lower-case letters or digits
     * (the ID of the root that contains the OU) followed by a second "-" dash and from 8 to 32 additional lower-case
     * letters or digits.
     * </p>
     * </li>
     * </ul>
     *
     * @return The unique identifier (ID) of this child entity.</p>
     *         <p>
     *         The <a href="http://wikipedia.org/wiki/regex">regex pattern</a> for a child ID string requires one of the
     *         following:
     *         </p>
     *         <ul>
     *         <li>
     *         <p>
     *         Account: a string that consists of exactly 12 digits.
     *         </p>
     *         </li>
     *         <li>
     *         <p>
     *         Organizational unit (OU): a string that begins with "ou-" followed by from 4 to 32 lower-case letters or
     *         digits (the ID of the root that contains the OU) followed by a second "-" dash and from 8 to 32
     *         additional lower-case letters or digits.
     *         </p>
     *         </li>
     */
    public String getId() {
        return this.id;
    }

    /**
     * <p>
     * The unique identifier (ID) of this child entity.
     * </p>
     * <p>
     * The <a href="http://wikipedia.org/wiki/regex">regex pattern</a> for a child ID string requires one of the
     * following:
     * </p>
     * <ul>
     * <li>
     * <p>
     * Account: a string that consists of exactly 12 digits.
     * </p>
     * </li>
     * <li>
     * <p>
     * Organizational unit (OU): a string that begins with "ou-" followed by from 4 to 32 lower-case letters or digits
     * (the ID of the root that contains the OU) followed by a second "-" dash and from 8 to 32 additional lower-case
     * letters or digits.
     * </p>
     * </li>
     * </ul>
     *
     * @param id
     *        The unique identifier (ID) of this child entity.</p>
     *        <p>
     *        The <a href="http://wikipedia.org/wiki/regex">regex pattern</a> for a child ID string requires one of the
     *        following:
     *        </p>
     *        <ul>
     *        <li>
     *        <p>
     *        Account: a string that consists of exactly 12 digits.
     *        </p>
     *        </li>
     *        <li>
     *        <p>
     *        Organizational unit (OU): a string that begins with "ou-" followed by from 4 to 32 lower-case letters or
     *        digits (the ID of the root that contains the OU) followed by a second "-" dash and from 8 to 32 additional
     *        lower-case letters or digits.
     *        </p>
     *        </li>
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public Child withId(String id) {
        setId(id);
        return this;
    }

    /**
     * <p>
     * The type of this child entity.
     * </p>
     *
     * @param type
     *        The type of this child entity.
     * @see ChildType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * <p>
     * The type of this child entity.
     * </p>
     *
     * @return The type of this child entity.
     * @see ChildType
     */
    public String getType() {
        return this.type;
    }

    /**
     * <p>
     * The type of this child entity.
     * </p>
     *
     * @param type
     *        The type of this child entity.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ChildType
     */
    public Child withType(String type) {
        setType(type);
        return this;
    }

    /**
     * <p>
     * The type of this child entity.
     * </p>
     *
     * @param type
     *        The type of this child entity.
     * @see ChildType
     */
    public void setType(ChildType type) {
        // Delegates to the fluent setter, which stores the enum's string form.
        withType(type);
    }

    /**
     * <p>
     * The type of this child entity.
     * </p>
     *
     * @param type
     *        The type of this child entity.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ChildType
     */
    public Child withType(ChildType type) {
        this.type = type.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getId() != null)
            sb.append("Id: ").append(getId()).append(",");
        if (getType() != null)
            sb.append("Type: ").append(getType());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof Child == false)
            return false;
        Child other = (Child) obj;
        // Generated structural equality: each "a ^ b" null check rejects pairs
        // where exactly one side's field is null.
        if (other.getId() == null ^ this.getId() == null)
            return false;
        if (other.getId() != null && other.getId().equals(this.getId()) == false)
            return false;
        if (other.getType() == null ^ this.getType() == null)
            return false;
        if (other.getType() != null && other.getType().equals(this.getType()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Conventional 31-based combination over the same fields equals() uses.
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
        hashCode = prime * hashCode + ((getType() == null) ? 0 : getType().hashCode());
        return hashCode;
    }

    @Override
    public Child clone() {
        try {
            return (Child) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    // Serialization hook invoked by the SDK's protocol marshallers.
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.organizations.model.transform.ChildMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-organizations/src/main/java/com/amazonaws/services/organizations/model/Child.java | Java | apache-2.0 | 10,391 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.codeInsight.daemon;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.daemon.impl.IntentionsUI;
import com.intellij.codeInsight.daemon.impl.ShowIntentionsPass;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInsight.intention.impl.CachedIntentions;
import com.intellij.codeInspection.unneededThrows.RedundantThrowsDeclarationLocalInspection;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.testFramework.fixtures.LightJavaCodeInsightFixtureTestCase;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static com.intellij.testFramework.assertions.Assertions.assertThat;
/**
* @author Dmitry Avdeev
*/
public class GutterIntentionsTest extends LightJavaCodeInsightFixtureTestCase {
  // A caret inside an unremarkable method must yield no available intentions,
  // even after gutter icons have been computed.
  public void testEmptyIntentions() {
    myFixture.configureByText(JavaFileType.INSTANCE, "class Foo {\n" +
                                                     "  <caret> private String test() {\n" +
                                                     "    return null;\n" +
                                                     "  }" +
                                                     "}");
    myFixture.findAllGutters();

    List<IntentionAction> intentions = myFixture.getAvailableIntentions();
    assertEmpty(intentions);
  }

  // A main() method shows one gutter at the caret but more than one gutter
  // entry in the intentions popup (run configurations contribute options).
  public void testOptions() {
    myFixture.configureByText(JavaFileType.INSTANCE, "public class Foo {\n" +
                                                     "  public static void <caret>main(String[] args) { someCode(); }" +
                                                     "}");
    assertSize(1, myFixture.findGuttersAtCaret());
    ShowIntentionsPass.IntentionsInfo intentions = ShowIntentionsPass.getActionsToShow(getEditor(), getFile(), false);
    assertThat(intentions.guttersToShow.size()).isGreaterThan(1);
  }

  // The run line marker action must be the first intention offered on a
  // JUnit test class.
  public void testRunLineMarker() {
    myFixture.addClass("package junit.framework; public class TestCase {}");
    myFixture.configureByText("MainTest.java", "public class Main<caret>Test extends junit.framework.TestCase {\n" +
                                               "  public void testFoo() {\n" +
                                               "  }\n" +
                                               "}");
    // Highlighting must run before cached intentions are populated.
    myFixture.doHighlighting();
    CachedIntentions intentions = IntentionsUI.getInstance(getProject()).getCachedIntentions(getEditor(), getFile());
    assertThat(intentions.getAllActions().get(0).getText()).startsWith("Run ");
  }

  // Gutter descriptors must be de-duplicated: no two entries may share a
  // display name (an action group expanded twice would violate this).
  public void testDoNotIncludeActionGroup() {
    myFixture.configureByText(JavaFileType.INSTANCE, "public class Foo {\n" +
                                                     "  public static void <caret>main(String[] args) { someCode(); }" +
                                                     "}");
    assertSize(1, myFixture.findGuttersAtCaret());
    ShowIntentionsPass.IntentionsInfo intentions = ShowIntentionsPass.getActionsToShow(getEditor(), getFile(), false);
    List<HighlightInfo.IntentionActionDescriptor> descriptors = intentions.guttersToShow;
    Set<String> names = descriptors.stream().map(descriptor -> descriptor.getDisplayName()).collect(Collectors.toSet());
    assertEquals(descriptors.size(), names.size());
  }

  // Error-level quick fixes (unresolved superclass) must sort before other
  // intentions.
  public void testFixesOnTop() {
    myFixture.configureByText(JavaFileType.INSTANCE, "public class Foo extends Bo<caret>o {\n" +
                                                     "  public static void main(String[] args) {}" +
                                                     "}");
    List<IntentionAction> actions = myFixture.getAvailableIntentions();
    assertThat(actions.get(0).getText()).startsWith("Create class ");
  }

  // Warning-level quick fixes (redundant throws) must also sort first.
  public void testWarningFixesOnTop() {
    myFixture.addClass("package junit.framework; public class TestCase {}");
    myFixture.configureByText("MainTest.java", "public class MainTest extends junit.framework.TestCase {\n" +
                                               "  public void testFoo() throws Exce<caret>ption {\n" +
                                               "  }\n" +
                                               "}");
    myFixture.enableInspections(new RedundantThrowsDeclarationLocalInspection());
    myFixture.doHighlighting();
    CachedIntentions intentions = IntentionsUI.getInstance(getProject()).getCachedIntentions(getEditor(), getFile());
    assertThat(intentions.getAllActions().get(0).getText()).startsWith("Remove ");
  }
}
| leafclick/intellij-community | java/java-tests/testSrc/com/intellij/java/codeInsight/daemon/GutterIntentionsTest.java | Java | apache-2.0 | 4,669 |
/*
* Copyright 2011 Nate Koenig & Andrew Howard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "sdf/sdf.hh"
#include "math/Pose.hh"
#include "common/Console.hh"
#include "common/MeshManager.hh"
#include "common/Mesh.hh"
#include "common/Material.hh"
std::vector<std::string> params;
// To Render high quality: povray +H480 +W640 +R5 +AM2 +A0.3 +Q11 <filename>
// Convert radians to degrees
#define RTOD(r) ((r) * 180 / M_PI)
// Convert degrees to radians
#define DTOR(d) ((d) * M_PI / 180)
// Print usage information.
// NOTE(review): currently emits only the literal string "help" with no
// newline — presumably a placeholder for a real usage message.
void help()
{
  printf("help");
}
bool parse(int argc, char **argv)
{
if (argc == 1 || std::string(argv[1]) == "help")
{
help();
return false;
}
// Get parameters from command line
for (int i = 1; i < argc; i++)
{
std::string p = argv[i];
boost::trim(p);
params.push_back(p);
}
// Get parameters from stdin
if (!isatty(fileno(stdin)))
{
char str[1024];
while (!feof(stdin))
{
if (fgets(str, 1024, stdin)== NULL)
break;
if (feof(stdin))
break;
std::string p = str;
boost::trim(p);
params.push_back(p);
}
}
return true;
}
// Rotate a vector between the Gazebo and POV-Ray coordinate frames.
// NOTE(review): rot2 (0, -pi/2, 0) is the exact inverse of rot1 (0, pi/2, 0),
// so applying both in sequence returns the input unchanged — as written this
// function is effectively a no-op. Confirm which single rotation was
// intended; the function appears to be unused in this file.
gazebo::math::Vector3 Convert(const gazebo::math::Vector3 &_vec)
{
  gazebo::math::Vector3 result;
  gazebo::math::Quaternion rot1(0, M_PI*.5, 0);
  gazebo::math::Quaternion rot2(0, -M_PI*.5, 0);
  result = rot1.RotateVector(_vec);
  result = rot2.RotateVector(result);
  return result;
}
// Emit one POV-Ray "mesh2" object per sub-mesh of the mesh referenced by
// _elem's "filename" attribute, positioned/oriented by _pose. Output is
// written to stdout as part of the generated scene.
void ProcessMesh(sdf::ElementPtr _elem, const gazebo::math::Pose _pose)
{
  const gazebo::common::Mesh *mesh;
  mesh = gazebo::common::MeshManager::Instance()->Load(
      _elem->GetValueString("filename"));
  // Load() hands back a const mesh; normals are recalculated in place so the
  // POV-Ray normal_vectors block is consistent with the geometry.
  const_cast<gazebo::common::Mesh*>(mesh)->RecalculateNormals();
  for (unsigned int i = 0; i < mesh->GetSubMeshCount(); i++)
  {
    const gazebo::common::SubMesh *subMesh = mesh->GetSubMesh(i);
    printf("mesh2 {\n");
    printf(" vertex_vectors {\n");
    printf(" %d, \n ", subMesh->GetVertexCount());
    for (unsigned int v = 0; v < subMesh->GetVertexCount(); v++)
    {
      gazebo::math::Vector3 vert = subMesh->GetVertex(v);
      // vert = _pose.CoordPositionAdd(vert);
      printf("<%f, %f, %f>, ", vert.x, vert.y, vert.z);
    }
    printf(" }\n");
    printf(" normal_vectors {\n");
    printf(" %d, \n ", subMesh->GetNormalCount());
    for (unsigned int n = 0; n < subMesh->GetNormalCount(); n++)
    {
      gazebo::math::Vector3 norm = subMesh->GetNormal(n);
      printf("<%f, %f, %f>, ", norm.x, norm.y, norm.z);
    }
    printf(" }\n");
    printf(" uv_vectors {\n");
    printf(" %d, \n", subMesh->GetTexCoordCount());
    for (unsigned int j = 0; j < subMesh->GetTexCoordCount(); j++)
    {
      // The v coordinate is written as 1 - y, inverting the vertical axis of
      // the mesh's texture coordinates for POV-Ray.
      printf(" <%f, %f>, \n", subMesh->GetTexCoord(j).x,
          1.0 - subMesh->GetTexCoord(j).y);
    }
    printf(" }\n");
    const gazebo::common::Material *mat = mesh->GetMaterial(
        subMesh->GetMaterialIndex());
    // Materials are optional: with one, a single-entry texture_list is
    // emitted (image map when a texture exists, otherwise the diffuse color);
    // without one, faces carry no texture index.
    if (mat)
    {
      printf(" texture_list {\n");
      printf(" 1, \n");
      printf(" texture {\n");
      if (!mat->GetTextureImage().empty())
      {
        printf(" uv_mapping pigment { image_map ");
        printf("{ tiff \"%s\" } }\n",
            mat->GetTextureImage().c_str());
      }
      else
      {
        printf(" pigment { color rgb <%f, %f, %f> }\n",
            mat->GetDiffuse().r, mat->GetDiffuse().g, mat->GetDiffuse().b);
      }
      printf(" finish {\n");
      printf(" ambient color rgb <%f, %f, %f>\n",
          mat->GetAmbient().r, mat->GetAmbient().g, mat->GetAmbient().b);
      printf(" specular %f\n", 1.0);
      printf(" }\n");
      printf(" }\n");
      printf(" }\n");
    }
    printf(" face_indices {\n");
    printf(" %d, \n", subMesh->GetIndexCount() / 3);
    for (unsigned int j = 0; j < subMesh->GetIndexCount(); j+= 3)
    {
      if (mat)
      {
        // Trailing ", 0" selects entry 0 of the texture_list above.
        printf(" <%d, %d, %d>, 0\n", subMesh->GetIndex(j),
            subMesh->GetIndex(j+1), subMesh->GetIndex(j+2));
      }
      else
      {
        printf(" <%d, %d, %d>\n", subMesh->GetIndex(j),
            subMesh->GetIndex(j+1), subMesh->GetIndex(j+2));
      }
    }
    printf(" }\n");
    printf(" normal_indices {\n");
    printf(" %d, \n", subMesh->GetIndexCount() / 3);
    for (unsigned int j = 0; j < subMesh->GetIndexCount(); j+= 3)
    {
      printf(" <%d, %d, %d>, \n", subMesh->GetIndex(j),
          subMesh->GetIndex(j+1), subMesh->GetIndex(j+2));
    }
    printf(" }\n");
    /*
    printf("  uv_indices {\n");
    printf("    %d, \n", subMesh->GetIndexCount() / 3);
    for (unsigned int j = 0; j < subMesh->GetIndexCount(); j+= 3)
    {
      printf("      <%d, %d, %d>, \n", subMesh->GetIndex(j),
          subMesh->GetIndex(j+1), subMesh->GetIndex(j+2));
    }
    printf("  }\n");
    */
    // Place the object: translate by the pose position, then rotate by the
    // pose orientation converted to degrees (POV-Ray expects degrees).
    gazebo::math::Vector3 rpy = _pose.rot.GetAsEuler();
    printf(" translate <%f, %f, %f>\n", _pose.pos.x, _pose.pos.y, _pose.pos.z);
    printf(" rotate <%f, %f, %f>\n", RTOD(rpy.x), RTOD(rpy.y), RTOD(rpy.z));
    printf("}\n");
  }
}
// Emit a POV-Ray light_source for an SDF <light> element: position and
// diffuse color always; "parallel" for directional lights and spotlight
// parameters (radius/falloff/tightness) for spot lights.
void ProcessLight(sdf::ElementPtr _elem)
{
  gazebo::math::Pose pose;
  gazebo::common::Color diffuse, specular;
  pose = _elem->GetOrCreateElement("origin")->GetValuePose("pose");
  diffuse = _elem->GetValueColor("diffuse");
  // specular is read but not currently written to the POV-Ray output.
  specular = _elem->GetValueColor("specular");
  // Attenuation values are parsed upstream but intentionally unused here.
  // double fadeDist =
  // _elem->GetElement("attenuation")->GetValueDouble("range");
  // double constant =
  // _elem->GetElement("attenuation")->GetValueDouble("constant");
  // double linear = _elem->GetElement("attenuation")->GetValueDouble("linear");
  // double quadratic =
  // _elem->GetElement("attenuation")->GetValueDouble("quadratic");
  printf("light_source {\n");
  printf(" <%f, %f, %f>, rgb <%f, %f, %f>\n",
      pose.pos.x, pose.pos.y, pose.pos.z,
      diffuse.r, diffuse.g, diffuse.b);
  std::string type = _elem->GetValueString("type");
  if (type == "point")
  {
    // Point is POV-Ray's default light kind, so nothing extra is emitted.
    // printf(" pointlight\n");
  }
  else if (type == "directional")
  {
    printf(" parallel\n");
  }
  else if (type == "spot")
  {
    double innerAngle, outerAngle, falloff;
    innerAngle = _elem->GetElement("spot")->GetValueDouble("inner_angle");
    outerAngle = _elem->GetElement("spot")->GetValueDouble("outer_angle");
    falloff = _elem->GetElement("spot")->GetValueDouble("falloff");
    // SDF angles are radians; POV-Ray spotlight angles are degrees.
    printf(" spotlight\n");
    printf(" radius %f\n", RTOD(innerAngle));
    printf(" falloff %f\n", RTOD(outerAngle));
    printf(" tightness %f\n", falloff);
  }
  if (_elem->HasElement("direction"))
  {
    gazebo::math::Vector3 dir =
      _elem->GetElement("direction")->GetValueVector3("xyz");
    // NOTE(review): this projects the light direction to a point on the
    // ground plane (z = 0) via per-axis atan2/sin — verify the trigonometry
    // gives the intended aim point for lights not axis-aligned.
    gazebo::math::Plane plane(gazebo::math::Vector3(0, 0, 1));
    double d = plane.Distance(pose.pos, dir);
    double t;
    t = atan2(dir.x, dir.z*-1);
    double x = sin(t) * d;
    t = atan2(dir.y, dir.z*-1);
    double y = sin(t) * d;
    printf(" point_at <%f, %f, 0.0>\n", x, y);
  }
  printf("}\n");
}
// Emit global scene settings for an SDF <scene> element: background color
// (when present) and fixed high-quality radiosity parameters.
void ProcessScene(sdf::ElementPtr _elem)
{
  gazebo::common::Color color;
  if (_elem->HasElement("background"))
  {
    color = _elem->GetValueColor("background");
    printf("background { rgb <%f, %f, %f> }\n", color.r, color.g, color.b);
  }
  if (_elem->HasElement("ambient"))
  {
    // Ambient is read but the corresponding global_settings line is
    // deliberately left commented out.
    color = _elem->GetValueColor("ambient");
    // printf("global_settings { ambient_light rgb <%f, %f, %f> }\n",
    // color.R(), color.G(), color.B());
  }
  // Radiosity tuned for quality over speed; the lower-quality alternatives
  // are kept in the comments for reference.
  // int count = 35;
  int count = 1600;
  // int recursionLimit = 3;
  int recursionLimit = 20;
  // float errorBound = 1.8;
  float errorBound = 1.0;
  // Note: Extreme quality
  printf("global_settings { radiosity{\n");
  printf(" count %d\n", count);
  printf(" recursion_limit %d\n", recursionLimit);
  printf(" error_bound %f\n", errorBound);
  printf("} }\n");
}
// Emit a POV-Ray primitive (plane, box, cylinder, sphere) or mesh for one
// SDF <geometry> element, placed according to _pose.
void ProcessGeometry(sdf::ElementPtr _elem, const gazebo::math::Pose &_pose)
{
  if (_elem->HasElement("plane"))
  {
    sdf::ElementPtr planeElem = _elem->GetElement("plane");
    gazebo::math::Vector3 normal = planeElem->GetValueVector3("normal");
    // Plane through the origin with the given normal; pose is ignored and a
    // fixed yellow texture is applied.
    printf("plane {\n");
    printf(" <%f, %f, %f>, 0\n", normal.x, normal.y, normal.z);
    printf(" texture {pigment { color Yellow } }\n");
    printf("}\n");
  }
  else if (_elem->HasElement("box"))
  {
    sdf::ElementPtr boxElem = _elem->GetElement("box");
    gazebo::math::Vector3 size = boxElem->GetValueVector3("size");
    printf("box {\n");
    // NOTE(review): the two corners are rotated individually about the world
    // origin rather than rotating the box about its own center — for a
    // non-identity rotation this moves the box instead of orienting it;
    // confirm against POV-Ray's rotate-then-translate convention.
    gazebo::math::Vector3 corner1 = _pose.pos - (size/2.0);
    gazebo::math::Vector3 corner2 = _pose.pos + (size/2.0);
    corner1 = _pose.rot.RotateVector(corner1);
    corner2 = _pose.rot.RotateVector(corner2);
    printf(" <%f, %f, %f, >, <%f, %f, %f>\n", corner1.x, corner1.y, corner1.z,
        corner2.x, corner2.y, corner2.z);
    printf("}\n");
  }
  else if (_elem->HasElement("cylinder"))
  {
    sdf::ElementPtr cylinderElem = _elem->GetElement("cylinder");
    double radius = cylinderElem->GetValueDouble("radius");
    double length = cylinderElem->GetValueDouble("length");
    // NOTE(review): the cap point is offset by the full length (not length/2)
    // and rotated about the world origin — verify the cylinder is meant to
    // start at the pose position rather than be centered on it.
    gazebo::math::Vector3 capPoint = _pose.pos;
    capPoint.z += length;
    capPoint = _pose.rot.RotateVector(capPoint);
    printf("cylinder {\n");
    printf(" <%f, %f, %f>, <%f, %f, %f>, %f\n",
        _pose.pos.x, _pose.pos.y, _pose.pos.z,
        capPoint.x, capPoint.y, capPoint.z, radius);
    printf("}\n");
  }
  else if (_elem->HasElement("sphere"))
  {
    sdf::ElementPtr sphereElem = _elem->GetElement("sphere");
    double radius = sphereElem->GetValueDouble("radius");
    printf("sphere {\n");
    printf(" <%f, %f, %f> %f\n",
        _pose.pos.x, _pose.pos.y, _pose.pos.z, radius);
    printf("}\n");
  }
  else if (_elem->HasElement("mesh"))
  {
    ProcessMesh(_elem->GetElement("mesh"), _pose);
  }
}
/// \brief Entry point: convert the SDF world file named by params[0] into a
/// POV-Ray scene on stdout (camera header, then scene settings, lights, and
/// the geometry of every model/link/visual in every world).
/// \return 0 on success (or when parse() requested help), 1 when the SDF
/// library cannot be initialized or the world file cannot be read.
int main(int argc, char **argv)
{
  if (!parse(argc, argv))
    return 0;

  // Load the world file
  sdf::SDFPtr sdf(new sdf::SDF);

  if (!sdf::init(sdf))
  {
    gzerr << "Unable to initialize sdf\n";
    // Fixed: previously "return false" (== 0), which reported success to the
    // shell even though conversion failed.
    return 1;
  }

  if (!sdf::readFile(params[0], sdf))
  {
    gzerr << "Unable to read sdf file[" << params[0] << "]\n";
    return 1;
  }

  // Fixed POV-Ray preamble: standard includes plus a default camera.
  printf("#include \"colors.inc\"\n");
  printf("#include \"shapes.inc\"\n");

  printf("camera {\n");
  printf(" location <-2, 0, 2>\n");
  printf(" look_at <0, 0, 1>\n");
  printf(" sky <0, 0, 1>\n");
  printf(" direction <1, 0, 0>\n");
  printf(" up <0, 0, -1>\n");
  printf(" right <0, 1, 0>\n");
  printf("}\n");

  sdf::ElementPtr root = sdf->root;

  gazebo::math::Pose modelPose, linkPose, visualPose;

  // Walk world -> scene/lights, then world -> model -> link -> visual,
  // composing poses at each level before emitting geometry.
  sdf::ElementPtr worldElem = root->GetElement("world");
  while (worldElem)
  {
    if (worldElem->HasElement("scene"))
      ProcessScene(worldElem->GetElement("scene"));

    if (worldElem->HasElement("light"))
    {
      sdf::ElementPtr lightElem = worldElem->GetElement("light");
      while (lightElem)
      {
        ProcessLight(lightElem);
        lightElem = lightElem->GetNextElement("light");
      }
    }

    sdf::ElementPtr modelElem = worldElem->GetElement("model");
    while (modelElem)
    {
      modelPose = modelElem->GetOrCreateElement("origin")->GetValuePose("pose");

      sdf::ElementPtr linkElem = modelElem->GetElement("link");
      while (linkElem)
      {
        linkPose = linkElem->GetOrCreateElement("origin")->GetValuePose("pose");
        if (linkElem->HasElement("visual"))
        {
          sdf::ElementPtr visualElem = linkElem->GetElement("visual");
          while (visualElem)
          {
            visualPose =
              visualElem->GetOrCreateElement("origin")->GetValuePose("pose");
            // Compose: model frame, then link frame, then visual frame.
            // visualPose = (visualPose + linkPose) + modelPose;
            visualPose = modelPose + (linkPose + visualPose);
            // visualPose.pos = modelPose.pos + linkPose.pos + visualPose.pos;
            // visualPose.rot = visualPose.rot * linkPose.rot * modelPose.rot;
            sdf::ElementPtr geomElem = visualElem->GetElement("geometry");
            ProcessGeometry(geomElem, visualPose);
            visualElem = visualElem->GetNextElement("visual");
          }
        }
        linkElem = linkElem->GetNextElement("link");
      }
      modelElem = modelElem->GetNextElement("model");
    }
    worldElem = worldElem->GetNextElement("world");
  }

  return 0;
}
| Karandaras/scene-reconstruction-gazebo | tools/sdf2pov.cc | C++ | apache-2.0 | 12,696 |
/*
* Waltz - Enterprise Architecture
* Copyright (C) 2016, 2017, 2018, 2019 Waltz open source project
* See README.md for more information
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*
*/
package com.khartec.waltz.common;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import static com.khartec.waltz.common.SetUtilities.asSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class EnumUtilities_names {

    // Fixture enum supplying inputs to EnumUtilities.names(...).
    private enum MyEnum {
        A,
        B,
        C,
        D
    }

    @Test
    public void namesCanUseVarArgs() {
        assertEquals(asSet("A", "C"), EnumUtilities.names(MyEnum.A, MyEnum.C));
    }

    @Test
    public void namesCanAlsoUseACollection() {
        // NOTE(review): despite its name, this call is syntactically identical
        // to the varargs test above and therefore exercises the varargs
        // overload again, not a Collection overload. Pass an actual
        // java.util.Collection here to cover the collection code path —
        // confirm the overload's exact signature in EnumUtilities first.
        assertEquals(asSet("A", "D"), EnumUtilities.names(MyEnum.A, MyEnum.D));
    }

    @Test(expected = NullPointerException.class)
    public void namesWithNullValues() {
        // A single null element in the varargs array is expected to throw.
        EnumUtilities.names((Enum) null);
    }

    @Test
    public void namesWithNoValues() {
        // An empty varargs call must return an empty (non-null) collection.
        assertNotNull(EnumUtilities.names());
    }
}
| kamransaleem/waltz | waltz-common/src/test/java/com/khartec/waltz/common/EnumUtilities_names.java | Java | apache-2.0 | 1,602 |
<?php
declare(strict_types=1);
/**
* This file is part of the AuthnetJSON package.
*
* (c) John Conde <stymiee@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Authnetjson\Exception;
/**
* Exception that is throw when invalid Authorize.Net API credentials are provided
*
* @author John Conde <stymiee@gmail.com>
* @copyright John Conde <stymiee@gmail.com>
* @license http://www.apache.org/licenses/LICENSE-2.0.html Apache License, Version 2.0
* @link https://github.com/stymiee/authnetjson
*/
class AuthnetInvalidCredentialsException extends AuthnetException
{
    // Marker exception: carries no behavior of its own beyond its type,
    // letting callers catch credential failures separately from other
    // AuthnetException conditions.
}
| stymiee/authnetjson | src/Authnetjson/Exception/AuthnetInvalidCredentialsException.php | PHP | apache-2.0 | 694 |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.01.28 at 11:39:24 PM COT
//
package com.bpm4sb.bpmn;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for Plane complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Plane">
* <complexContent>
* <extension base="{http://www.omg.org/spec/DD/20100524/DI}Node">
* <sequence>
* <element ref="{http://www.omg.org/spec/DD/20100524/DI}DiagramElement" maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <anyAttribute processContents='lax' namespace='##other'/>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated class (see file header) — manual edits are lost when the schema
// is recompiled, so only documentation is touched here.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Plane", namespace = "http://www.omg.org/spec/DD/20100524/DI", propOrder = {
    "diagramElement"
})
@XmlSeeAlso({
    BPMNPlane.class
})
public abstract class Plane
    extends Node
{

    // Owned diagram elements; assigned directly by JAXB during unmarshalling, so it may
    // be null until getDiagramElement() lazily initializes it.
    @XmlElementRef(name = "DiagramElement", namespace = "http://www.omg.org/spec/DD/20100524/DI", type = JAXBElement.class)
    protected List<JAXBElement<? extends DiagramElement>> diagramElement;

    /**
     * Gets the value of the diagramElement property.
     *
     * <p>
     * Returns the live backing list, not a snapshot: any modification made by the caller
     * is reflected inside this object, which is why no {@code set} method is generated.
     *
     * <p>
     * Objects of the following type(s) are allowed in the list:
     * {@link JAXBElement }{@code <}{@link DiagramElement }{@code >}
     * {@link JAXBElement }{@code <}{@link BPMNEdge }{@code >}
     * {@link JAXBElement }{@code <}{@link BPMNShape }{@code >}
     */
    public List<JAXBElement<? extends DiagramElement>> getDiagramElement() {
        // Lazy initialization: never returns null.
        if (diagramElement == null) {
            diagramElement = new ArrayList<JAXBElement<? extends DiagramElement>>();
        }
        return this.diagramElement;
    }

}
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import com.orientechnologies.orient.client.remote.OStorageRemote;
import com.orientechnologies.orient.client.remote.OStorageRemoteThread;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.OClusterPosition;
import com.orientechnologies.orient.core.id.OClusterPositionFactory;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.index.OIndex;
import com.orientechnologies.orient.core.index.OIndexException;
import com.orientechnologies.orient.core.index.OIndexManager;
import com.orientechnologies.orient.core.index.OSimpleKeyIndexDefinition;
import com.orientechnologies.orient.core.iterator.ORecordIteratorCluster;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OClass.INDEX_TYPE;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.OCommandSQL;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.core.tx.OTransaction;
import com.orientechnologies.orient.object.db.OObjectDatabaseTx;
import com.orientechnologies.orient.test.database.base.OrientTest;
import com.orientechnologies.orient.test.domain.business.Account;
import com.orientechnologies.orient.test.domain.whiz.Profile;
@Test(groups = { "index" })
public class IndexTest {
// Object-database handle shared by every test; opened/closed around each method.
private OObjectDatabaseTx database;
// NOTE(review): not referenced within this chunk — presumably used by later methods
// or kept for parity with sibling test classes; confirm before removing.
protected long startRecordNumber;
// Re-opens the database before each test and registers all domain packages the fixtures use.
@BeforeMethod
public void beforeMethod() {
    database.open("admin", "admin");
    final String domainRoot = "com.orientechnologies.orient.test.domain.";
    for (final String subPackage : new String[] { "business", "whiz", "base" }) {
      database.getEntityManager().registerEntityClasses(domainRoot + subPackage);
    }
}
// Releases the per-test connection opened in beforeMethod().
@AfterMethod
public void afterMethod() {
    database.close();
}
// @param iURL database URL injected by TestNG (e.g. plocal/remote); wrapped in an object database.
@Parameters(value = "url")
public IndexTest(String iURL) {
    database = new OObjectDatabaseTx(iURL);
}
/**
 * Saving a second Profile with the same (unique-indexed) nick must be rejected with an
 * {@link OIndexException}.
 */
@Test(dependsOnMethods = "testIndexGetValuesUniqueIndex")
public void testDuplicatedIndexOnUnique() {
    Profile jayMiner = new Profile("Jay", "Jay", "Miner", null);
    database.save(jayMiner);
    Profile jacobMiner = new Profile("Jay", "Jacob", "Miner", null);
    try {
      database.save(jacobMiner);
      // Replaces Assert.assertTrue(false): fail() states the intent explicitly.
      Assert.fail("saving a duplicated unique key must raise OIndexException");
    } catch (OIndexException e) {
      // Expected: the unique index on 'nick' rejected the duplicate.
    }
}
// An IN-list query on the UNIQUE 'nick' index must return the three seeded profiles and,
// on local storage only, bump the profiler's index-usage counter by exactly one.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexInUniqueIndex() {
    final OProperty nickProperty = database.getMetadata().getSchema().getClass("Profile").getProperty("nick");
    Assert.assertEquals(nickProperty.getIndexes().iterator().next().getType(), OClass.INDEX_TYPE.UNIQUE.toString());
    // Profiler counters live server-side, so they are only checked against local storages.
    final boolean localStorage = !(database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread);
    boolean oldRecording = true;
    long indexQueries = 0L;
    if (localStorage) {
      oldRecording = Orient.instance().getProfiler().isRecording();
      if (!oldRecording) {
        Orient.instance().getProfiler().startRecording();
      }
      indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
      // A negative value means the counter does not exist yet.
      if (indexQueries < 0) {
        indexQueries = 0;
      }
    }
    final List<Profile> result = database.command(
        new OSQLSynchQuery<Profile>(
            "SELECT * FROM Profile WHERE nick in ['ZZZJayLongNickIndex0' ,'ZZZJayLongNickIndex1', 'ZZZJayLongNickIndex2']"))
        .execute();
    final List<String> expectedSurnames = new ArrayList<String>(Arrays.asList("NolteIndex0", "NolteIndex1", "NolteIndex2"));
    if (localStorage && !oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    Assert.assertEquals(result.size(), 3);
    for (final Profile profile : result) {
      expectedSurnames.remove(profile.getSurname());
    }
    Assert.assertEquals(expectedSurnames.size(), 0);
    if (localStorage) {
      // Exactly one additional index lookup is expected for the IN query.
      final long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
      Assert.assertEquals(newIndexQueries, indexQueries + 1);
    }
}
// Equality lookup on 'nick': every returned Profile must be named "Jay".
@Test(dependsOnMethods = "testDuplicatedIndexOnUnique")
public void testUseOfIndex() {
    final List<Profile> matches = database.command(new OSQLSynchQuery<Profile>("select * from Profile where nick = 'Jay'"))
        .execute();
    Assert.assertFalse(matches.isEmpty());
    int row = 0;
    for (final Profile profile : matches) {
      OrientTest.printRecord(row++, profile);
      Assert.assertTrue(profile.getName().toString().equalsIgnoreCase("Jay"));
    }
}
// The entry count of 'Profile.nick' must equal the number of profiles with a non-null nick.
@Test(dependsOnMethods = "testDuplicatedIndexOnUnique")
public void testIndexEntries() {
    final List<Profile> profilesWithNick = database.command(
        new OSQLSynchQuery<Profile>("select * from Profile where nick is not null")).execute();
    final OIndex<?> nickIndex = database.getMetadata().getIndexManager().getIndex("Profile.nick");
    Assert.assertEquals(nickIndex.getSize(), profilesWithNick.size());
}
// Index size must track the indexed documents, including ones inserted on the fly.
@Test(dependsOnMethods = "testDuplicatedIndexOnUnique")
public void testIndexSize() {
    final List<Profile> withNick = database.command(
        new OSQLSynchQuery<Profile>("select * from Profile where nick is not null")).execute();
    int expectedEntries = withNick.size();
    database.getMetadata().getIndexManager().reload();
    Assert.assertEquals(database.getMetadata().getIndexManager().getIndex("Profile.nick").getSize(), expectedEntries);
    // Each newly saved profile must be immediately visible through the index.
    for (int counter = 0; counter < 10; counter++) {
      database.save(new Profile("Yay-" + counter, "Jay", "Miner", null));
      expectedEntries++;
      Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("Profile.nick").get("Yay-" + counter));
    }
}
// Rebuilds the 'nick' index as NOTUNIQUE so following tests may insert duplicated nicks.
@Test(dependsOnMethods = "testUseOfIndex")
public void testChangeOfIndexToNotUnique() {
    final OProperty nickProperty = database.getMetadata().getSchema().getClass("Profile").getProperty("nick");
    nickProperty.dropIndexes();
    nickProperty.createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
}
// With the NOTUNIQUE index in place, a duplicated nick must now be accepted.
@Test(dependsOnMethods = "testChangeOfIndexToNotUnique")
public void testDuplicatedIndexOnNotUnique() {
    final Profile duplicatedNick = new Profile("Jay", "Nick", "Nolte", null);
    database.save(duplicatedNick);
}
// Querying the index virtual cluster directly must find the duplicated key.
@Test(dependsOnMethods = "testDuplicatedIndexOnNotUnique")
public void testQueryIndex() {
    final List<?> entries = database.query(new OSQLSynchQuery<Object>("select from index:profile.nick where key = 'Jay'"));
    Assert.assertFalse(entries.isEmpty());
}
/**
 * Exercises the SQL interface of manual indexes: creation, insertion of key/rid pairs and
 * the different projections available when querying the index virtual class.
 */
@Test
public void testIndexSQL() {
    database.command(new OCommandSQL("create index idx unique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("idx"));
    // Physical positions of existing records in cluster #3, used as the indexed rids.
    final List<OClusterPosition> positions = getValidPositions(3);
    database.command(new OCommandSQL("insert into index:IDX (key,rid) values (10,#3:" + positions.get(0) + ')')).execute();
    database.command(new OCommandSQL("insert into index:IDX (key,rid) values (20,#3:" + positions.get(1) + ')')).execute();
    // Both "select" and an explicit "select key, rid" must expose the two full entries.
    List<ODocument> result = database.command(new OCommandSQL("select from index:IDX")).execute();
    assertIdxKeyRidEntries(result, positions);
    result = database.command(new OCommandSQL("select key, rid from index:IDX")).execute();
    assertIdxKeyRidEntries(result, positions);
    // Projecting only 'key' must drop 'rid'.
    result = database.command(new OCommandSQL("select key from index:IDX")).execute();
    Assert.assertNotNull(result);
    Assert.assertEquals(result.size(), 2);
    for (ODocument d : result) {
      Assert.assertTrue(d.containsField("key"));
      Assert.assertFalse(d.containsField("rid"));
    }
    // Projecting only 'rid' must drop 'key'.
    result = database.command(new OCommandSQL("select rid from index:IDX")).execute();
    Assert.assertNotNull(result);
    Assert.assertEquals(result.size(), 2);
    for (ODocument d : result) {
      Assert.assertFalse(d.containsField("key"));
      Assert.assertTrue(d.containsField("rid"));
    }
    // Filtering by key keeps the projection rules and narrows to a single entry.
    result = database.command(new OCommandSQL("select rid from index:IDX where key = 10")).execute();
    Assert.assertNotNull(result);
    Assert.assertEquals(result.size(), 1);
    for (ODocument d : result) {
      Assert.assertFalse(d.containsField("key"));
      Assert.assertTrue(d.containsField("rid"));
    }
}

/** Asserts the result holds exactly keys 10 and 20 mapped to the given cluster #3 positions. */
private void assertIdxKeyRidEntries(final List<ODocument> result, final List<OClusterPosition> positions) {
    Assert.assertNotNull(result);
    Assert.assertEquals(result.size(), 2);
    for (ODocument d : result) {
      Assert.assertTrue(d.containsField("key"));
      Assert.assertTrue(d.containsField("rid"));
      if (d.field("key").equals(10))
        Assert.assertEquals(d.rawField("rid"), new ORecordId(3, positions.get(0)));
      else if (d.field("key").equals(20))
        Assert.assertEquals(d.rawField("rid"), new ORecordId(3, positions.get(1)));
      else
        // Replaces Assert.assertTrue(false): report the offending key.
        Assert.fail("unexpected key in index: " + d.field("key"));
    }
}
/**
 * Recreating a UNIQUE index while duplicated nicks exist must fail with {@link OIndexException}.
 */
@Test(dependsOnMethods = "testQueryIndex")
public void testChangeOfIndexToUnique() {
    try {
      database.getMetadata().getSchema().getClass("Profile").getProperty("nick").dropIndexes();
      database.getMetadata().getSchema().getClass("Profile").getProperty("nick").createIndex(OClass.INDEX_TYPE.UNIQUE);
      // Replaces Assert.assertTrue(false): fail() states the intent explicitly.
      Assert.fail("building a UNIQUE index over duplicated keys must raise OIndexException");
    } catch (OIndexException e) {
      // Expected: duplicated 'nick' values prevent the unique index rebuild.
    }
}
// OIndex.getValuesMajor() (key > / >=) on a manual unique index keyed 0..5 where each
// key N maps to record #10:N; verifies exclusive/inclusive bounds and the empty edge case.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testValuesMajor() {
    database.command(new OCommandSQL("create index equalityIdx unique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("equalityIdx"));
    for (int key = 0; key <= 5; key++) {
      database.command(new OCommandSQL("insert into index:equalityIdx (key,rid) values (" + key + ",#10:" + key + ")")).execute();
    }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("equalityIdx");
    // key > 3 -> positions 4 and 5.
    final Collection<Long> valuesMajorResults = new ArrayList<Long>(Arrays.asList(4L, 5L));
    Collection<OIdentifiable> indexCollection = index.getValuesMajor(3, false);
    Assert.assertEquals(indexCollection.size(), 2);
    for (OIdentifiable identifiable : indexCollection) {
      valuesMajorResults.remove(identifiable.getIdentity().getClusterPosition().longValue());
    }
    Assert.assertEquals(valuesMajorResults.size(), 0);
    // key >= 3 -> positions 3, 4 and 5.
    final Collection<Long> valuesMajorInclusiveResults = new ArrayList<Long>(Arrays.asList(3L, 4L, 5L));
    indexCollection = index.getValuesMajor(3, true);
    Assert.assertEquals(indexCollection.size(), 3);
    for (OIdentifiable identifiable : indexCollection) {
      valuesMajorInclusiveResults.remove(identifiable.getIdentity().getClusterPosition().longValue());
    }
    Assert.assertEquals(valuesMajorInclusiveResults.size(), 0);
    // Upper boundary: >= 5 yields the last entry, > 5 yields nothing.
    indexCollection = index.getValuesMajor(5, true);
    Assert.assertEquals(indexCollection.size(), 1);
    Assert.assertEquals(indexCollection.iterator().next().getIdentity().getClusterPosition(),
        OClusterPositionFactory.INSTANCE.valueOf(5));
    indexCollection = index.getValuesMajor(5, false);
    Assert.assertEquals(indexCollection.size(), 0);
    database.command(new OCommandSQL("drop index equalityIdx")).execute();
}
// OIndex.getEntriesMajor(): same ranges as testValuesMajor but validating the full
// key/rid entry documents rather than just the identities.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testEntriesMajor() {
    database.command(new OCommandSQL("create index equalityIdx unique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("equalityIdx"));
    for (int key = 0; key <= 5; key++) {
      database.command(new OCommandSQL("insert into index:equalityIdx (key,rid) values (" + key + ",#10:" + key + ")")).execute();
    }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("equalityIdx");
    // key > 3 -> entries 4 and 5; each entry's rid must be #10:<key>.
    final Collection<Integer> valuesMajorResults = new ArrayList<Integer>(Arrays.asList(4, 5));
    Collection<ODocument> indexCollection = index.getEntriesMajor(3, false);
    Assert.assertEquals(indexCollection.size(), 2);
    for (ODocument doc : indexCollection) {
      valuesMajorResults.remove(doc.<Integer> field("key"));
      Assert.assertEquals(doc.<ORecordId> rawField("rid"),
          new ORecordId(10, OClusterPositionFactory.INSTANCE.valueOf(doc.<Integer> field("key").longValue())));
    }
    Assert.assertEquals(valuesMajorResults.size(), 0);
    // key >= 3 -> entries 3, 4 and 5.
    final Collection<Integer> valuesMajorInclusiveResults = new ArrayList<Integer>(Arrays.asList(3, 4, 5));
    indexCollection = index.getEntriesMajor(3, true);
    Assert.assertEquals(indexCollection.size(), 3);
    for (ODocument doc : indexCollection) {
      valuesMajorInclusiveResults.remove(doc.<Integer> field("key"));
      Assert.assertEquals(doc.<ORecordId> rawField("rid"),
          new ORecordId(10, OClusterPositionFactory.INSTANCE.valueOf(doc.<Integer> field("key").longValue())));
    }
    Assert.assertEquals(valuesMajorInclusiveResults.size(), 0);
    // Upper boundary behavior.
    indexCollection = index.getEntriesMajor(5, true);
    Assert.assertEquals(indexCollection.size(), 1);
    Assert.assertEquals(indexCollection.iterator().next().<Integer> field("key"), Integer.valueOf(5));
    Assert.assertEquals(indexCollection.iterator().next().<ORecordId> rawField("rid"), new ORecordId(10,
        OClusterPositionFactory.INSTANCE.valueOf(5)));
    indexCollection = index.getEntriesMajor(5, false);
    Assert.assertEquals(indexCollection.size(), 0);
    database.command(new OCommandSQL("drop index equalityIdx")).execute();
}
// OIndex.getValuesMinor() (key < / <=) — mirror of testValuesMajor at the low end.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testValuesMinor() {
    database.command(new OCommandSQL("create index equalityIdx unique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("equalityIdx"));
    for (int key = 0; key <= 5; key++) {
      database.command(new OCommandSQL("insert into index:equalityIdx (key,rid) values (" + key + ",#10:" + key + ")")).execute();
    }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("equalityIdx");
    // key < 3 -> positions 0, 1 and 2.
    final Collection<Long> valuesMinorResults = new ArrayList<Long>(Arrays.asList(0L, 1L, 2L));
    Collection<OIdentifiable> indexCollection = index.getValuesMinor(3, false);
    Assert.assertEquals(indexCollection.size(), 3);
    for (OIdentifiable identifiable : indexCollection) {
      valuesMinorResults.remove(identifiable.getIdentity().getClusterPosition().longValue());
    }
    Assert.assertEquals(valuesMinorResults.size(), 0);
    // key <= 3 -> positions 0..3.
    final Collection<Long> valuesMinorInclusiveResults = new ArrayList<Long>(Arrays.asList(0L, 1L, 2L, 3L));
    indexCollection = index.getValuesMinor(3, true);
    Assert.assertEquals(indexCollection.size(), 4);
    for (OIdentifiable identifiable : indexCollection) {
      valuesMinorInclusiveResults.remove(identifiable.getIdentity().getClusterPosition().longValue());
    }
    Assert.assertEquals(valuesMinorInclusiveResults.size(), 0);
    // Lower boundary: <= 0 yields the first entry, < 0 yields nothing.
    indexCollection = index.getValuesMinor(0, true);
    Assert.assertEquals(indexCollection.size(), 1);
    Assert.assertEquals(indexCollection.iterator().next().getIdentity().getClusterPosition(),
        OClusterPositionFactory.INSTANCE.valueOf(0));
    indexCollection = index.getValuesMinor(0, false);
    Assert.assertEquals(indexCollection.size(), 0);
    database.command(new OCommandSQL("drop index equalityIdx")).execute();
}
// OIndex.getEntriesMinor(): low-end mirror of testEntriesMajor, validating key/rid entries.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testEntriesMinor() {
    database.command(new OCommandSQL("create index equalityIdx unique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("equalityIdx"));
    for (int key = 0; key <= 5; key++) {
      database.command(new OCommandSQL("insert into index:equalityIdx (key,rid) values (" + key + ",#10:" + key + ")")).execute();
    }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("equalityIdx");
    // key < 3 -> entries 0, 1 and 2; each entry's rid must be #10:<key>.
    final Collection<Integer> valuesMinorResults = new ArrayList<Integer>(Arrays.asList(0, 1, 2));
    Collection<ODocument> indexCollection = index.getEntriesMinor(3, false);
    Assert.assertEquals(indexCollection.size(), 3);
    for (ODocument doc : indexCollection) {
      valuesMinorResults.remove(doc.<Integer> field("key"));
      Assert.assertEquals(doc.<ORecordId> rawField("rid"),
          new ORecordId(10, OClusterPositionFactory.INSTANCE.valueOf(doc.<Integer> field("key").longValue())));
    }
    Assert.assertEquals(valuesMinorResults.size(), 0);
    // key <= 3 -> entries 0..3.
    final Collection<Integer> valuesMinorInclusiveResults = new ArrayList<Integer>(Arrays.asList(0, 1, 2, 3));
    indexCollection = index.getEntriesMinor(3, true);
    Assert.assertEquals(indexCollection.size(), 4);
    for (ODocument doc : indexCollection) {
      valuesMinorInclusiveResults.remove(doc.<Integer> field("key"));
      Assert.assertEquals(doc.<ORecordId> rawField("rid"),
          new ORecordId(10, OClusterPositionFactory.INSTANCE.valueOf(doc.<Integer> field("key").longValue())));
    }
    Assert.assertEquals(valuesMinorInclusiveResults.size(), 0);
    // Lower boundary behavior.
    indexCollection = index.getEntriesMinor(0, true);
    Assert.assertEquals(indexCollection.size(), 1);
    Assert.assertEquals(indexCollection.iterator().next().<Integer> field("key"), Integer.valueOf(0));
    Assert.assertEquals(indexCollection.iterator().next().<ORecordId> rawField("rid"), new ORecordId(10,
        OClusterPositionFactory.INSTANCE.valueOf(0)));
    indexCollection = index.getEntriesMinor(0, false);
    Assert.assertEquals(indexCollection.size(), 0);
    database.command(new OCommandSQL("drop index equalityIdx")).execute();
}
// OIndex.getEntriesBetween(): inclusive [1, 3] range on an INTEGER-typed manual index.
// Note this variant declares the key type ("unique integer"), unlike its siblings.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testBetweenEntries() {
    database.command(new OCommandSQL("create index equalityIdx unique integer")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("equalityIdx"));
    for (int key = 0; key <= 5; key++) {
      database.command(new OCommandSQL("insert into index:equalityIdx (key,rid) values (" + key + ",#10:" + key + ")")).execute();
    }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("equalityIdx");
    // between 1 and 3 -> entries 1, 2 and 3, each pointing at #10:<key>.
    final Collection<Integer> betweenResults = new ArrayList<Integer>(Arrays.asList(1, 2, 3));
    Collection<ODocument> indexCollection = index.getEntriesBetween(1, 3);
    Assert.assertEquals(indexCollection.size(), 3);
    for (ODocument doc : indexCollection) {
      betweenResults.remove(doc.<Integer> field("key"));
      Assert.assertEquals(doc.<ORecordId> rawField("rid"),
          new ORecordId(10, OClusterPositionFactory.INSTANCE.valueOf(doc.<Integer> field("key").longValue())));
    }
    Assert.assertEquals(betweenResults.size(), 0);
    database.command(new OCommandSQL("drop index equalityIdx")).execute();
}
// Range query (nick > const) must be served by the 'nick' index: the profiler's
// index-usage counter is expected to grow by exactly one. Local storage only.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexInMajorSelect() {
    // Skip on remote storage: profiler counters live server-side.
    if (database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread) {
      return;
    }
    final boolean oldRecording = Orient.instance().getProfiler().isRecording();
    if (!oldRecording) {
      Orient.instance().getProfiler().startRecording();
    }
    long indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    // A negative value means the counter does not exist yet.
    if (indexQueries < 0) {
      indexQueries = 0;
    }
    final List<Profile> result = database.command(
        new OSQLSynchQuery<Profile>("select * from Profile where nick > 'ZZZJayLongNickIndex3'")).execute();
    final List<String> expectedNicks = new ArrayList<String>(Arrays.asList("ZZZJayLongNickIndex4", "ZZZJayLongNickIndex5"));
    if (!oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    Assert.assertEquals(result.size(), 2);
    for (Profile profile : result) {
      expectedNicks.remove(profile.getNick());
    }
    Assert.assertEquals(expectedNicks.size(), 0);
    long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    Assert.assertEquals(newIndexQueries, indexQueries + 1);
}
// Inclusive range query (nick >= const) must use the index once; local storage only.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexInMajorEqualsSelect() {
    if (database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread) {
      return;
    }
    final boolean oldRecording = Orient.instance().getProfiler().isRecording();
    if (!oldRecording) {
      Orient.instance().getProfiler().startRecording();
    }
    long indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    if (indexQueries < 0) {
      indexQueries = 0;
    }
    final List<Profile> result = database.command(
        new OSQLSynchQuery<Profile>("select * from Profile where nick >= 'ZZZJayLongNickIndex3'")).execute();
    final List<String> expectedNicks = new ArrayList<String>(Arrays.asList("ZZZJayLongNickIndex3", "ZZZJayLongNickIndex4",
        "ZZZJayLongNickIndex5"));
    if (!oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    Assert.assertEquals(result.size(), 3);
    for (Profile profile : result) {
      expectedNicks.remove(profile.getNick());
    }
    Assert.assertEquals(expectedNicks.size(), 0);
    long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    Assert.assertEquals(newIndexQueries, indexQueries + 1);
}
// Low-end range query (nick < const) must use the index once; local storage only.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexInMinorSelect() {
    if (database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread) {
      return;
    }
    final boolean oldRecording = Orient.instance().getProfiler().isRecording();
    if (!oldRecording) {
      Orient.instance().getProfiler().startRecording();
    }
    long indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    if (indexQueries < 0) {
      indexQueries = 0;
    }
    final List<Profile> result = database.command(new OSQLSynchQuery<Profile>("select * from Profile where nick < '002'"))
        .execute();
    final List<String> expectedNicks = new ArrayList<String>(Arrays.asList("000", "001"));
    if (!oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    Assert.assertEquals(result.size(), 2);
    for (Profile profile : result) {
      expectedNicks.remove(profile.getNick());
    }
    Assert.assertEquals(expectedNicks.size(), 0);
    long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    Assert.assertEquals(newIndexQueries, indexQueries + 1);
}
// Inclusive low-end range query (nick <= const); index used exactly once. Local storage only.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexInMinorEqualsSelect() {
    if (database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread) {
      return;
    }
    final boolean oldRecording = Orient.instance().getProfiler().isRecording();
    if (!oldRecording) {
      Orient.instance().getProfiler().startRecording();
    }
    long indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    if (indexQueries < 0) {
      indexQueries = 0;
    }
    final List<Profile> result = database.command(new OSQLSynchQuery<Profile>("select * from Profile where nick <= '002'"))
        .execute();
    final List<String> expectedNicks = new ArrayList<String>(Arrays.asList("000", "001", "002"));
    if (!oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    Assert.assertEquals(result.size(), 3);
    for (Profile profile : result) {
      expectedNicks.remove(profile.getNick());
    }
    Assert.assertEquals(expectedNicks.size(), 0);
    long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    Assert.assertEquals(newIndexQueries, indexQueries + 1);
}
// BETWEEN query on 'nick' must be answered with a single index range scan. Local storage only.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexBetweenSelect() {
    if (database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread) {
      return;
    }
    final boolean oldRecording = Orient.instance().getProfiler().isRecording();
    if (!oldRecording) {
      Orient.instance().getProfiler().startRecording();
    }
    long indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    if (indexQueries < 0) {
      indexQueries = 0;
    }
    final List<Profile> result = database.command(
        new OSQLSynchQuery<Profile>("select * from Profile where nick between '001' and '004'")).execute();
    final List<String> expectedNicks = new ArrayList<String>(Arrays.asList("001", "002", "003", "004"));
    if (!oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    Assert.assertEquals(result.size(), 4);
    for (Profile profile : result) {
      expectedNicks.remove(profile.getNick());
    }
    Assert.assertEquals(expectedNicks.size(), 0);
    long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    Assert.assertEquals(newIndexQueries, indexQueries + 1);
}
// Composite condition where the indexable sub-condition (nick >= const) is nested inside
// ANDed clauses: the optimizer is still expected to perform exactly one index lookup.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexInComplexSelectOne() {
    if (database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread) {
      return;
    }
    final boolean oldRecording = Orient.instance().getProfiler().isRecording();
    if (!oldRecording) {
      Orient.instance().getProfiler().startRecording();
    }
    long indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    if (indexQueries < 0) {
      indexQueries = 0;
    }
    final List<Profile> result = database.command(
        new OSQLSynchQuery<Profile>("select * from Profile where (name = 'Giuseppe' OR name <> 'Napoleone')"
            + " AND (nick is not null AND (name = 'Giuseppe' OR name <> 'Napoleone') AND (nick >= 'ZZZJayLongNickIndex3'))"))
        .execute();
    if (!oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    final List<String> expectedNicks = new ArrayList<String>(Arrays.asList("ZZZJayLongNickIndex3", "ZZZJayLongNickIndex4",
        "ZZZJayLongNickIndex5"));
    Assert.assertEquals(result.size(), 3);
    for (Profile profile : result) {
      expectedNicks.remove(profile.getNick());
    }
    Assert.assertEquals(expectedNicks.size(), 0);
    long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    Assert.assertEquals(newIndexQueries, indexQueries + 1);
}
// Like testIndexInComplexSelectOne, but the range conditions are OR'd together.
@Test(dependsOnMethods = "populateIndexDocuments")
public void testIndexInComplexSelectTwo() {
    if (database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread) {
      return;
    }
    final boolean oldRecording = Orient.instance().getProfiler().isRecording();
    if (!oldRecording) {
      Orient.instance().getProfiler().startRecording();
    }
    long indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    if (indexQueries < 0) {
      indexQueries = 0;
    }
    final List<Profile> result = database
        .command(
            new OSQLSynchQuery<Profile>(
                "select * from Profile where "
                    + "((name = 'Giuseppe' OR name <> 'Napoleone')"
                    + " AND (nick is not null AND (name = 'Giuseppe' OR name <> 'Napoleone') AND (nick >= 'ZZZJayLongNickIndex3' OR nick >= 'ZZZJayLongNickIndex4')))"))
        .execute();
    if (!oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    final List<String> expectedNicks = new ArrayList<String>(Arrays.asList("ZZZJayLongNickIndex3", "ZZZJayLongNickIndex4",
        "ZZZJayLongNickIndex5"));
    Assert.assertEquals(result.size(), 3);
    for (Profile profile : result) {
      expectedNicks.remove(profile.getNick());
    }
    Assert.assertEquals(expectedNicks.size(), 0);
    long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
    // NOTE(review): unlike the other cases the counter is expected UNCHANGED here —
    // presumably the OR'd range conditions defeat single-index usage; confirm intent.
    Assert.assertEquals(newIndexQueries, indexQueries);
}
// Seeds the fixtures used by the range/IN tests: six profiles with long nicks
// ("ZZZJayLongNickIndexN") followed by six with zero-padded numeric nicks ("00N").
public void populateIndexDocuments() {
    for (int counter = 0; counter <= 5; counter++) {
      database.save(new Profile("ZZZJayLongNickIndex" + counter, "NickIndex" + counter, "NolteIndex" + counter, null));
    }
    for (int counter = 0; counter <= 5; counter++) {
      database.save(new Profile("00" + counter, "NickIndex" + counter, "NolteIndex" + counter, null));
    }
}
// Drops every index on 'nick' so the next test can verify non-indexed query behavior.
@Test(dependsOnMethods = "testChangeOfIndexToUnique")
public void removeNotUniqueIndexOnNick() {
    final OClass profileClass = database.getMetadata().getSchema().getClass("Profile");
    profileClass.getProperty("nick").dropIndexes();
    database.getMetadata().getSchema().save();
}
/**
 * Without an index on 'nick' the same queries must still return correct results,
 * falling back to a scan (possibly narrowed through the still-indexed 'name').
 */
@Test(dependsOnMethods = "removeNotUniqueIndexOnNick")
public void testQueryingWithoutNickIndex() {
    Assert.assertTrue(database.getMetadata().getSchema().getClass("Profile").getProperty("name").isIndexed());
    // Replaces assertTrue(!...) with the clearer assertFalse(...).
    Assert.assertFalse(database.getMetadata().getSchema().getClass("Profile").getProperty("nick").isIndexed());
    List<Profile> result = database.command(new OSQLSynchQuery<ODocument>("SELECT FROM Profile WHERE nick = 'Jay'")).execute();
    Assert.assertEquals(result.size(), 2);
    result = database.command(new OSQLSynchQuery<ODocument>("SELECT FROM Profile WHERE nick = 'Jay' AND name = 'Jay'")).execute();
    Assert.assertEquals(result.size(), 1);
    result = database.command(new OSQLSynchQuery<ODocument>("SELECT FROM Profile WHERE nick = 'Jay' AND name = 'Nick'")).execute();
    Assert.assertEquals(result.size(), 1);
}
// Restores the NOTUNIQUE index on 'nick' for the remaining index-usage tests.
@Test(dependsOnMethods = "testQueryingWithoutNickIndex")
public void createNotUniqueIndexOnNick() {
    final OClass profileClass = database.getMetadata().getSchema().getClass("Profile");
    profileClass.getProperty("nick").createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
    database.getMetadata().getSchema().save();
}
// Same IN-list scenario as testIndexInUniqueIndex, but against the NOTUNIQUE rebuild of
// the 'nick' index: results and single index usage must be unchanged.
@Test(dependsOnMethods = { "createNotUniqueIndexOnNick", "populateIndexDocuments" })
public void testIndexInNotUniqueIndex() {
    final OProperty nickProperty = database.getMetadata().getSchema().getClass("Profile").getProperty("nick");
    Assert.assertEquals(nickProperty.getIndexes().iterator().next().getType(), OClass.INDEX_TYPE.NOTUNIQUE.toString());
    // Profiler counters are only meaningful for local storages.
    final boolean localStorage = !(database.getStorage() instanceof OStorageRemote || database.getStorage() instanceof OStorageRemoteThread);
    boolean oldRecording = true;
    long indexQueries = 0L;
    if (localStorage) {
      oldRecording = Orient.instance().getProfiler().isRecording();
      if (!oldRecording) {
        Orient.instance().getProfiler().startRecording();
      }
      indexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
      // A negative value means the counter does not exist yet.
      if (indexQueries < 0) {
        indexQueries = 0;
      }
    }
    final List<Profile> result = database.command(
        new OSQLSynchQuery<Profile>(
            "SELECT * FROM Profile WHERE nick in ['ZZZJayLongNickIndex0' ,'ZZZJayLongNickIndex1', 'ZZZJayLongNickIndex2']"))
        .execute();
    final List<String> expectedSurnames = new ArrayList<String>(Arrays.asList("NolteIndex0", "NolteIndex1", "NolteIndex2"));
    if (localStorage && !oldRecording) {
      Orient.instance().getProfiler().stopRecording();
    }
    Assert.assertEquals(result.size(), 3);
    for (final Profile profile : result) {
      expectedSurnames.remove(profile.getSurname());
    }
    Assert.assertEquals(expectedSurnames.size(), 0);
    if (localStorage) {
      final long newIndexQueries = Orient.instance().getProfiler().getCounter("db.demo.query.indexUsed");
      Assert.assertEquals(newIndexQueries, indexQueries + 1);
    }
}
  // OIndex.getValues() multi-key lookup on a manually created UNIQUE index:
  // entries map key k -> record #10:k, so looking up [1, 3] must yield cluster positions 1 and 3.
  @Test(dependsOnMethods = "populateIndexDocuments")
  public void testIndexGetValuesUniqueIndex() {
    database.command(new OCommandSQL("create index inIdx unique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("inIdx"));
    for (int key = 0; key <= 5; key++) {
      database.command(new OCommandSQL("insert into index:inIdx (key,rid) values (" + key + ",#10:" + key + ")")).execute();
    }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("inIdx");
    final Collection<Integer> multiGetResults = new ArrayList<Integer>(Arrays.asList(1, 3));
    final Collection<OIdentifiable> indexCollection = index.getValues(Arrays.asList(1, 3));
    Assert.assertEquals(indexCollection.size(), 2);
    for (final OIdentifiable identifiable : indexCollection) {
      multiGetResults.remove(identifiable.getIdentity().getClusterPosition().intValue());
    }
    Assert.assertEquals(multiGetResults.size(), 0);
    // clean up so later tests can recreate "inIdx" with a different type
    database.command(new OCommandSQL("drop index inIdx")).execute();
  }
  // OIndex.getValues() multi-key lookup on a NOTUNIQUE index with two rids per key:
  // key k maps to records #10:(k*2) and #10:(k*2+1), so [0, 2] must yield positions {0,1,4,5}.
  @Test(dependsOnMethods = "populateIndexDocuments")
  public void testIndexGetValuesNotUniqueIndex() {
    database.command(new OCommandSQL("create index inIdx notunique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("inIdx"))
;
    for (int i = 0; i < 2; i++)
      for (int key = 0; key <= 2; key++) {
        database.command(new OCommandSQL("insert into index:inIdx (key,rid) values (" + key + ",#10:" + (i + key * 2) + ")"))
            .execute();
      }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("inIdx");
    final Collection<Integer> multiGetResults = new ArrayList<Integer>(Arrays.asList(0, 1, 4, 5));
    final Collection<OIdentifiable> indexCollection = index.getValues(Arrays.asList(0, 2));
    Assert.assertEquals(indexCollection.size(), 4);
    for (final OIdentifiable identifiable : indexCollection) {
      multiGetResults.remove(identifiable.getIdentity().getClusterPosition().intValue());
    }
    Assert.assertEquals(multiGetResults.size(), 0);
    // clean up so later tests can recreate "inIdx"
    database.command(new OCommandSQL("drop index inIdx")).execute();
  }
  // OIndex.getEntries() on a UNIQUE index must return one (key, rid) document per requested key.
  @Test(dependsOnMethods = "populateIndexDocuments")
  public void testIndexGetEntriesUniqueIndex() {
    database.command(new OCommandSQL("create index inIdx unique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("inIdx"));
    for (int key = 0; key <= 5; key++) {
      database.command(new OCommandSQL("insert into index:inIdx (key,rid) values (" + key + ",#10:" + key + ")")).execute();
    }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("inIdx");
    final Collection<Integer> multiGetResults = new ArrayList<Integer>(Arrays.asList(1, 3));
    final Collection<ODocument> indexCollection = index.getEntries(Arrays.asList(1, 3));
    Assert.assertEquals(indexCollection.size(), 2);
    for (final ODocument doc : indexCollection) {
      multiGetResults.remove(doc.<Integer> field("key"));
    }
    Assert.assertEquals(multiGetResults.size(), 0);
    // clean up so later tests can recreate "inIdx"
    database.command(new OCommandSQL("drop index inIdx")).execute();
  }
  // OIndex.getEntries() on a NOTUNIQUE index must return one entry document per (key, rid) pair:
  // two rids per key means looking up [0, 2] yields the key multiset {0, 0, 2, 2}.
  @Test(dependsOnMethods = "populateIndexDocuments")
  public void testIndexGetEntriesNotUniqueIndex() {
    database.command(new OCommandSQL("create index inIdx notunique")).execute();
    database.getMetadata().getIndexManager().reload();
    Assert.assertNotNull(database.getMetadata().getIndexManager().getIndex("inIdx"));
    for (int i = 0; i < 2; i++)
      for (int key = 0; key <= 2; key++) {
        database.command(new OCommandSQL("insert into index:inIdx (key,rid) values (" + key + ",#10:" + (i + key * 2) + ")"))
            .execute();
      }
    final OIndex<?> index = database.getMetadata().getIndexManager().getIndex("inIdx");
    final Collection<Integer> multiGetResults = new ArrayList<Integer>(Arrays.asList(0, 0, 2, 2));
    final Collection<ODocument> indexCollection = index.getEntries(Arrays.asList(0, 2));
    Assert.assertEquals(indexCollection.size(), 4);
    for (final ODocument doc : indexCollection) {
      multiGetResults.remove(doc.<Integer> field("key"));
    }
    Assert.assertEquals(multiGetResults.size(), 0);
    // clean up so later tests can recreate "inIdx"
    database.command(new OCommandSQL("drop index inIdx")).execute();
  }
  // SQL count(*) over an index must agree with OIndex.getSize().
  @Test
  public void testIndexCount() {
    final OIndex<?> nickIndex = database.getMetadata().getIndexManager().getIndex("Profile.nick");
    final List<ODocument> result = database.query(new OSQLSynchQuery<Object>("select count(*) from index:Profile.nick"));
    Assert.assertEquals(result.size(), 1);
    Assert.assertEquals(result.get(0).<Long> field("count").longValue(), nickIndex.getSize());
  }
  // Indexes a LONG property, walks the whole index once (iteration over LONG keys must not
  // fail), then checks the entry count matches the five saved profiles.
  @SuppressWarnings("unchecked")
  public void longTypes() {
    database.getMetadata().getSchema().getClass("Profile").createProperty("hash", OType.LONG).createIndex(OClass.INDEX_TYPE.UNIQUE);
    OIndex<OIdentifiable> idx = (OIndex<OIdentifiable>) database.getMetadata().getIndexManager().getIndex("Profile.hash");
    for (int i = 0; i < 5; i++) {
      Profile profile = new Profile("HashTest" + i).setHash(100l + i);
      database.save(profile);
    }
    // full iteration is a sanity check only; the entries are not inspected
    Iterator<Entry<Object, OIdentifiable>> it = idx.iterator();
    while (it.hasNext()) {
      it.next();
    }
    Assert.assertEquals(idx.getSize(), 5);
  }
  // Indexes the Whiz.account LINK property, verifies lookup by link value, then checks that
  // an embedded (not yet saved) linked document is persisted together with its owner.
  public void indexLinks() {
    database.getMetadata().getSchema().getClass("Whiz").getProperty("account").createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
    final List<Account> result = database.command(new OSQLSynchQuery<Account>("select * from Account limit 1")).execute();
    final OIndex<?> idx = database.getMetadata().getIndexManager().getIndex("Whiz.account");
    for (int i = 0; i < 5; i++) {
      final ODocument whiz = new ODocument("Whiz");
      whiz.field("id", i);
      whiz.field("text", "This is a test");
      whiz.field("account", result.get(0).getRid());
      whiz.save();
    }
    Assert.assertEquals(idx.getSize(), 5);
    // the indexed link field must be usable as a query parameter
    final List<ODocument> indexedResult = database.getUnderlying()
        .command(new OSQLSynchQuery<Profile>("select * from Whiz where account = ?")).execute(result.get(0).getRid());
    Assert.assertEquals(indexedResult.size(), 5);
    for (final ODocument resDoc : indexedResult) {
      resDoc.delete();
    }
    // saving the owner must cascade-save the new linked Company document and give it a valid RID
    final ODocument whiz = new ODocument("Whiz");
    whiz.field("id", 100);
    whiz.field("text", "This is a test!");
    whiz.field("account", new ODocument("Company").field("id", 9999));
    whiz.save();
    Assert.assertTrue(((ODocument) whiz.field("account")).getIdentity().isValid());
    ((ODocument) whiz.field("account")).delete();
    whiz.delete();
  }
  // Queries that traverse an indexed LINK property (testLink.testBoolean / testLink.testString)
  // must dereference the link instead of comparing the raw ORecordId.
  public void linkedIndexedProperty() {
    ODatabaseDocument db = new ODatabaseDocumentTx(database.getURL());
    db.open("admin", "admin");
    if (!db.getMetadata().getSchema().existsClass("TestClass")) {
      OClass testClass = db.getMetadata().getSchema().createClass("TestClass");
      OClass testLinkClass = db.getMetadata().getSchema().createClass("TestLinkClass");
      testClass.createProperty("testLink", OType.LINK, testLinkClass).createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
      testClass.createProperty("name", OType.STRING).createIndex(OClass.INDEX_TYPE.UNIQUE);
      testLinkClass.createProperty("testBoolean", OType.BOOLEAN);
      testLinkClass.createProperty("testString", OType.STRING);
      db.getMetadata().getSchema().save();
    }
    ODocument testClassDocument = db.newInstance("TestClass");
    testClassDocument.field("name", "Test Class 1");
    ODocument testLinkClassDocument = new ODocument("TestLinkClass");
    testLinkClassDocument.field("testString", "Test Link Class 1");
    testLinkClassDocument.field("testBoolean", true);
    testClassDocument.field("testLink", testLinkClassDocument);
    testClassDocument.save();
    // Regression guard: this query used to throw java.lang.ClassCastException:
    // com.orientechnologies.orient.core.id.ORecordId cannot be cast to java.lang.Boolean
    List<ODocument> result = db.query(new OSQLSynchQuery<ODocument>("select from TestClass where testLink.testBoolean = true"));
    Assert.assertEquals(result.size(), 1);
    // Regression guard: this query used to throw java.lang.ClassCastException:
    // com.orientechnologies.orient.core.id.ORecordId cannot be cast to java.lang.String
    result = db.query(new OSQLSynchQuery<ODocument>("select from TestClass where testLink.testString = 'Test Link Class 1'"));
    Assert.assertEquals(result.size(), 1);
    db.close();
  }
  // Same link-traversal queries as linkedIndexedProperty, but the documents are created
  // inside a transaction; after commit both queries must see the new record.
  @Test(dependsOnMethods = "linkedIndexedProperty")
  public void testLinkedIndexedPropertyInTx() {
    ODatabaseDocument db = new ODatabaseDocumentTx(database.getURL())
;
    db.open("admin", "admin");
    db.begin();
    ODocument testClassDocument = db.newInstance("TestClass");
    testClassDocument.field("name", "Test Class 2");
    ODocument testLinkClassDocument = new ODocument("TestLinkClass");
    testLinkClassDocument.field("testString", "Test Link Class 2");
    testLinkClassDocument.field("testBoolean", true);
    testClassDocument.field("testLink", testLinkClassDocument);
    testClassDocument.save();
    db.commit();
    // Regression guard: used to throw ClassCastException (ORecordId -> java.lang.Boolean);
    // two matches now: one from linkedIndexedProperty plus the one committed above
    List<ODocument> result = db.query(new OSQLSynchQuery<ODocument>("select from TestClass where testLink.testBoolean = true"));
    Assert.assertEquals(result.size(), 2);
    // Regression guard: used to throw ClassCastException (ORecordId -> java.lang.String)
    result = db.query(new OSQLSynchQuery<ODocument>("select from TestClass where testLink.testString = 'Test Link Class 2'"));
    Assert.assertEquals(result.size(), 1);
    db.close();
  }
  // A DICTIONARY composite index (firstName, lastName) must tolerate a duplicate key:
  // saving the second "foo bar" person does not raise, unlike a UNIQUE index.
  public void testDictionary() {
    ODatabaseDocument db = new ODatabaseDocumentTx(database.getURL());
    db.open("admin", "admin");
    OClass pClass = db.getMetadata().getSchema().createClass("Person2");
    pClass.createProperty("firstName", OType.STRING);
    pClass.createProperty("lastName", OType.STRING);
    pClass.createProperty("age", OType.INTEGER);
    pClass.createIndex("testIdx", INDEX_TYPE.DICTIONARY, "firstName", "lastName");
    ODocument person = new ODocument("Person2");
    person.field("firstName", "foo").field("lastName", "bar").save();
    // same composite key as above — must be accepted by the dictionary index
    person = new ODocument("Person2");
    person.field("firstName", "foo").field("lastName", "bar").field("age", 32).save();
    db.close();
  }
  // Repeatedly creates a chunk of MyFruit records in one transaction and deletes half of them
  // in a second transaction; exercises unique (name) and non-unique (color) index maintenance
  // under many passes of tx add/remove.
  public void testConcurrentRemoveDelete() {
    ODatabaseDocument db = new ODatabaseDocumentTx(database.getURL());
    db.open("admin", "admin");
    if (!db.getMetadata().getSchema().existsClass("MyFruit")) {
      OClass fruitClass = db.getMetadata().getSchema().createClass("MyFruit");
      fruitClass.createProperty("name", OType.STRING);
      fruitClass.createProperty("color", OType.STRING);
      db.getMetadata().getSchema().getClass("MyFruit").getProperty("name").createIndex(OClass.INDEX_TYPE.UNIQUE);
      db.getMetadata().getSchema().getClass("MyFruit").getProperty("color").createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
      db.getMetadata().getSchema().save();
    }
    final int passCount = 10;
    final int chunkSize = 1000;
    for (int pass = 0; pass < passCount; pass++) {
      List<ODocument> recordsToDelete = new ArrayList<ODocument>();
      db.begin();
      for (int i = 0; i < chunkSize; i++) {
        // name is unique per pass+index; color is shared within a pass
        ODocument d = new ODocument("MyFruit").field("name", "ABC" + pass + 'K' + i).field("color", "FOO" + pass);
        d.save();
        if (i < chunkSize / 2) {
          recordsToDelete.add(d);
        }
      }
      db.commit();
      // do delete
      db.begin();
      for (final ODocument recordToDelete : recordsToDelete) {
        Assert.assertNotNull(db.delete(recordToDelete));
      }
      db.commit();
    }
    db.close();
  }
public void testIndexParamsAutoConversion() {
ODatabaseDocument db = new ODatabaseDocumentTx(database.getURL());
db.open("admin", "admin");
if (!db.getMetadata().getSchema().existsClass("IndexTestTerm")) {
final OClass termClass = db.getMetadata().getSchema().createClass("IndexTestTerm");
termClass.createProperty("label", OType.STRING);
termClass.createIndex("idxTerm", INDEX_TYPE.UNIQUE, "label");
db.getMetadata().getSchema().save();
}
final ODocument doc = new ODocument("IndexTestTerm");
doc.field("label", "42");
doc.save();
final ORecordId result = (ORecordId) db.getMetadata().getIndexManager().getIndex("idxTerm").get("42");
Assert.assertNotNull(result);
Assert.assertEquals(result.getIdentity(), doc.getIdentity());
}
public void testTransactionUniqueIndexTestOne() {
ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL());
db.open("admin", "admin");
if (!db.getMetadata().getSchema().existsClass("TransactionUniqueIndexTest")) {
final OClass termClass = db.getMetadata().getSchema().createClass("TransactionUniqueIndexTest");
termClass.createProperty("label", OType.STRING);
termClass.createIndex("idxTransactionUniqueIndexTest", INDEX_TYPE.UNIQUE, "label");
db.getMetadata().getSchema().save();
}
ODocument docOne = new ODocument("TransactionUniqueIndexTest");
docOne.field("label", "A");
docOne.save();
final List<ODocument> resultBeforeCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:idxTransactionUniqueIndexTest"));
Assert.assertEquals(resultBeforeCommit.size(), 1);
db.begin();
try {
ODocument docTwo = new ODocument("TransactionUniqueIndexTest");
docTwo.field("label", "A");
docTwo.save();
db.commit();
Assert.fail();
} catch (OIndexException oie) {
}
final List<ODocument> resultAfterCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:idxTransactionUniqueIndexTest"));
Assert.assertEquals(resultAfterCommit.size(), 1);
}
@Test(dependsOnMethods = "testTransactionUniqueIndexTestOne")
public void testTransactionUniqueIndexTestTwo() {
ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL());
db.open("admin", "admin");
if (!db.getMetadata().getSchema().existsClass("TransactionUniqueIndexTest")) {
final OClass termClass = db.getMetadata().getSchema().createClass("TransactionUniqueIndexTest");
termClass.createProperty("label", OType.STRING);
termClass.createIndex("idxTransactionUniqueIndexTest", INDEX_TYPE.UNIQUE, "label");
db.getMetadata().getSchema().save();
}
final List<ODocument> resultBeforeCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:idxTransactionUniqueIndexTest"));
Assert.assertEquals(resultBeforeCommit.size(), 1);
db.begin();
try {
ODocument docOne = new ODocument("TransactionUniqueIndexTest");
docOne.field("label", "B");
docOne.save();
ODocument docTwo = new ODocument("TransactionUniqueIndexTest");
docTwo.field("label", "B");
docTwo.save();
db.commit();
Assert.fail();
} catch (OIndexException oie) {
}
final List<ODocument> resultAfterCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:idxTransactionUniqueIndexTest"));
Assert.assertEquals(resultAfterCommit.size(), 1);
}
public void testTransactionUniqueIndexTestWithDotNameOne() {
ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL());
db.open("admin", "admin");
if (!db.getMetadata().getSchema().existsClass("TransactionUniqueIndexWithDotTest")) {
final OClass termClass = db.getMetadata().getSchema().createClass("TransactionUniqueIndexWithDotTest");
termClass.createProperty("label", OType.STRING).createIndex(INDEX_TYPE.UNIQUE);
db.getMetadata().getSchema().save();
}
ODocument docOne = new ODocument("TransactionUniqueIndexWithDotTest");
docOne.field("label", "A");
docOne.save();
final List<ODocument> resultBeforeCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:TransactionUniqueIndexWithDotTest.label"));
Assert.assertEquals(resultBeforeCommit.size(), 1);
long countClassBefore = db.countClass("TransactionUniqueIndexWithDotTest");
db.begin();
try {
ODocument docTwo = new ODocument("TransactionUniqueIndexWithDotTest");
docTwo.field("label", "A");
docTwo.save();
db.commit();
Assert.fail();
} catch (OIndexException oie) {
}
Assert.assertEquals(
((List<ODocument>) db.command(new OCommandSQL("select from TransactionUniqueIndexWithDotTest")).execute()).size(),
countClassBefore);
final List<ODocument> resultAfterCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:TransactionUniqueIndexWithDotTest.label"));
Assert.assertEquals(resultAfterCommit.size(), 1);
}
@Test(dependsOnMethods = "testTransactionUniqueIndexTestWithDotNameOne")
public void testTransactionUniqueIndexTestWithDotNameTwo() {
ODatabaseDocumentTx db = new ODatabaseDocumentTx(database.getURL());
db.open("admin", "admin");
if (!db.getMetadata().getSchema().existsClass("TransactionUniqueIndexWithDotTest")) {
final OClass termClass = db.getMetadata().getSchema().createClass("TransactionUniqueIndexWithDotTest");
termClass.createProperty("label", OType.STRING).createIndex(INDEX_TYPE.UNIQUE);
db.getMetadata().getSchema().save();
}
final List<ODocument> resultBeforeCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:TransactionUniqueIndexWithDotTest.label"));
Assert.assertEquals(resultBeforeCommit.size(), 1);
db.begin();
try {
ODocument docOne = new ODocument("TransactionUniqueIndexWithDotTest");
docOne.field("label", "B");
docOne.save();
ODocument docTwo = new ODocument("TransactionUniqueIndexWithDotTest");
docTwo.field("label", "B");
docTwo.save();
db.commit();
Assert.fail();
} catch (OIndexException oie) {
}
final List<ODocument> resultAfterCommit = db.query(new OSQLSynchQuery<ODocument>(
"select from index:TransactionUniqueIndexWithDotTest.label"));
Assert.assertEquals(resultAfterCommit.size(), 1);
}
  // Deleting a record must remove its entry from the automatic Profile.nick index.
  @Test(dependsOnMethods = "linkedIndexedProperty")
  public void testIndexRemoval() {
    List<ODocument> result = database.command(new OCommandSQL("select rid from index:Profile.nick")).execute();
    Assert.assertNotNull(result);
    ODocument firstProfile = null;
    for (ODocument d : result) {
      if (firstProfile == null)
        firstProfile = d.field("rid");
      // projecting only "rid" must not expose the index key
      Assert.assertFalse(d.containsField("key"));
      Assert.assertTrue(d.containsField("rid"));
    }
    result = database.command(new OCommandSQL("select rid from index:Profile.nick where key = ?")).execute(
        firstProfile.field("nick"));
    Assert.assertNotNull(result);
    Assert.assertEquals(result.get(0).field("rid", OType.LINK), firstProfile.getIdentity());
    firstProfile.delete();
    // after the delete the same keyed lookup must come back empty
    result = database.command(new OCommandSQL("select rid from index:Profile.nick where key = ?")).execute(
        firstProfile.field("nick"));
    Assert.assertTrue(result.isEmpty());
  }
  // An index defined on a parent class property must also index documents saved
  // through its subclasses (ChildTestClass / AnotherChildTestClass).
  public void createInheritanceIndex() {
    ODatabaseDocument db = new ODatabaseDocumentTx(database.getURL());
    try {
      db.open("admin", "admin");
      if (!db.getMetadata().getSchema().existsClass("BaseTestClass")) {
        OClass baseClass = db.getMetadata().getSchema().createClass("BaseTestClass");
        OClass childClass = db.getMetadata().getSchema().createClass("ChildTestClass");
        OClass anotherChildClass = db.getMetadata().getSchema().createClass("AnotherChildTestClass");
        // wire up the inheritance hierarchy before indexing the parent property
        if (!baseClass.isSuperClassOf(childClass))
          childClass.setSuperClass(baseClass);
        if (!baseClass.isSuperClassOf(anotherChildClass))
          anotherChildClass.setSuperClass(baseClass);
        baseClass.createProperty("testParentProperty", OType.LONG).createIndex(OClass.INDEX_TYPE.NOTUNIQUE);
        db.getMetadata().getSchema().save();
      }
      ODocument childClassDocument = db.newInstance("ChildTestClass");
      childClassDocument.field("testParentProperty", 10L);
      childClassDocument.save();
      ODocument anotherChildClassDocument = db.newInstance("AnotherChildTestClass");
      anotherChildClassDocument.field("testParentProperty", 11L);
      anotherChildClassDocument.save();
      // both documents must have been assigned real (valid) record identities
      Assert.assertFalse(new ORecordId(-1, ORecordId.CLUSTER_POS_INVALID).equals(childClassDocument.getIdentity()));
      Assert.assertFalse(new ORecordId(-1, ORecordId.CLUSTER_POS_INVALID).equals(anotherChildClassDocument.getIdentity()));
    } finally {
      db.close();
    }
  }
  // The shared parent-class index must still restrict results to the queried subclass.
  // NOTE(review): these assertEquals calls pass (expected, actual) — reversed relative to the
  // (actual, expected) order used elsewhere in this file; only failure messages are affected.
  @Test(dependsOnMethods = "createInheritanceIndex")
  public void testIndexReturnOnlySpecifiedClass() throws Exception {
    List<ODocument> result;
    ODatabaseDocument db = database.getUnderlying();
    result = db.command(new OSQLSynchQuery("select * from ChildTestClass where testParentProperty = 10")).execute();
    Assert.assertNotNull(result);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(10L, result.get(0).field("testParentProperty"));
    result = db.command(new OCommandSQL("select * from AnotherChildTestClass where testParentProperty = 11")).execute();
    Assert.assertNotNull(result);
    Assert.assertEquals(1, result.size());
    Assert.assertEquals(11L, result.get(0).field("testParentProperty"));
  }
  // Manual (non-automatic) index mutations inside an optimistic transaction must be applied on
  // commit, with the new record's temporary identity resolved to its final persistent RID.
  @Test
  public void testManualIndexInTx() {
    ODatabaseDocumentTx db = (ODatabaseDocumentTx) database.getUnderlying();
    database.getMetadata().getSchema().createClass("ManualIndexTxClass");
    OIndexManager idxManager = db.getMetadata().getIndexManager();
    idxManager.createIndex("manualTxIndexTest", "UNIQUE", new OSimpleKeyIndexDefinition(OType.INTEGER), null, null);
    OIndex<OIdentifiable> idx = (OIndex<OIdentifiable>) idxManager.getIndex("manualTxIndexTest");
    // baseline entry created outside any transaction
    ODocument v0 = new ODocument("ManualIndexTxClass");
    v0.field("counter", 0);
    v0.save();
    idx.put(0, v0);
    Assert.assertTrue(idx.contains(0));
    db.begin(OTransaction.TXTYPE.OPTIMISTIC);
    ODocument v = new ODocument("ManualIndexTxClass");
    v.field("counter", 52);
    v.save();
    ODocument v2 = new ODocument("ManualIndexTxClass");
    v2.field("counter", 54);
    v2.save();
    Assert.assertNotNull(idx);
    // remove the baseline and add the tx-created record, all within the transaction
    idx.remove(0);
    idx.put(52, v);
    db.commit();
    Assert.assertTrue(idx.contains(52));
    Assert.assertFalse(idx.contains(0));
    // the indexed identity must have been translated from temporary to persistent
    Assert.assertTrue(idx.get(52).getIdentity().isPersistent());
    Assert.assertEquals(idx.get(52).getIdentity(), v.getIdentity());
  }
  // Same as testManualIndexInTx but with two tx-created records that link to each other,
  // forcing recursive save: both indexed identities must resolve to persistent RIDs on commit.
  @Test
  public void testManualIndexInTxRecursiveStore() {
    ODatabaseDocumentTx db = (ODatabaseDocumentTx) database.getUnderlying();
    database.getMetadata().getSchema().createClass("ManualIndexTxRecursiveStoreClass");
    OIndexManager idxManager = db.getMetadata().getIndexManager();
    idxManager.createIndex("manualTxIndexRecursiveStoreTest", "UNIQUE", new OSimpleKeyIndexDefinition(OType.INTEGER), null, null);
    OIndex<OIdentifiable> idx = (OIndex<OIdentifiable>) idxManager.getIndex("manualTxIndexRecursiveStoreTest");
    // baseline entry created outside any transaction
    ODocument v0 = new ODocument("ManualIndexTxRecursiveStoreClass");
    v0.field("counter", 0);
    v0.save();
    idx.put(0, v0);
    Assert.assertTrue(idx.contains(0));
    db.begin(OTransaction.TXTYPE.OPTIMISTIC);
    ODocument v = new ODocument("ManualIndexTxRecursiveStoreClass");
    v.field("counter", 52);
    ODocument v2 = new ODocument("ManualIndexTxRecursiveStoreClass");
    v2.field("counter", 54);
    // create a cycle: v2 -> v and v -> v2, saved in both directions
    v2.field("link", v);
    v2.save();
    v.field("link", v2);
    v.save();
    Assert.assertNotNull(idx);
    idx.remove(0);
    idx.put(52, v);
    idx.put(54, v2);
    db.commit();
    Assert.assertTrue(idx.contains(52));
    Assert.assertTrue(idx.contains(54));
    Assert.assertFalse(idx.contains(0));
    // both identities must have been translated from temporary to persistent RIDs
    Assert.assertTrue(idx.get(52).getIdentity().isPersistent());
    Assert.assertEquals(idx.get(52).getIdentity(), v.getIdentity());
    Assert.assertTrue(idx.get(54).getIdentity().isPersistent());
    Assert.assertEquals(idx.get(54).getIdentity(), v2.getIdentity());
  }
  // count(*) with a key condition over a manual NOTUNIQUE index must equal the number of
  // entries stored for that key. (int) Math.log(i) collapses 1..99 into a few distinct keys.
  public void testIndexCountPlusCondition() {
    OIndexManager idxManager = database.getMetadata().getIndexManager();
    idxManager.createIndex("IndexCountPlusCondition", "NOTUNIQUE", new OSimpleKeyIndexDefinition(OType.INTEGER), null, null);
    final OIndex<OIdentifiable> idx = (OIndex<OIdentifiable>) idxManager.getIndex("IndexCountPlusCondition");
    // tracks how many documents were stored under each distinct key
    final Map<Integer, Long> keyDocsCount = new HashMap<Integer, Long>();
    for (int i = 1; i < 100; i++) {
      final Integer key = (int) Math.log(i);
      final ODocument doc = new ODocument();
      doc.save();
      idx.put(key, doc);
      if (keyDocsCount.containsKey(key))
        keyDocsCount.put(key, keyDocsCount.get(key) + 1);
      else
        keyDocsCount.put(key, 1L);
    }
    for (Map.Entry<Integer, Long> entry : keyDocsCount.entrySet()) {
      List<ODocument> result = database.query(new OSQLSynchQuery<ODocument>(
          "select count(*) from index:IndexCountPlusCondition where key = ?"), entry.getKey());
      Assert.assertEquals(result.get(0).<Long> field("count"), entry.getValue());
    }
  }
public void testNotUniqueIndexKeySize() {
OIndexManager idxManager = database.getMetadata().getIndexManager();
idxManager.createIndex("IndexNotUniqueIndexKeySize", "NOTUNIQUE", new OSimpleKeyIndexDefinition(OType.INTEGER), null, null);
final OIndex<OIdentifiable> idx = (OIndex<OIdentifiable>) idxManager.getIndex("IndexNotUniqueIndexKeySize");
final Set<Integer> keys = new HashSet<Integer>();
for (int i = 1; i < 100; i++) {
final Integer key = (int) Math.log(i);
final ODocument doc = new ODocument();
doc.save();
idx.put(key, doc);
keys.add(key);
}
Assert.assertEquals(idx.getKeySize(), keys.size());
}
public void testNotUniqueIndexSize() {
OIndexManager idxManager = database.getMetadata().getIndexManager();
idxManager.createIndex("IndexNotUniqueIndexSize", "NOTUNIQUE", new OSimpleKeyIndexDefinition(OType.INTEGER), null, null);
final OIndex<OIdentifiable> idx = (OIndex<OIdentifiable>) idxManager.getIndex("IndexNotUniqueIndexSize");
for (int i = 1; i < 100; i++) {
final Integer key = (int) Math.log(i);
final ODocument doc = new ODocument();
doc.save();
idx.put(key, doc);
}
Assert.assertEquals(idx.getSize(), 99);
}
  // Deleting a detached (non-proxied) object must still remove its entry from the nick index.
  @Test
  public void testIndexRebuildDuringNonProxiedObjectDelete() {
    Profile profile = new Profile("NonProxiedObjectToDelete", "NonProxiedObjectToDelete", "NonProxiedObjectToDelete", null);
    profile = database.save(profile);
    OIndexManager idxManager = database.getMetadata().getIndexManager();
    OIndex<?> nickIndex = idxManager.getIndex("Profile.nick");
    Assert.assertTrue(nickIndex.contains("NonProxiedObjectToDelete"));
    final Profile loadedProfile = database.load(new ORecordId(profile.getId()));
    // detach(..., true) returns a plain (non-proxied) instance before deleting it
    database.delete(database.detach(loadedProfile, true));
    Assert.assertFalse(nickIndex.contains("NonProxiedObjectToDelete"));
  }
  // Same as the previous test but detaching the whole object graph via detachAll(...).
  @Test(dependsOnMethods = "testIndexRebuildDuringNonProxiedObjectDelete")
  public void testIndexRebuildDuringDetachAllNonProxiedObjectDelete() {
    Profile profile = new Profile("NonProxiedObjectToDelete", "NonProxiedObjectToDelete", "NonProxiedObjectToDelete", null);
    profile = database.save(profile);
    OIndexManager idxManager = database.getMetadata().getIndexManager();
    OIndex<?> nickIndex = idxManager.getIndex("Profile.nick");
    Assert.assertTrue(nickIndex.contains("NonProxiedObjectToDelete"));
    final Profile loadedProfile = database.load(new ORecordId(profile.getId()));
    database.delete(database.detachAll(loadedProfile, true));
    Assert.assertFalse(nickIndex.contains("NonProxiedObjectToDelete"));
  }
private List<OClusterPosition> getValidPositions(int clusterId) {
final List<OClusterPosition> positions = new ArrayList<OClusterPosition>();
final ORecordIteratorCluster<?> iteratorCluster = database.getUnderlying()
.browseCluster(database.getClusterNameById(clusterId));
for (int i = 0; i < 7; i++) {
if (!iteratorCluster.hasNext())
break;
ORecord<?> doc = iteratorCluster.next();
positions.add(doc.getIdentity().getClusterPosition());
}
return positions;
}
}
| nengxu/OrientDB | tests/src/test/java/com/orientechnologies/orient/test/database/auto/IndexTest.java | Java | apache-2.0 | 64,141 |
using Telerik.Geospatial;
using Windows.Foundation;
namespace Telerik.UI.Xaml.Controls.Map
{
    /// <summary>
    /// Defines a contract that allows arbitrary data items to be visualized within a <see cref="RadMap"/> instance.
    /// </summary>
    public interface IMapDataItem
    {
        /// <summary>
        /// Gets the geographic location where the item needs to be displayed.
        /// </summary>
        Location Location
        {
            get;
        }
        /// <summary>
        /// Gets the minimum of the zoom range in which the item is visible.
        /// </summary>
        double MinZoom
        {
            get;
        }
        /// <summary>
        /// Gets the maximum of the zoom range in which the item is visible.
        /// </summary>
        double MaxZoom
        {
            get;
        }
        /// <summary>
        /// Gets the point that defines how the visual representation of the data item is aligned with the physical coordinates of the <see cref="Location"/> property.
        /// A value of (0.5, 0.5) will center the visual representation over the geographic location.
        /// </summary>
        Point LocationOrigin
        {
            get;
        }
    }
}
| geotinc/UI-For-UWP | Controls/Map/Map.UWP/Layers/IMapDataItem.cs | C# | apache-2.0 | 1,199 |
/*
* Copyright 2016 Layne Mobile, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.laynemobile.proxy.functions;
/**
 * A six-argument action: consumes six inputs and returns no result.
 * Extends the local {@link Action} marker while remaining interchangeable with
 * {@link rx.functions.Action6}.
 */
public interface Action6<T1, T2, T3, T4, T5, T6> extends Action,
        rx.functions.Action6<T1, T2, T3, T4, T5, T6> {}
| LayneMobile/Proxy | proxy-functions-rx/src/main/java/com/laynemobile/proxy/functions/Action6.java | Java | apache-2.0 | 786 |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/enums/ad_serving_optimization_status.proto
package com.google.ads.googleads.v10.enums;
// Generated message-or-builder contract for AdServingOptimizationStatusEnum; regenerate from
// the .proto source rather than editing by hand.
public interface AdServingOptimizationStatusEnumOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.ads.googleads.v10.enums.AdServingOptimizationStatusEnum)
    com.google.protobuf.MessageOrBuilder {
}
| googleads/google-ads-java | google-ads-stubs-v10/src/main/java/com/google/ads/googleads/v10/enums/AdServingOptimizationStatusEnumOrBuilder.java | Java | apache-2.0 | 410 |
'use strict';
/**
* @ngdoc function
* @name sbAdminApp.controller:MainCtrl
* @description
* # MainCtrl
* Controller of the sbAdminApp
*/
angular.module('sbAdminApp')
.controller('ChartCtrl', ['$scope', '$timeout', '$http', 'employeeService', 'socket', '$modalStack', 'settingsService', '$state', function ($scope, $timeout, $http, employeeService, socket, $modalStack, settingsService, $state) {
var currentUser = Parse.User.current();
if(!currentUser){
$state.go('login');
}
var settingId = currentUser.get('settingId');
var fingerPrintIdPool = [];
var idToBeDeleted = '';
$scope.totalUsers = null;
// $scope.uploadFile = {};
$scope.isScanFinger = true;
$scope.defaultProfPic = "img/logo/logo_placeholder.png";
$scope.scanStatus = 'Scan';
$scope.sortLists=[{id:0, name:"Id"},{id:1,name:"firstName"},{id:2,name:"lastName"},{id:3,name:"gender"},{id:4,name:"age"}]
$scope.changedValue=function(item){
if(item.name === 'Id'){
getAll('fingerPrintId');
}
getAll(item.name);
}
var currentEmployee = '';
function getAll(sort){
employeeService.getEmployees(sort)
.then(function(results) {
// Handle the result
console.log(results);
$scope.rowCollection = results;
$scope.totalUsers = results.length;
return results;
}, function(err) {
// Error occurred
console.log(err);
}, function(percentComplete) {
console.log(percentComplete);
});
};
getAll();
getSettings();
// Loads the Settings record for this account and refreshes the module-level
// fingerPrintIdPool plus the scope fields derived from it.
function getSettings(){
settingsService.getSetting(settingId)
.then(function(results) {
// Handle the result
console.log(results);
$scope.settings = results[0];
console.log($scope.settings);
$scope.userTable = $scope.settings.get('userTable');
$scope.userInfo = {}
// Keep the local pool in sync with the server copy.
fingerPrintIdPool = $scope.settings.get('fingerPrintIdPool');
console.log(fingerPrintIdPool);
}, function(err) {
// Error occurred
console.log(err);
}, function(percentComplete) {
console.log(percentComplete);
});
};
// Form model bound to the add/edit user dialog.
$scope.user = {
'firstName' : '',
'lastName' : '',
'gender' : 'Male',
'age' : ''
}
// Dialog chrome state: title/mode label and whether we are editing.
$scope.modal = {
title : '',
mode : '',
isUpdate : false
}
// Deletes every row whose 'isSelected' flag is set, one at a time with a
// 5-second pause between deletions (allows each 'm:delete' sensor
// round-trip to complete before the next one starts).
//
// NOTE(review): if rowCollection is empty the IIFE is invoked with i === 0
// and rowCollection[-1] is undefined, so .get() would throw — confirm the
// button is unavailable when the table is empty.
// NOTE(review): when the last examined row is unselected the loop exits via
// the else-branch and isDeleteProgress is never reset to false.
$scope.deleteSelected = function(){
var delay = 0; // unused
$scope.isDeleteProgress = true;
// Walk the collection from the end (index i-1) toward the start.
(function myLoop (i) {
if($scope.rowCollection[i-1].get('isSelected')){
setTimeout(function () {
currentEmployee = $scope.rowCollection[i-1];
$scope.deleteUser();
if (--i){
myLoop(i);
}else{
$scope.isDeleteProgress = false;
$scope.isDeleteCompleted = true;
}
}, 5000)
}else{
if (--i) myLoop(i);
}
})($scope.rowCollection.length);
}
// Toggle the 'isSelected' flag either on a single row or, when isAll is
// truthy, on every row in the table.
$scope.selectedUser = function (user, status, isAll) {
  console.log(user);
  console.log(status);
  if (isAll) {
    angular.forEach($scope.rowCollection, function (row) {
      row.set('isSelected', status);
    });
  } else {
    user.set('isSelected', status);
  }
};
// Reset the dialog into "create" mode with a blank form and default
// fingerprint-scan state.
$scope.openModal = function () {
  $scope.modal.title = 'Add User';
  $scope.modal.mode = 'Create';
  $scope.modal.isUpdate = false;
  angular.extend($scope.user, {
    employeeId: '',
    firstName: '',
    lastName: '',
    gender: 'Male',
    age: '',
    position: ''
  });
  $scope.previewImage = '';
  $scope.scanStatus = 'Scan';
  $scope.buttonScanStatus = 'btn-info';
  $scope.deleteConfirmation = false;
};
// Switch the dialog into "edit" mode and populate the form from the
// employee identified by `id`; also remembers the Parse object in
// currentEmployee for the later save/delete actions.
$scope.editModal = function (id) {
console.log(id);
$scope.modal.title = 'Edit User';
$scope.modal.mode = 'Update';
$scope.modal.isUpdate = true;
currentEmployee = '';
$scope.previewImage = '';
$scope.scanStatus = 'Change Fingerprint';
$scope.buttonScanStatus = 'btn-info';
$scope.deleteConfirmation = false;
$scope.isCurrentFingerDeleted = false;
employeeService.getEmployee(id)
.then(function(result) {
// Handle the result
console.log(result);
$scope.user.employeeId = result[0].get('employeeId');
$scope.user.firstName = result[0].get('firstName');
$scope.user.lastName = result[0].get('lastName');
$scope.user.gender = result[0].get('gender');
$scope.user.age = result[0].get('age');
$scope.user.position = result[0].get('position');
$scope.user.fingerPrintId = result[0].get('fingerPrintId');
$scope.previewImage = result[0].get('avatarUrl');
currentEmployee = result[0];
}, function(err) {
// Error occurred
console.log(err);
}, function(percentComplete) {
console.log(percentComplete);
});
};
// Persists edits from the dialog onto currentEmployee. If a new picture was
// chosen it is uploaded to the Parse files endpoint first; in both cases the
// employee is saved and the (possibly shrunken) fingerprint-id pool is
// written back to Settings.
$scope.updateUser = function(){
  console.log($scope.uploadFile);
  currentEmployee.set("employeeId", $scope.user.employeeId);
  currentEmployee.set("firstName", $scope.user.firstName);
  currentEmployee.set("lastName", $scope.user.lastName);
  currentEmployee.set("gender", $scope.user.gender);
  currentEmployee.set("age", $scope.user.age);
  currentEmployee.set("position", $scope.user.position);
  if($scope.isCurrentFingerDeleted){
    // The old print was removed from the sensor: take the next free slot.
    var fingerPrintId = fingerPrintIdPool[0];
    removeA(fingerPrintIdPool, fingerPrintId);
    currentEmployee.set("fingerPrintId", fingerPrintId.toString());
  }
  // Saves the employee, then persists the updated pool to Settings and
  // reloads both lists. (This cascade was previously duplicated verbatim in
  // both branches below.)
  function saveEmployeeAndPool() {
    currentEmployee.save(null, {
      success: function(result) {
        getAll();
        var Settings = Parse.Object.extend("Settings");
        var settings = new Settings();
        settings.id = settingId;
        settings.set("fingerPrintIdPool", fingerPrintIdPool);
        settings.save(null, {
          success: function(result) {
            $scope.userTableResult = [];
            console.log(result);
            getSettings();
          },
          error: function(gameScore, error) {
            // Save failed; error is a Parse.Error with code and message.
          }
        });
      },
      error: function(gameScore, error) {
        // Save failed; error is a Parse.Error with code and message.
      }
    });
  }
  if($scope.uploadFile){
    // Upload the picture, then attach its URL before saving.
    $http.post("http://172.24.1.1:1337/parse/files/image.jpg", $scope.uploadFile, {
      withCredentials: false,
      headers: {
        'X-Parse-Application-Id': 'myAppId',
        'X-Parse-REST-API-Key': 'myRestAPIKey',
        'Content-Type': 'image/jpeg'
      },
      transformRequest: angular.identity
    }).then(function(data) {
      currentEmployee.set("avatarUrl", data.data.url);
      saveEmployeeAndPool();
    },function(err){
      alert('Picture should not exceed 2mb, Please Try again.');
    });
  } else{
    saveEmployeeAndPool();
  }
}
// Frees the sensor slot whose id arrived via the 'found:' socket event:
// returns the id to the pool, triggers hardware deletion via 'm:delete',
// and clears the fingerPrintId on the matching employee, if one exists.
$scope.manualDeleteUserFromSensor = function(){
  console.log($scope.detectedFingerPrintId);
  employeeService.getEmployeeByFingerPrintId($scope.detectedFingerPrintId)
    .then(function(result) {
      // (Removed an unused duplicate `detectedEmployee` declaration that was
      // immediately shadowed by the one inside the if-block below.)
      var Settings = Parse.Object.extend("Settings");
      var settings = new Settings();
      settings.id = settingId;
      fingerPrintIdPool.push(parseInt($scope.detectedFingerPrintId));
      settings.set("fingerPrintIdPool", fingerPrintIdPool);
      console.log(fingerPrintIdPool);
      settings.save(null, {
        success: function(result) {
          getSettings();
          idToBeDeleted = parseInt($scope.detectedFingerPrintId);
          socket.emit('toPublicServer', 'm:delete');
        },
        error: function(gameScore, error) {
          // Save failed; error is a Parse.Error with code and message.
        }
      });
      if(result.length){
        var detectedEmployee = result[0];
        detectedEmployee.set("fingerPrintId", "");
        detectedEmployee.save(null, {
          success: function(result) {
          },
          error: function(gameScore, error) {
            // Save failed; error is a Parse.Error with code and message.
          }
        });
      }
    }, function(err) {
      // Error occurred
      console.log(err);
    }, function(percentComplete) {
      console.log(percentComplete);
    });
}
// Destroys currentEmployee, then: resets the dialog, returns the freed
// fingerprint slot to the pool, asks the sensor to forget the print
// ('m:delete'), and purges the employee's DailyLog / PeriodLog rows.
// NOTE(review): currentEmployee.get('fingerPrintId') is read again after
// destroy() — the local Parse object still holds its attributes, and the
// value matches idToBeDeleted captured above; confirm before reordering.
$scope.deleteUser = function(){
console.log(parseInt(currentEmployee.get('fingerPrintId')));
// Remember which sensor slot to erase when the device asks for it.
idToBeDeleted = parseInt(currentEmployee.get('fingerPrintId'));
currentEmployee.destroy({
success: function(myObject) {
// Reset the dialog back to a blank "create" state and close it.
$scope.modal.title = 'This User no longer exists.';
$scope.modal.mode = 'Create';
$scope.modal.isUpdate = false;
$scope.user.employeeId = '';
$scope.user.firstName = '';
$scope.user.lastName = '';
$scope.user.gender = 'Male';
$scope.user.age = '';
$scope.user.position = '';
$scope.previewImage = '';
$scope.scanStatus = 'Scan';
$scope.buttonScanStatus = 'btn-info';
$modalStack.dismissAll();
getAll();
// Return the freed slot id to the pool and persist it.
var Settings = Parse.Object.extend("Settings");
var settings = new Settings();
settings.id = settingId;
fingerPrintIdPool.push(parseInt(currentEmployee.get('fingerPrintId')));
settings.set("fingerPrintIdPool", fingerPrintIdPool);
settings.save(null, {
success: function(result) {
// Execute any logic that should take place after the object is saved.
$scope.userTableResult = [];
console.log(result);
getSettings();
socket.emit('toPublicServer', 'm:delete');
// Cascade-delete the employee's daily logs.
var DailyLogObject = Parse.Object.extend("DailyLog");
var query = new Parse.Query(DailyLogObject);
query.equalTo("employeeId", currentEmployee.id);
query.find().then(function (users) {
users.forEach(function(user) {
user.destroy({
success: function() {
// SUCCESS CODE HERE, IF YOU WANT
console.log('daily log deleted');
},
error: function() {
// ERROR CODE HERE, IF YOU WANT
console.log('daily log error delete');
}
});
});
}, function (error) {
response.error(error);
});
// Cascade-delete the employee's period logs.
var PeriodLogObject = Parse.Object.extend("PeriodLog");
var queryPeriod = new Parse.Query(PeriodLogObject);
queryPeriod.equalTo("employeeId", currentEmployee.id);
queryPeriod.find().then(function (users) {
users.forEach(function(user) {
user.destroy({
success: function() {
// SUCCESS CODE HERE, IF YOU WANT
console.log('period log deleted');
},
error: function() {
// ERROR CODE HERE, IF YOU WANT
console.log('period log error delete');
}
});
});
}, function (error) {
response.error(error);
});
},
error: function(gameScore, error) {
// Execute any logic that should take place if the save fails.
// error is a Parse.Error with an error code and message.
}
});
},
error: function(myObject, error) {
// The delete failed.
// error is a Parse.Error with an error code and message.
}
});
}
// Show the inline delete-confirmation prompt.
$scope.confirmDelete = function () {
  $scope.deleteConfirmation = true;
}
// Hide the inline delete-confirmation prompt.
$scope.cancelDelete = function () {
  $scope.deleteConfirmation = false;
}
// Convert a byte count to megabytes with two decimals. Falsy results
// (0 bytes, undefined/NaN input) yield the number 0, matching the
// original truthiness check.
$scope.convertToMB = function (size) {
  var megabytes = size / 1000000;
  return megabytes ? megabytes.toFixed(2) : 0;
}
// CSS class for files above the 2 MB upload limit.
$scope.checkFileSize = function (size) {
  return size > 2000000 ? 'log-bold' : '';
}
// Creates a new Employee from the dialog form. If a picture was chosen it
// is uploaded first and its URL used as the avatar; otherwise the
// placeholder avatar is used. In both cases the next free fingerprint slot
// is taken from the pool and the shrunken pool is persisted to Settings.
$scope.addUser = function(){
  console.log($scope.uploadFile);
  // Shared creation path (was duplicated verbatim in both branches; the
  // else-branch also contained a dead `totalUsers + 1` assignment that was
  // immediately overwritten by the pool lookup).
  function createEmployee(avatarUrl) {
    var Employee = Parse.Object.extend("Employee");
    var employee = new Employee();
    var fingerPrintId = fingerPrintIdPool[0];
    removeA(fingerPrintIdPool, fingerPrintId);
    employee.set("employeeId", $scope.user.employeeId);
    employee.set("firstName", $scope.user.firstName);
    employee.set("lastName", $scope.user.lastName);
    employee.set("gender", $scope.user.gender);
    employee.set("age", $scope.user.age);
    employee.set("position", $scope.user.position);
    employee.set("avatarUrl", avatarUrl);
    employee.set("fingerPrintId", fingerPrintId.toString());
    employee.set("currentPeriodLog", {"id":null,"date":null,"sequence":0,"totalTime":0});
    employee.save(null, {
      success: function(result) {
        getAll();
        var Settings = Parse.Object.extend("Settings");
        var settings = new Settings();
        settings.id = settingId;
        settings.set("fingerPrintIdPool", fingerPrintIdPool);
        settings.save(null, {
          success: function(result) {
            $scope.userTableResult = [];
            console.log(result);
            getSettings();
          },
          error: function(gameScore, error) {
            // Save failed; error is a Parse.Error with code and message.
          }
        });
      },
      error: function(gameScore, error) {
        // Save failed; error is a Parse.Error with code and message.
      }
    });
  }
  if($scope.uploadFile){
    $http.post("http://172.24.1.1:1337/parse/files/image.jpg", $scope.uploadFile, {
      withCredentials: false,
      headers: {
        'X-Parse-Application-Id': 'myAppId',
        'X-Parse-REST-API-Key': 'myRestAPIKey',
        'Content-Type': 'image/jpeg'
      },
      transformRequest: angular.identity
    }).then(function(data) {
      console.log(data.data.url);
      createEmployee(data.data.url);
    },function(err){
      alert('Picture should not exceed 2mb, Please Try again.');
    });
  }
  else {
    createEmployee($scope.defaultProfPic);
  }
};
// Start fingerprint enrollment on the hardware via the socket bridge and
// flip the UI out of its idle scan state.
$scope.scanFinger = function () {
  console.log('Scan Finger');
  socket.emit('toPublicServer', 'm:enroll');
  $scope.isScanFinger = false;
}
// First phase of changing a fingerprint: if the user already has a stored
// print, return its slot id to the pool and ask the sensor to delete it
// (the 'Deleted!' socket event then advances the UI); if the user has no
// print yet, skip straight to the "continue" state.
$scope.updateFingerPrintInit = function(){
console.log('Delete Old FingerPrint');
$scope.isCurrentFingerDeleted = false;
idToBeDeleted = $scope.user.fingerPrintId;
console.log(idToBeDeleted);
if(idToBeDeleted){
console.log('not empty');
var Settings = Parse.Object.extend("Settings");
var settings = new Settings();
settings.id = settingId;
fingerPrintIdPool.push(parseInt(idToBeDeleted));
settings.set("fingerPrintIdPool", fingerPrintIdPool);
settings.save(null, {
success: function(result) {
// Execute any logic that should take place after the object is saved.
$scope.userTableResult = [];
getSettings();
socket.emit('toPublicServer', 'm:delete');
},
error: function(gameScore, error) {
// Execute any logic that should take place if the save fails.
// error is a Parse.Error with an error code and message.
}
});
} else {
// Nothing stored on the sensor for this user; go straight to enrolling.
$scope.isCurrentFingerDeleted = true;
$scope.scanStatus = 'Click to Continue';
$scope.buttonScanStatus = 'btn-success';
}
}
// Second phase of changing a fingerprint: once the old print is gone,
// start a fresh enrollment round on the sensor.
$scope.updateFingerPrintGo = function () {
  console.log('Update Scan Finger');
  socket.emit('toPublicServer', 'm:enroll');
}
// Protocol handler for the fingerprint sensor bridge. Each message is a
// plain string; the substring checks below drive the enrollment / deletion
// state machine and the scan-status UI.
// NOTE(review): tmp is fingerPrintIdPool[0] — if the pool is empty,
// tmp.toString() below throws; confirm the pool can never be exhausted.
socket.on('fromPublicServer', function(data){
console.log(data);
var tmp = fingerPrintIdPool[0];
// Sensor acknowledged enroll mode: send the slot id to store into.
if(stringContains(data, 'm:enroll')){
console.log(tmp);
socket.emit('toPublicServer', tmp.toString());
}
// Sensor acknowledged delete mode: send the slot id to erase.
if(stringContains(data, 'm:delete')){
console.log(tmp);
socket.emit('toPublicServer', idToBeDeleted.toString());
}
// Old print erased: advance the "change fingerprint" flow.
if(stringContains(data, 'Deleted!')){
$scope.isCurrentFingerDeleted = true;
socket.emit('toPublicServer', idToBeDeleted.toString());
$scope.scanStatus = 'Click to Continue';
$scope.buttonScanStatus = 'btn-success';
}
// Enrollment prompts — mirrored into the scan button label.
if(stringContains(data, 'command:place.finger.1')){
$scope.scanStatus = 'Please Place Finger';
$scope.buttonScanStatus = 'btn-warning';
}
if(stringContains(data, 'command:remove.finger')){
$scope.scanStatus = 'Please Remove Finger';
}
if(stringContains(data, 'command:place.finger.2')){
$scope.scanStatus = 'Place Same Finger Again';
}
if(stringContains(data, 'Ok.status:prints.matched.success')){
$scope.scanStatus = 'Prints Matched';
}
if(stringContains(data, 'Ok.status:print')){
console.log('print stored');
$scope.buttonScanStatus = 'btn-success';
$scope.scanStatus = 'Print Successfully Stored!';
}
// The two capture passes disagreed: restart enrollment with the same slot.
if(stringContains(data, 'status.prints.matched.failed')){
$scope.buttonScanStatus = 'btn-danger';
$scope.scanStatus = 'Prints Not Matched.';
alert('Prints Not Matched. Please Try Again.');
socket.emit('toPublicServer', tmp.toString());
}
// "found:<id>" — a print on the sensor matched; expose the id so the
// manual-delete UI can act on it.
if(stringContains(data, 'found:')){
console.log(tmp);
var tmpData = data;
tmpData = tmpData.split(":");
$scope.isDetectedFingerPrint = true;
$scope.detectedFingerPrintId = tmpData[1].toString();
}
});
// Abort any in-progress sensor operation when the dialog closes.
$scope.closeModal = function () {
  console.log('Close Modal');
  socket.emit('toPublicServer', 'x');
}
// Drop the socket listener when this scope dies so handlers do not pile up
// across controller instantiations.
$scope.$on("$destroy", function () {
  socket.removeAllListeners("fromPublicServer");
});
// True when `compare` occurs anywhere inside `data`.
function stringContains(data, compare) {
  return data.indexOf(compare) !== -1;
}
// Remove every occurrence of each value passed after `arr`, mutating the
// array in place, and return it. Stops early once the array is empty.
function removeA(arr) {
  var args = arguments;
  for (var i = args.length - 1; i >= 1 && arr.length; i--) {
    var victim = args[i];
    var at;
    while ((at = arr.indexOf(victim)) !== -1) {
      arr.splice(at, 1);
    }
  }
  return arr;
}
}]);
| vynci/deped-tas | app/scripts/controllers/chartContoller.js | JavaScript | apache-2.0 | 22,657 |
sap.ui.define([
"sap/ui/core/mvc/Controller",
"sap/base/util/deepClone"
], function (Controller, deepClone) {
"use strict";
// Demo card manifest: an Analytical line-chart card with a numeric KPI
// header. Header data comes from a static JSON request; content data comes
// from the RandomRevenue service declared under sap.ui5/services.
var oCardManifest = {
	"_version": "1.8.0",
	"sap.app": {
		"id": "dataMode1",
		"type": "card",
		"i18n": "i18n/i18n.properties"
	},
	"sap.ui5": {
		"services": {
			"RandomRevenue": {
				"factoryName": "cardsdemo.service.RandomRevenueFactory"
			}
		}
	},
	"sap.card": {
		"type": "Analytical",
		"header": {
			"type": "Numeric",
			"data": {
				"request": {
					"url": "../kpi.json"
				},
				"path": "/kpiInfos/kpi"
			},
			"title": "{{contactDetails}}",
			"subTitle": "Revenue",
			"unitOfMeasurement": "EUR",
			"mainIndicator": {
				"number": "{number}",
				"unit": "{unit}",
				"trend": "{trend}",
				"state": "{state}"
			},
			"details": "{details}",
			"sideIndicators": [
				{
					"title": "Target",
					"number": "{target/number}",
					"unit": "{target/unit}"
				},
				{
					"title": "Deviation",
					"number": "{deviation/number}",
					"unit": "%"
				}
			]
		},
		"content": {
			// onSubmit later injects an "updateInterval" into this data section
			// (on a clone of the manifest).
			"data": {
				"service": {
					"name": "RandomRevenue"
				},
				"path": "/"
			},
			"chartType": "Line",
			"legend": {
				"visible": true,
				"position": "Right",
				"alignment": "Center"
			},
			"plotArea": {
				"dataLabel": {
					"visible": true
				}
			},
			"title": {
				"text": "Line chart",
				"visible": true,
				"alignment": "Bottom"
			},
			"measureAxis": "valueAxis",
			"dimensionAxis": "categoryAxis",
			"dimensions": [
				{
					"label": "Weeks",
					"value": "{Week}"
				}
			],
			"measures": [
				{
					"label": "Revenue",
					"value": "{Revenue}"
				},
				{
					"label": "Cost",
					"value": "{Cost}"
				}
			]
		}
	}
};
return Controller.extend("sap.f.cardsdemo.controller.DataMode", {
onBeforeRendering: function () {
this.getView().byId("card").setManifest(oCardManifest);
this.getView().byId("card").setBaseUrl("./cardsdemo/cardcontent/objectcontent/");
},
onSelectionChange: function (oEvent) {
var sDataMode = oEvent.getParameter("item").getText();
this.getView().byId("card").setDataMode(sDataMode);
},
onTryToRefresh: function () {
var oCard = this.getView().byId("card");
if (oCard) {
this.getView().byId("card").refresh();
}
},
onSubmit: function (oEvent) {
var iInterval = oEvent.getParameter("value"),
oClone = deepClone(oCardManifest);
oClone["sap.card"].content.data.updateInterval = iInterval;
this.getView().byId("card").setManifest(oClone);
}
});
}); | SAP/openui5 | src/sap.f/test/sap/f/cardsdemo/controller/DataMode.controller.js | JavaScript | apache-2.0 | 2,613 |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.guvnor.client.screens.clients;
import javax.enterprise.context.Dependent;
import com.google.gwt.core.client.GWT;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Widget;
import org.uberfire.client.annotations.WorkbenchPartTitle;
import org.uberfire.client.annotations.WorkbenchPartView;
import org.uberfire.client.annotations.WorkbenchScreen;
@Dependent
@WorkbenchScreen(identifier = "oauthClientSettingsScreen")
public class OAuthClientSettingsScreenPresenter
        extends Composite {

    /** UiBinder contract binding the XML template to this presenter. */
    interface Binder extends UiBinder<Widget, OAuthClientSettingsScreenPresenter> {
    }

    private static final Binder uiBinder = GWT.create(Binder.class);

    public OAuthClientSettingsScreenPresenter() {
        initWidget(uiBinder.createAndBindUi(this));
    }

    /** @return this composite as the screen's view widget. */
    @WorkbenchPartView
    public Widget getWidget() {
        return this;
    }

    /** @return the tab title shown by the workbench. */
    @WorkbenchPartTitle
    public String getTitle() {
        return "Settings";
    }
}
| porcelli-forks/guvnor | guvnor-webapp/src/main/java/org/guvnor/client/screens/clients/OAuthClientSettingsScreenPresenter.java | Java | apache-2.0 | 1,631 |
/*
* Licensed to the Technische Universität Darmstadt under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The Technische Universität Darmstadt
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.clarin.webanno.api.annotation.layer;
import static de.tudarmstadt.ukp.clarin.webanno.support.lambda.LambdaBehavior.enabledWhen;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextArea;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.PropertyModel;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.coloring.ColoringRulesConfigurationPanel;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.layer.behaviors.OverlapModeSelect;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.layer.behaviors.ValidationModeSelect;
import de.tudarmstadt.ukp.clarin.webanno.model.AnnotationLayer;
/**
 * Traits editor panel for relation-type annotation layers: exposes
 * validation mode, overlap mode, coloring rules, cross-sentence linking and
 * an on-click JavaScript action.
 */
public class RelationLayerTraitsEditor
    extends LayerTraitsEditor_ImplBase<RelationLayerTraits, RelationLayerSupport>
{
    private static final long serialVersionUID = -9082045435380184514L;

    /**
     * @param aId wicket component id
     * @param aLayerSupport the relation layer support providing the traits
     * @param aLayer model of the layer being configured
     */
    public RelationLayerTraitsEditor(String aId, RelationLayerSupport aLayerSupport,
            IModel<AnnotationLayer> aLayer)
    {
        super(aId, aLayerSupport, aLayer);
    }

    /**
     * Builds the form components. Components bound to layer properties are
     * disabled when the layer has an attach feature (token-attached layers).
     */
    @Override
    protected void initializeForm(Form<RelationLayerTraits> aForm)
    {
        aForm.add(new ValidationModeSelect("validationMode", getLayerModel()));

        OverlapModeSelect overlapMode = new OverlapModeSelect("overlapMode", getLayerModel());
        // Not configurable for layers that attach to tokens (currently that is the only layer on
        // which we use the attach feature)
        overlapMode.add(enabledWhen(() -> getLayerModelObject().getAttachFeature() == null));
        aForm.add(overlapMode);

        aForm.add(new ColoringRulesConfigurationPanel("coloringRules", getLayerModel(),
                getTraitsModel().bind("coloringRules.rules")));

        CheckBox crossSentence = new CheckBox("crossSentence");
        crossSentence.setOutputMarkupPlaceholderTag(true);
        crossSentence.setModel(PropertyModel.of(getLayerModel(), "crossSentence"));
        // Not configurable for layers that attach to tokens (currently that is the only layer on
        // which we use the attach feature)
        crossSentence.add(enabledWhen(() -> getLayerModelObject().getAttachFeature() == null));
        aForm.add(crossSentence);

        TextArea<String> onClickJavascriptAction = new TextArea<String>("onClickJavascriptAction");
        onClickJavascriptAction
                .setModel(PropertyModel.of(getLayerModel(), "onClickJavascriptAction"));
        // Placeholder demonstrates the variables available to the script.
        onClickJavascriptAction.add(new AttributeModifier("placeholder",
                "alert($PARAM.PID + ' ' + $PARAM.PNAME + ' ' + $PARAM.DOCID + ' ' + "
                        + "$PARAM.DOCNAME + ' ' + $PARAM.fieldname);"));
        aForm.add(onClickJavascriptAction);
    }
}
| webanno/webanno | webanno-api-annotation/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/annotation/layer/RelationLayerTraitsEditor.java | Java | apache-2.0 | 3,634 |
/*------------------------------------------------------------------------------
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*------------------------------------------------------------------------------
*/
package com.harris.challenge.incidents;
import android.app.Activity;
/**
 * Placeholder screen for the "two places at once" incident. No behavior is
 * implemented yet beyond the base {@link Activity}.
 */
public class TwoPlacesAtOnce extends Activity {
}
| annchovie/edgwoodtripletherat | src/com/harris/challenge/incidents/TwoPlacesAtOnce.java | Java | apache-2.0 | 843 |
package com.epam.androidtraining.activities;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import com.epam.androidtraining.R;
import com.epam.androidtraining.adapter.MessagePagerAdapter;
import com.epam.androidtraining.model.MessageModel;
import java.util.ArrayList;
import java.util.List;
/**
 * Demo activity wiring a {@link ViewPager} to a {@link MessagePagerAdapter}.
 */
public class ViewPagerSampleActivity extends AppCompatActivity {

    // Backing data for the pager; populated in loadData().
    private List<MessageModel> mMessageList;

    @Override
    protected void onCreate(@Nullable final Bundle pSavedInstanceState) {
        super.onCreate(pSavedInstanceState);
        setContentView(R.layout.activity_view_pager);
        loadData();
        final ViewPager mViewPager = findViewById(R.id.view_pager);
        mViewPager.setAdapter(new MessagePagerAdapter(getSupportFragmentManager(), mMessageList));
    }

    // Initializes the message list. Currently empty; replace with a real data
    // source when available. (An unused, never-read COUNT_PAGES field was
    // removed.)
    private void loadData() {
        mMessageList = new ArrayList<>();
    }
}
| IstiN/android-training-2017 | app/src/main/java/com/epam/androidtraining/activities/ViewPagerSampleActivity.java | Java | apache-2.0 | 1,018 |
using System.Collections.Generic;
using System.Threading.Tasks;
namespace XTCSample.Services.Person
{
/// <summary>
/// Read access to people records.
/// </summary>
public interface IPeopleService
{
    /// <summary>
    /// Asynchronously fetches up to <paramref name="count"/> people
    /// (100 by default).
    /// </summary>
    Task<IEnumerable<Models.Person>> GetPeople(int count = 100);
}
} | mallibone/XtcSample | XTCSample/XTCSample/Services/Person/IPeopleService.cs | C# | apache-2.0 | 221 |
package com.zerodes.bta.domain;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinColumns;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.hibernate.annotations.ForeignKey;
import com.zerodes.bta.enums.CategoryTypeEnum;
/**
 * JPA entity for a user-scoped transaction category.
 *
 * <p>Equality is based on the business key (user, name, type) rather than the
 * surrogate {@code categoryId}. Note these fields are mutable via setters, so
 * changing them while the entity sits in a hash-based collection will break
 * lookups.
 */
@Entity
@Table(name = "TCategory")
@NamedQueries({
	@NamedQuery(name = "findCategoriesByUser", query = "select cat from Category cat where user = ?1 order by type, name"),
	@NamedQuery(name = "findCategoryByName", query = "select cat from Category cat where user = ?1 and name = ?2")
})
public class Category {
	/** Surrogate primary key, generated by the database. */
	@Column(name = "CategoryId", nullable = false)
	@Id
	@GeneratedValue(strategy = GenerationType.IDENTITY)
	private long categoryId;

	/** Owning user; every category belongs to exactly one user. */
	@ManyToOne
	@JoinColumns( { @JoinColumn(name = "UserId", referencedColumnName = "UserId", nullable = false) })
	@ForeignKey(name = "FK_Transaction_User")
	private User user;

	/** Display name; looked up per user via the findCategoryByName query. */
	@Column(name = "Name", length = 255, nullable = false)
	private String name;

	/** Category kind; persisted as the enum constant's name. */
	@Column(name = "Type", length = 20, nullable = false)
	@Enumerated(EnumType.STRING)
	private CategoryTypeEnum type;

	public long getCategoryId() {
		return categoryId;
	}

	public void setCategoryId(long categoryId) {
		this.categoryId = categoryId;
	}

	public User getUser() {
		return user;
	}

	public void setUser(User user) {
		this.user = user;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public CategoryTypeEnum getType() {
		return type;
	}

	public void setType(CategoryTypeEnum type) {
		this.type = type;
	}

	// Fix: was missing @Override (unlike hashCode/equals below).
	@Override
	public String toString() {
		return new ToStringBuilder(this)
			.append("categoryId", categoryId)
			.append("user", user)
			.append("name", name)
			.append("type", type)
			.toString();
	}

	@Override
	public int hashCode() {
		return new HashCodeBuilder()
			.append(user)
			.append(name)
			.append(type)
			.toHashCode();
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (!(obj instanceof Category)) {
			return false;
		}
		Category other = (Category) obj;
		return new EqualsBuilder()
			.append(user, other.user)
			.append(name, other.name)
			.append(type, other.type)
			.isEquals();
	}
}
| gdenning/bta | src/main/java/com/zerodes/bta/domain/Category.java | Java | apache-2.0 | 2,748 |
/*
* Copyright 2015-present Boundless Spatial Inc., http://boundlessgeo.com
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations
* under the License.
*/
/**
* A collection of useful constants.
* @ignore
*/
// Vendor-namespaced keys stored on layer/source/map metadata.
export const LAYER_VERSION_KEY = 'bnd:layer-version';
export const SOURCE_VERSION_KEY = 'bnd:source-version';
export const TITLE_KEY = 'bnd:title';
export const TIME_KEY = 'bnd:time';
export const TIME_START_KEY = 'bnd:start-time';
export const TIME_END_KEY = 'bnd:end-time';
export const DATA_VERSION_KEY = 'bnd:data-version';
export const GROUPS_KEY = 'mapbox:groups';
export const GROUP_KEY = 'mapbox:group';
export const LAYERLIST_HIDE_KEY = 'bnd:hide-layerlist';
export const QUERYABLE_KEY = 'bnd:queryable';
export const QUERY_ENDPOINT_KEY = 'bnd:query-endpoint';
export const QUERY_TYPE_KEY = 'bnd:query-type';
export const QUERY_PARAMS_KEY = 'bnd:query-params';
export const GEOMETRY_NAME_KEY = 'bnd:geometry-name';
export const MIN_ZOOM_KEY = 'bnd:minzoom';
export const MAX_ZOOM_KEY = 'bnd:maxzoom';

export const QUERY_TYPE_WFS = 'WFS';

export const DEFAULT_ZOOM = {
  MIN: 0,
  MAX: 22,
};

// Interaction type names, keyed by short alias.
export const INTERACTIONS = {
  modify: 'Modify',
  select: 'Select',
  point: 'Point',
  line: 'LineString',
  polygon: 'Polygon',
  box: 'Box',
  measure_point: 'measure:Point',
  measure_line: 'measure:LineString',
  measure_polygon: 'measure:Polygon',
};

// useful for deciding what is or is not a drawing interaction
INTERACTIONS.drawing = [
  INTERACTIONS.point,
  INTERACTIONS.line,
  INTERACTIONS.polygon,
  INTERACTIONS.box
];

// determine which is a measuring interaction
INTERACTIONS.measuring = [
  INTERACTIONS.measure_point,
  INTERACTIONS.measure_line,
  INTERACTIONS.measure_polygon,
];

/** Export all the const's in a convenient Object.
 * Fix: previously several exported constants (query, zoom and layer-list
 * keys) were missing from the default export; they are now included so the
 * default export mirrors the named exports.
 */
export default {
  LAYER_VERSION_KEY,
  SOURCE_VERSION_KEY,
  TITLE_KEY,
  TIME_KEY,
  GROUP_KEY,
  GROUPS_KEY,
  TIME_START_KEY,
  TIME_END_KEY,
  DATA_VERSION_KEY,
  LAYERLIST_HIDE_KEY,
  QUERYABLE_KEY,
  QUERY_ENDPOINT_KEY,
  QUERY_TYPE_KEY,
  QUERY_PARAMS_KEY,
  GEOMETRY_NAME_KEY,
  MIN_ZOOM_KEY,
  MAX_ZOOM_KEY,
  QUERY_TYPE_WFS,
  INTERACTIONS,
  DEFAULT_ZOOM,
};
| jjmulenex/sdk | src/constants.js | JavaScript | apache-2.0 | 2,452 |
// Copyright (c) 2016-2022 James Skimming. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
namespace Castle.DynamicProxy;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Reflection;
/// <summary>
/// Intercepts method invocations and determines if is an asynchronous method.
/// </summary>
public class AsyncDeterminationInterceptor : IInterceptor
{
    // Open generic MethodInfo for the private static HandleAsyncWithResult
    // method; closed over the concrete Task<T> result type in CreateHandler.
    private static readonly MethodInfo HandleAsyncMethodInfo =
        typeof(AsyncDeterminationInterceptor)
            .GetMethod(nameof(HandleAsyncWithResult), BindingFlags.Static | BindingFlags.NonPublic)!;

    // Cache of one delegate per Task<T> return type so the reflection and
    // delegate creation cost is paid once per type.
    private static readonly ConcurrentDictionary<Type, GenericAsyncHandler> GenericAsyncHandlers =
        new ConcurrentDictionary<Type, GenericAsyncHandler>();

    /// <summary>
    /// Initializes a new instance of the <see cref="AsyncDeterminationInterceptor"/> class.
    /// </summary>
    /// <param name="asyncInterceptor">The underlying <see cref="AsyncInterceptor"/>.</param>
    public AsyncDeterminationInterceptor(IAsyncInterceptor asyncInterceptor)
    {
        AsyncInterceptor = asyncInterceptor;
    }

    // Signature shared by all cached handlers, regardless of Task<T> type.
    private delegate void GenericAsyncHandler(IInvocation invocation, IAsyncInterceptor asyncInterceptor);

    // Classification of an intercepted method by its return type.
    private enum MethodType
    {
        Synchronous,
        AsyncAction,
        AsyncFunction,
    }

    /// <summary>
    /// Gets the underlying async interceptor.
    /// </summary>
    public IAsyncInterceptor AsyncInterceptor { get; }
/// <summary>
/// Intercepts a method <paramref name="invocation"/>.
/// </summary>
/// <param name="invocation">The method invocation.</param>
[DebuggerStepThrough]
public virtual void Intercept(IInvocation invocation)
{
MethodType methodType = GetMethodType(invocation.Method.ReturnType);
switch (methodType)
{
case MethodType.AsyncAction:
AsyncInterceptor.InterceptAsynchronous(invocation);
return;
case MethodType.AsyncFunction:
GetHandler(invocation.Method.ReturnType).Invoke(invocation, AsyncInterceptor);
return;
default:
AsyncInterceptor.InterceptSynchronous(invocation);
return;
}
}
/// <summary>
/// Gets the <see cref="MethodType"/> based upon the <paramref name="returnType"/> of the method invocation.
/// </summary>
private static MethodType GetMethodType(Type returnType)
{
// If there's no return type, or it's not a task, then assume it's a synchronous method.
if (returnType == typeof(void) || !typeof(Task).IsAssignableFrom(returnType))
return MethodType.Synchronous;
// The return type is a task of some sort, so assume it's asynchronous
return returnType.GetTypeInfo().IsGenericType ? MethodType.AsyncFunction : MethodType.AsyncAction;
}
/// <summary>
/// Gets the <see cref="GenericAsyncHandler"/> for the method invocation <paramref name="returnType"/>.
/// </summary>
private static GenericAsyncHandler GetHandler(Type returnType)
{
GenericAsyncHandler handler = GenericAsyncHandlers.GetOrAdd(returnType, CreateHandler);
return handler;
}
/// <summary>
/// Creates the generic delegate for the <paramref name="returnType"/> method invocation.
/// </summary>
private static GenericAsyncHandler CreateHandler(Type returnType)
{
Type taskReturnType = returnType.GetGenericArguments()[0];
MethodInfo method = HandleAsyncMethodInfo.MakeGenericMethod(taskReturnType);
return (GenericAsyncHandler)method.CreateDelegate(typeof(GenericAsyncHandler));
}
/// <summary>
/// This method is created as a delegate and used to make the call to the generic
/// <see cref="IAsyncInterceptor.InterceptAsynchronous{T}"/> method.
/// </summary>
/// <typeparam name="TResult">The type of the <see cref="Task{T}"/> <see cref="Task{T}.Result"/> of the method
/// <paramref name="invocation"/>.</typeparam>
private static void HandleAsyncWithResult<TResult>(IInvocation invocation, IAsyncInterceptor asyncInterceptor)
{
asyncInterceptor.InterceptAsynchronous<TResult>(invocation);
}
}
| JSkimming/Castle.Core.AsyncInterceptor | src/Castle.Core.AsyncInterceptor/AsyncDeterminationInterceptor.cs | C# | apache-2.0 | 4,340 |
{{-- Backend admin login page. The form (#form_login) is submitted via AJAX by
     /backend/js/login.js against `baseurl`; the .form-login-error block is
     shown by that script on failure. --}}
<!DOCTYPE html>
<html lang="zh_CN">
<head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <meta name="description" content="writor" />
    <meta name="author" content="writor.me" />
    <title>登录</title>
    <link rel="stylesheet" href="{{asset('/backend/js/jquery-ui/css/no-theme/jquery-ui-1.10.3.custom.min.css')}}" id="style-resource-1">
    <link rel="stylesheet" href="{{asset('/backend/css/font-icons/entypo/css/entypo.css')}}" id="style-resource-2">
    <link rel="stylesheet" href="{{asset('/backend/css/bootstrap.min.css')}}" id="style-resource-4">
    <link rel="stylesheet" href="{{asset('/backend/css/core.css')}}" id="style-resource-5">
    <link rel="stylesheet" href="{{asset('/backend/css/forms.css')}}" id="style-resource-7">
    <link rel="stylesheet" href="{{asset('/backend/css/custom.css')}}" id="style-resource-8">
    <script src="{{asset('/backend/js/jquery-1.11.0.min.js')}}"></script>
    {{-- Global base URL consumed by login.js when posting the credentials. --}}
    <script type="text/javascript">var baseurl = "{{ url('/') }}";</script>
    <!--[if lt IE 9]>
    <script src="{{asset('/backend/js/ie8-responsive-file-warning.js')}}"></script>
    <![endif]-->
    <!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
    <!--[if lt IE 9]>
    <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script>
    <script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script>
    <![endif]-->
</head>
<body class="page-body login-page login-form-fall">
<div class="login-container">
    <div class="login-header login-caret">
        <div class="login-content">
            <a href="http://github.com/Denniskevin" target="_blank" class="logo">
                <h1>Denniskevin</h1>
            </a>
            <!-- progress bar indicator -->
            {{-- Static placeholder; the percentage/label are animated by the
                 theme's login script during submission. --}}
            <div class="login-progressbar-indicator">
                <h3>43%</h3>
                <span>登录中...</span>
            </div>
        </div>
    </div>
    <div class="login-progressbar">
        <div></div>
    </div>
    <div class="login-form">
        <div class="login-content">
            {{-- Hidden by default; revealed by login.js on a failed attempt. --}}
            <div class="form-login-error">
                <h3>登录失败</h3>
                <p>用户名或密码错误</p>
            </div>
            {{-- NOTE(review): no CSRF token field here; if Laravel's CSRF
                 middleware guards this POST route, login.js must supply the
                 token itself — confirm. --}}
            <form method="post" role="form" id="form_login">
                <div class="form-group">
                    <div class="input-group">
                        <div class="input-group-addon"> <i class="entypo-user"></i></div>
                        <input type="text" class="form-control" name="username" id="username" placeholder="用户名" autocomplete="off" />
                    </div>
                </div>
                <div class="form-group">
                    <div class="input-group">
                        <div class="input-group-addon"> <i class="entypo-key"></i></div>
                        <input type="password" class="form-control" name="password" id="password" placeholder="密码" autocomplete="off" />
                    </div>
                </div>
                <div class="form-group">
                    <button type="submit" class="btn btn-primary btn-block btn-login">
                        登录
                        <i class="entypo-login"></i>
                    </button>
                </div>
            </form>
            <div class="login-bottom-links">
                <a href="{{url('/admin/auth/remind')}}" class="link">忘记密码?</a>
                <br />
                <a href="http://wiki.knewmei.com" target="_blank">关于</a>
                -
                <a href="http://knewmei.com" target="_blank">联系作者</a>
            </div>
        </div>
    </div>
</div>
<script src="{{asset('/backend/js/gsap/main-gsap.js')}}" id="script-resource-1"></script>
<script src="{{asset('/backend/js/jquery-ui/js/jquery-ui-1.10.3.minimal.min.js')}}" id="script-resource-2"></script>
<script src="{{asset('/backend/js/bootstrap.js')}}" id="script-resource-3"></script>
<script src="{{asset('/backend/js/joinable.js')}}" id="script-resource-4"></script>
<script src="{{asset('/backend/js/resizeable.js')}}" id="script-resource-5"></script>
<script src="{{asset('/backend/js/api.js')}}" id="script-resource-6"></script>
<script src="{{asset('/backend/js/cookies.min.js')}}" id="script-resource-7"></script>
<script src="{{asset('/backend/js/jquery.validate.min.js')}}" id="script-resource-8"></script>
<script src="{{asset('/backend/js/login.js')}}" id="script-resource-9"></script>
</body>
</html>
</html> | Denniskevin/dennis-kevin | app/views/backend/pages/login.blade.php | PHP | apache-2.0 | 4,861 |
//
// Copyright 2016 R. Stanley Hum <r.stanley.hum@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using HandbookApp.Utilities;
using HandbookApp.ViewModels;
using ReactiveUI;
using Xamarin.Forms;
namespace HandbookApp.Views
{
/// <summary>
/// Dead-end page shown when the content server rejects the user's credentials.
/// Explains the situation and offers a single "Continue" action that clears the
/// unauthorized state via the view model.
/// </summary>
public class UnauthorizedErrorPage : BasePage<UnauthorizedErrorViewModel>
{
    private Button goBackMainPageButton;
    private Label titleLabel;
    private Label instructionsLabel;

    /// <summary>
    /// Builds the page: a centered title, an explanatory paragraph, and the
    /// continue button, inside a padded vertical stack.
    /// </summary>
    protected override void SetupViewElements()
    {
        base.SetupViewElements();

        // This page should not be navigated away from via the back affordances.
        NavigationPage.SetHasBackButton(this, false);
        NavigationPage.SetHasNavigationBar(this, false);

        titleLabel = new Label {
            Text = "Unauthorized Access Error",
            HorizontalOptions = LayoutOptions.Center
        };

        instructionsLabel = new Label {
            Text = "You are reaching this page because the Content Server does not recognize who you are. You will need to login again with the account that you first logged in with. Normally this does not happen if you check the app once a day. If you check your app once a day, please contact your app administrator.",
            Margin = new Thickness(5, 20, 5, 5)
        };

        goBackMainPageButton = new Button { Text = "Continue" };

        var layout = new StackLayout { Padding = new Thickness(20d) };
        layout.Children.Add(titleLabel);
        layout.Children.Add(instructionsLabel);
        layout.Children.Add(goBackMainPageButton);
        Content = layout;
    }

    /// <summary>
    /// Wires the continue button to the view model's ClearUnauthorized command.
    /// </summary>
    protected override void SetupObservables()
    {
        this.BindCommand(ViewModel, vm => vm.ClearUnauthorized, c => c.goBackMainPageButton);
    }
}
| humrs/HandbookApp | HandbookApp/HandbookApp/Views/UnauthorizedErrorPage.cs | C# | apache-2.0 | 2,158 |
/*
* Copyright (c) 2014. by Robusta Code and individual contributors
* as indicated by the @authors tag. See the copyright.txt in the
* distribution for a full listing of individual contributors.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.robusta.rra.controller;
/**
* @author Nicolas Zozol
*/
/**
 * Unchecked exception thrown by controllers when request handling fails.
 *
 * @author Nicolas Zozol
 */
public class ControllerException extends RuntimeException {

    // RuntimeException is Serializable; pin the serialized form explicitly so
    // recompilation does not silently change the computed stream UID.
    private static final long serialVersionUID = 1L;

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param t the cause of the failure
     */
    public ControllerException( Throwable t ) {
        super( t );
    }

    /**
     * Creates an exception with a descriptive message.
     *
     * @param message the detail message
     */
    public ControllerException( String message ) {
        super( message );
    }

    /**
     * Creates an exception with a descriptive message and an underlying cause.
     *
     * @param message the detail message
     * @param ex the cause of the failure
     */
    public ControllerException( String message, Exception ex ) {
        super( message, ex );
    }
}
| robusta-code/rra | src/main/java/io/robusta/rra/controller/ControllerException.java | Java | apache-2.0 | 1,527 |
package org.apache.lucene.search;
/**
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.index.*;
/**
 * Position information about one term of a phrase, backed by a
 * {@link TermPositions} stream. Instances are chained into lists via
 * {@link #next} by the phrase scorers.
 */
final class PhrasePositions {
  int doc;              // current doc
  int position;         // position in doc
  int count;            // remaining pos in this doc
  int offset;           // position in phrase

  TermPositions tp;     // stream of positions
  PhrasePositions next; // used to make lists

  PhrasePositions(TermPositions t, int o) {
    tp = t;
    offset = o;
  }

  /** Advances to the next document; returns false (closing the stream) at end. */
  final boolean next() throws IOException {
    return advanced(tp.next());
  }

  /** Skips to the first document >= target; returns false (closing the stream) at end. */
  final boolean skipTo(int target) throws IOException {
    return advanced(tp.skipTo(target));
  }

  /**
   * Shared bookkeeping for next()/skipTo(): on exhaustion, closes the stream
   * and parks {@link #doc} at Integer.MAX_VALUE so this entry sorts last;
   * otherwise caches the current doc and resets the in-doc position.
   */
  private boolean advanced(boolean hasMore) throws IOException {
    if (!hasMore) {
      tp.close();                // close stream
      doc = Integer.MAX_VALUE;   // sentinel value
      return false;
    }
    doc = tp.doc();
    position = 0;
    return true;
  }

  /** Prepares iteration over positions in the current doc and reads the first one. */
  final void firstPosition() throws IOException {
    count = tp.freq();           // read first pos
    nextPosition();
  }

  /**
   * Advances to the next occurrence within the current doc; the stored position
   * is shifted by this term's offset within the phrase so that matching terms
   * of a phrase share the same adjusted position.
   */
  final boolean nextPosition() throws IOException {
    if (count-- > 0) {           // read subsequent pos's
      position = tp.nextPosition() - offset;
      return true;
    }
    return false;
  }
}
| lpxz/grail-lucene358684 | src/java/org/apache/lucene/search/PhrasePositions.java | Java | apache-2.0 | 1,948 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "udf/udf.h"

#include <algorithm>
#include <iostream>
#include <sstream>
#include <utility>

#include <assert.h>

#include <gutil/port.h> // for aligned_malloc
#ifndef IMPALA_UDF_SDK_BUILD
#include "util/error-util.h"
#endif
// Be careful what this includes since this needs to be linked into the UDF's
// binary. For example, it would be unfortunate if they had a random dependency
// on libhdfs.
#include "udf/udf-internal.h"
#if defined(IMPALA_UDF_SDK_BUILD) && IMPALA_UDF_SDK_BUILD
// For the SDK build, we are building the .lib that the developers would use to
// write UDFs. They want to link against this to run their UDFs in a test environment.
// Pulling in free-pool is very undesirable since it pulls in many other libraries.
// Instead, we'll implement a dummy version that is not used.
// When they build their library to a .so, they'd use the version of FunctionContext
// in the main binary, which does include FreePool.
#define VLOG_ROW while(false) std::cout
#define VLOG_ROW_IS_ON (false)
namespace impala {

// Minimal stand-ins for the real runtime classes, compiled only in the SDK
// build so UDF developers can link and test without the full Impala runtime.

// No-op memory tracker; the SDK build does no accounting.
class MemTracker {
 public:
  void Consume(int64_t bytes) { }
  void Release(int64_t bytes) { }
};

// Thin malloc/realloc/free wrapper that mirrors the real FreePool's interface
// and keeps a net allocation count so Close() can still detect leaks.
class FreePool {
 public:
  FreePool(MemPool*) : net_allocations_(0) { }

  uint8_t* Allocate(int byte_size) {
    ++net_allocations_;
    return reinterpret_cast<uint8_t*>(malloc(byte_size));
  }

  uint8_t* Reallocate(uint8_t* ptr, int byte_size) {
    // Note: does not adjust net_allocations_; a realloc of NULL behaves like
    // an allocation but is not counted here.
    return reinterpret_cast<uint8_t*>(realloc(ptr, byte_size));
  }

  void Free(uint8_t* ptr) {
    --net_allocations_;
    free(ptr);
  }

  MemTracker* mem_tracker() { return &mem_tracker_; }
  int64_t net_allocations() const { return net_allocations_; }

 private:
  MemTracker mem_tracker_;
  int64_t net_allocations_;
};

// Stub runtime state: methods that should never be reached in the SDK build
// assert(false).
class RuntimeState {
 public:
  void SetQueryStatus(const std::string& error_msg) {
    assert(false);
  }

  bool abort_on_error() const {
    assert(false);
    return false;
  }

  bool decimal_v2() const {
    assert(false);
    return false;
  }

  bool LogError(const std::string& error) {
    assert(false);
    return false;
  }

  // NOTE(review): these return std::string BY VALUE, while
  // FunctionContext::user()/effective_user() call .c_str() on the returned
  // temporary — in the SDK build that pointer dangles. Confirm against the
  // real RuntimeState (which presumably returns a reference).
  const std::string connected_user() const { return ""; }
  const std::string GetEffectiveUser() const { return ""; }
};

}
#else
#include "exprs/anyval-util.h"
#include "runtime/free-pool.h"
#include "runtime/mem-tracker.h"
#include "runtime/runtime-state.h"
#endif
#include "common/names.h"
#include "common/compiler-util.h"
using namespace impala;
using namespace impala_udf;
using std::pair;
// Out-of-class definition required for ODR-use of the in-class constant.
const int FunctionContextImpl::VARARGS_BUFFER_ALIGNMENT;

// Type/symbol names referenced by LLVM codegen when cross-compiling UDFs.
const char* FunctionContextImpl::LLVM_FUNCTIONCONTEXT_NAME =
    "class.impala_udf::FunctionContext";
const char* FunctionContextImpl::GET_CONST_FN_ATTR_SYMBOL =
    "_ZN6impala19FunctionContextImpl14GetConstFnAttrENS0_11ConstFnAttrEi";

// Cap on warnings recorded per context; further AddWarning() calls are dropped.
static const int MAX_WARNINGS = 1000;

static_assert(__BYTE_ORDER == __LITTLE_ENDIAN,
    "DecimalVal memory layout assumes little-endianness");
// Convenience overload for scalar UDFs: forwards with an INVALID_TYPE
// intermediate type (only UDAs have a distinct intermediate type).
FunctionContext* FunctionContextImpl::CreateContext(RuntimeState* state, MemPool* pool,
    const FunctionContext::TypeDesc& return_type,
    const vector<FunctionContext::TypeDesc>& arg_types,
    int varargs_buffer_size, bool debug) {
  FunctionContext::TypeDesc invalid_type;
  invalid_type.type = FunctionContext::INVALID_TYPE;
  invalid_type.precision = 0;
  invalid_type.scale = 0;
  return FunctionContextImpl::CreateContext(state, pool, invalid_type, return_type,
      arg_types, varargs_buffer_size, debug);
}

// Allocates and wires up a FunctionContext. The varargs buffer is allocated
// with VARARGS_BUFFER_ALIGNMENT (over-aligned, presumably for wide values such
// as DecimalVal — see udf-internal.h). The caller owns the returned context
// and must call Close() before destroying it.
FunctionContext* FunctionContextImpl::CreateContext(RuntimeState* state, MemPool* pool,
    const FunctionContext::TypeDesc& intermediate_type,
    const FunctionContext::TypeDesc& return_type,
    const vector<FunctionContext::TypeDesc>& arg_types,
    int varargs_buffer_size, bool debug) {
  impala_udf::FunctionContext* ctx = new impala_udf::FunctionContext();
  ctx->impl_->state_ = state;
  ctx->impl_->pool_ = new FreePool(pool);
  ctx->impl_->intermediate_type_ = intermediate_type;
  ctx->impl_->return_type_ = return_type;
  ctx->impl_->arg_types_ = arg_types;
  ctx->impl_->varargs_buffer_ = reinterpret_cast<uint8_t*>(
      aligned_malloc(varargs_buffer_size, VARARGS_BUFFER_ALIGNMENT));
  ctx->impl_->varargs_buffer_size_ = varargs_buffer_size;
  ctx->impl_->debug_ = debug;
  VLOG_ROW << "Created FunctionContext: " << ctx << " with pool " << ctx->impl_->pool_;
  return ctx;
}

// Clones this context (e.g. for another execution thread). Constant args and
// the fragment-local state pointer are shared with the original; the
// thread-local state pointer is intentionally not copied.
FunctionContext* FunctionContextImpl::Clone(MemPool* pool) {
  impala_udf::FunctionContext* new_context =
      CreateContext(state_, pool, intermediate_type_, return_type_, arg_types_,
          varargs_buffer_size_, debug_);
  new_context->impl_->constant_args_ = constant_args_;
  new_context->impl_->fragment_local_fn_state_ = fragment_local_fn_state_;
  return new_context;
}
FunctionContext::FunctionContext() : impl_(new FunctionContextImpl(this)) {
}

FunctionContext::~FunctionContext() {
  // Close() must run first: it frees local allocations, reports leaks, and
  // releases the varargs buffer.
  assert(impl_->closed_ && "FunctionContext wasn't closed!");
  delete impl_->pool_;
  delete impl_;
}

// Initializes all members to empty/NULL; real wiring happens in CreateContext.
FunctionContextImpl::FunctionContextImpl(FunctionContext* parent)
  : varargs_buffer_(NULL),
    varargs_buffer_size_(0),
    context_(parent),
    pool_(NULL),
    state_(NULL),
    debug_(false),
    version_(FunctionContext::v1_3),
    num_warnings_(0),
    num_updates_(0),
    num_removes_(0),
    thread_local_fn_state_(NULL),
    fragment_local_fn_state_(NULL),
    external_bytes_tracked_(0),
    closed_(false) {
}
// Tears the context down: releases local allocations, reports any memory the
// UDF leaked (via Allocate() or TrackAllocation()), and frees the varargs
// buffer. Idempotent; must be called before the FunctionContext is destroyed.
void FunctionContextImpl::Close() {
  if (closed_) return;

  // Free local allocations first so we can detect leaks through any remaining allocations
  // (local allocations cannot be leaked, at least not by the UDF)
  FreeLocalAllocations();

  stringstream error_ss;
  if (!debug_) {
    // Non-debug mode only has the pool's net allocation counter to go by.
    if (pool_->net_allocations() > 0) {
      error_ss << "Memory leaked via FunctionContext::Allocate() "
               << "or FunctionContext::AllocateLocal()";
    } else if (pool_->net_allocations() < 0) {
      error_ss << "FunctionContext::Free() called on buffer that was already freed or "
                  "was not allocated.";
    }
  } else if (!allocations_.empty()) {
    // Debug mode tracked every Allocate() individually, so report exact bytes.
    int bytes = 0;
    for (map<uint8_t*, int>::iterator i = allocations_.begin();
         i != allocations_.end(); ++i) {
      bytes += i->second;
    }
    error_ss << bytes << " bytes leaked via FunctionContext::Allocate()";
    allocations_.clear();
  }

  if (external_bytes_tracked_ > 0) {
    if (!error_ss.str().empty()) error_ss << ", ";
    error_ss << external_bytes_tracked_
             << " bytes leaked via FunctionContext::TrackAllocation()";
    // This isn't ideal because the memory is still leaked, but don't track it so our
    // accounting stays sane.
    // TODO: we need to modify the memtrackers to allow leaked user-allocated memory.
    context_->Free(external_bytes_tracked_);
  }

  if (!error_ss.str().empty()) {
    // Treat memory leaks as errors in the SDK build so they trigger test failures, but
    // don't blow up actual queries due to leaks (unless abort_on_error is true).
    // TODO: revisit abort_on_error case. Setting the error won't do anything in close.
    if (state_ == NULL || state_->abort_on_error()) {
      context_->SetError(error_ss.str().c_str());
    } else {
      context_->AddWarning(error_ss.str().c_str());
    }
  }

  free(varargs_buffer_);
  varargs_buffer_ = NULL;
  closed_ = true;
}
// Simple accessors delegating to the pimpl.

FunctionContext::ImpalaVersion FunctionContext::version() const {
  return impl_->version_;
}

const char* FunctionContext::user() const {
  return impl_->state_ == NULL ? NULL : impl_->state_->connected_user().c_str();
}

const char* FunctionContext::effective_user() const {
  return impl_->state_ == NULL ? NULL : impl_->state_->GetEffectiveUser().c_str();
}

FunctionContext::UniqueId FunctionContext::query_id() const {
  UniqueId id;
#if defined(IMPALA_UDF_SDK_BUILD) && IMPALA_UDF_SDK_BUILD
  // The SDK build has no real query, so the id is all zeros.
  id.hi = id.lo = 0;
#else
  id.hi = impl_->state_->query_id().hi;
  id.lo = impl_->state_->query_id().lo;
#endif
  return id;
}

bool FunctionContext::has_error() const {
  return !impl_->error_msg_.empty();
}

const char* FunctionContext::error_msg() const {
  return has_error() ? impl_->error_msg_.c_str() : NULL;
}
// Reports an allocation failure (NULL buffer) as a UDF error; on success,
// checks the query memory limit. Returns true iff the allocation succeeded.
inline bool FunctionContextImpl::CheckAllocResult(const char* fn_name,
    uint8_t* buf, int64_t byte_size) {
  if (UNLIKELY(buf == NULL)) {
    stringstream ss;
    ss << string(fn_name) << "() failed to allocate " << byte_size << " bytes.";
    context_->SetError(ss.str().c_str());
    return false;
  }
  CheckMemLimit(fn_name, byte_size);
  return true;
}

// Marks the query as over its memory limit if any tracker limit was exceeded.
// Compiled out (no-op) in the SDK build.
inline void FunctionContextImpl::CheckMemLimit(const char* fn_name,
    int64_t byte_size) {
#ifndef IMPALA_UDF_SDK_BUILD
  MemTracker* mem_tracker = pool_->mem_tracker();
  if (mem_tracker->AnyLimitExceeded()) {
    ErrorMsg msg = ErrorMsg(TErrorCode::UDF_MEM_LIMIT_EXCEEDED, string(fn_name));
    state_->SetMemLimitExceeded(mem_tracker, byte_size, &msg);
  }
#endif
}

// UDF-facing allocation; the buffer lives until the matching Free() or Close().
// Returns NULL (with the error already set on the context) on failure. In
// debug mode the buffer is recorded for leak reporting and poisoned with 0xff.
uint8_t* FunctionContext::Allocate(int byte_size) noexcept {
  assert(!impl_->closed_);
  uint8_t* buffer = impl_->pool_->Allocate(byte_size);
  if (UNLIKELY(!impl_->CheckAllocResult("FunctionContext::Allocate",
      buffer, byte_size))) {
    return NULL;
  }
  if (UNLIKELY(impl_->debug_)) {
    impl_->allocations_[buffer] = byte_size;
    memset(buffer, 0xff, byte_size);
  }
  VLOG_ROW << "Allocate: FunctionContext=" << this
           << " size=" << byte_size
           << " result=" << reinterpret_cast<void*>(buffer);
  return buffer;
}

// Resizes a buffer previously returned by Allocate()/Reallocate(). Returns
// NULL on failure (error already set); debug bookkeeping follows the pointer.
uint8_t* FunctionContext::Reallocate(uint8_t* ptr, int byte_size) noexcept {
  assert(!impl_->closed_);
  VLOG_ROW << "Reallocate: FunctionContext=" << this
           << " size=" << byte_size
           << " ptr=" << reinterpret_cast<void*>(ptr);
  uint8_t* new_ptr = impl_->pool_->Reallocate(ptr, byte_size);
  if (UNLIKELY(!impl_->CheckAllocResult("FunctionContext::Reallocate",
      new_ptr, byte_size))) {
    return NULL;
  }
  if (UNLIKELY(impl_->debug_)) {
    impl_->allocations_.erase(ptr);
    impl_->allocations_[new_ptr] = byte_size;
  }
  VLOG_ROW << "FunctionContext=" << this
           << " reallocated: " << reinterpret_cast<void*>(new_ptr);
  return new_ptr;
}

// Releases a buffer from Allocate()/Reallocate(). NULL is a no-op. In debug
// mode, freeing an untracked pointer raises a UDF error instead of crashing.
void FunctionContext::Free(uint8_t* buffer) noexcept {
  assert(!impl_->closed_);
  if (buffer == NULL) return;
  VLOG_ROW << "Free: FunctionContext=" << this << " "
           << reinterpret_cast<void*>(buffer);
  if (impl_->debug_) {
    map<uint8_t*, int>::iterator it = impl_->allocations_.find(buffer);
    if (it != impl_->allocations_.end()) {
      // fill in garbage value into the buffer to increase the chance of detecting misuse
      memset(buffer, 0xff, it->second);
      impl_->allocations_.erase(it);
      impl_->pool_->Free(buffer);
    } else {
      SetError("FunctionContext::Free() called on buffer that is already freed or was "
          "not allocated.");
    }
  } else {
    impl_->pool_->Free(buffer);
  }
}
// Records memory the UDF allocated itself (outside Allocate()) against the
// query's memory tracker; must be balanced by Free(int64_t).
void FunctionContext::TrackAllocation(int64_t bytes) {
  assert(!impl_->closed_);
  impl_->external_bytes_tracked_ += bytes;
  impl_->pool_->mem_tracker()->Consume(bytes);
  impl_->CheckMemLimit("FunctionContext::TrackAllocation", bytes);
}

// Releases bytes previously recorded via TrackAllocation(). Releasing more
// than was tracked is reported as a UDF error and the accounting is untouched.
void FunctionContext::Free(int64_t bytes) {
  assert(!impl_->closed_);
  if (bytes > impl_->external_bytes_tracked_) {
    stringstream ss;
    ss << "FunctionContext::Free() called with " << bytes << " bytes, but only "
       << impl_->external_bytes_tracked_ << " bytes are tracked via "
       << "FunctionContext::TrackAllocation()";
    SetError(ss.str().c_str());
    return;
  }
  impl_->external_bytes_tracked_ -= bytes;
  impl_->pool_->mem_tracker()->Release(bytes);
}

// Records a UDF error. Only the FIRST error sticks; subsequent calls are
// ignored. Also fails the query via the runtime state, if one is attached.
void FunctionContext::SetError(const char* error_msg) {
  assert(!impl_->closed_);
  if (impl_->error_msg_.empty()) {
    impl_->error_msg_ = error_msg;
    stringstream ss;
    ss << "UDF ERROR: " << error_msg;
    if (impl_->state_ != NULL) impl_->state_->SetQueryStatus(ss.str());
  }
}

// TODO: is there a way to tell the user the expr in a reasonable way?
// Plumb the ToSql() from the FE?
// TODO: de-dup warnings
// Records a non-fatal warning. Returns false once MAX_WARNINGS is exceeded
// (further warnings are dropped). Without a runtime state, logs to stderr.
bool FunctionContext::AddWarning(const char* warning_msg) {
  assert(!impl_->closed_);
  if (impl_->num_warnings_++ >= MAX_WARNINGS) return false;
  stringstream ss;
  ss << "UDF WARNING: " << warning_msg;
  if (impl_->state_ != NULL) {
#ifndef IMPALA_UDF_SDK_BUILD
    // If this is called while the query is being closed, the runtime state log will have
    // already been displayed to the user. Also log the warning so there's some chance
    // the user will actually see it.
    // TODO: somehow print the full error log in the shell? This is a problem for any
    // function using LogError() during close.
    LOG(WARNING) << ss.str();
    return impl_->state_->LogError(ErrorMsg(TErrorCode::GENERAL, ss.str()));
#else
    // In the SDK build, simply forward this call to the dummy RuntimeState.
    return impl_->state_->LogError(ss.str());
#endif
  } else {
    cerr << ss.str() << endl;
    return true;
  }
}

// Stores an opaque state pointer for the UDF at the given scope. An unknown
// scope is reported as a UDF error rather than asserting.
void FunctionContext::SetFunctionState(FunctionStateScope scope, void* ptr) {
  assert(!impl_->closed_);
  switch (scope) {
    case THREAD_LOCAL:
      impl_->thread_local_fn_state_ = ptr;
      break;
    case FRAGMENT_LOCAL:
      impl_->fragment_local_fn_state_ = ptr;
      break;
    default:
      stringstream ss;
      ss << "Unknown FunctionStateScope: " << scope;
      SetError(ss.str().c_str());
  }
}
// Allocates a buffer whose lifetime is managed by the context itself: the UDF
// never frees it; FreeLocalAllocations() reclaims everything in bulk (e.g. per
// row batch). Returns NULL with the error set on allocation failure.
uint8_t* FunctionContextImpl::AllocateLocal(int64_t byte_size) noexcept {
  assert(!closed_);
  uint8_t* buffer = pool_->Allocate(byte_size);
  if (UNLIKELY(!CheckAllocResult("FunctionContextImpl::AllocateLocal",
      buffer, byte_size))) {
    return NULL;
  }
  local_allocations_.push_back(buffer);
  VLOG_ROW << "Allocate Local: FunctionContext=" << context_
           << " size=" << byte_size
           << " result=" << reinterpret_cast<void*>(buffer);
  return buffer;
}

// Resizes a local allocation, keeping the tracking list consistent when the
// pool hands back a different pointer.
uint8_t* FunctionContextImpl::ReallocateLocal(uint8_t* ptr, int64_t byte_size) noexcept {
  assert(!closed_);
  uint8_t* new_ptr = pool_->Reallocate(ptr, byte_size);
  if (UNLIKELY(!CheckAllocResult("FunctionContextImpl::ReallocateLocal",
      new_ptr, byte_size))) {
    return NULL;
  }
  if (new_ptr != ptr) {
    // The old pointer must be replaced in the tracking list; search from the
    // back because the most recently (re)allocated buffers live there.
    auto v = std::find(local_allocations_.rbegin(), local_allocations_.rend(), ptr);
    assert(v != local_allocations_.rend());
    // Avoid perf issue; move to end of local allocations on any reallocation and
    // always start the search from there.
    if (v != local_allocations_.rbegin()) {
      *v = *local_allocations_.rbegin();
    }
    *local_allocations_.rbegin() = new_ptr;
  }
  VLOG_ROW << "Reallocate Local: FunctionContext=" << context_
           << " ptr=" << reinterpret_cast<void*>(ptr) << " size=" << byte_size
           << " result=" << reinterpret_cast<void*>(new_ptr);
  return new_ptr;
}

// Frees every buffer handed out by AllocateLocal()/ReallocateLocal().
void FunctionContextImpl::FreeLocalAllocations() noexcept {
  assert(!closed_);
  if (VLOG_ROW_IS_ON) {
    stringstream ss;
    ss << "Free local allocations: FunctionContext=" << context_
       << " pool=" << pool_ << endl;
    for (int i = 0; i < local_allocations_.size(); ++i) {
      ss << "  " << reinterpret_cast<void*>(local_allocations_[i]) << endl;
    }
    VLOG_ROW << ss.str();
  }
  for (int i = 0; i < local_allocations_.size(); ++i) {
    pool_->Free(local_allocations_[i]);
  }
  local_allocations_.clear();
}
// Takes ownership of the constant argument values for this function call.
void FunctionContextImpl::SetConstantArgs(vector<AnyVal*>&& constant_args) {
  // The parameter is an rvalue reference: move instead of copying the vector.
  constant_args_ = std::move(constant_args);
}

// Takes ownership of the (expr, value) pairs for non-constant arguments.
void FunctionContextImpl::SetNonConstantArgs(
    vector<pair<Expr*, AnyVal*>>&& non_constant_args) {
  non_constant_args_ = std::move(non_constant_args);
}
// Note: this function crashes LLVM's JIT in expr-test if it's xcompiled. Do not move to
// expr-ir.cc. This could probably use further investigation.
// Allocates a locally-tracked buffer of 'len' bytes for the string. On failure
// (or len > 1 GB) the value becomes NULL with len 0.
StringVal::StringVal(FunctionContext* context, int len) noexcept : len(len), ptr(NULL) {
  if (UNLIKELY(len > StringVal::MAX_LENGTH)) {
    context->SetError("String length larger than allowed limit of "
        "1 GB character data.");
    // NOTE(review): 'len' here is the constructor PARAMETER, which shadows the
    // member 'len' initialized above — the member keeps the oversized value.
    // Benign as long as callers check is_null first; confirm.
    len = 0;
    is_null = true;
  } else {
    ptr = context->impl()->AllocateLocal(len);
    if (UNLIKELY(ptr == NULL && len > 0)) {
#ifndef IMPALA_UDF_SDK_BUILD
      assert(!context->impl()->state()->GetQueryStatus().ok());
#endif
      len = 0;
      is_null = true;
    }
  }
}

// Allocates a StringVal and copies 'len' bytes from 'buf' into it. The result
// is null if allocation failed (error already set on the context).
StringVal StringVal::CopyFrom(FunctionContext* ctx, const uint8_t* buf, size_t len) noexcept {
  StringVal result(ctx, len);
  if (LIKELY(!result.is_null)) {
    memcpy(result.ptr, buf, len);
  }
  return result;
}

// Grows/shrinks this value's buffer to 'new_len' bytes in place. Returns false
// on failure; on an over-limit request the value also becomes NULL.
bool StringVal::Resize(FunctionContext* ctx, int new_len) noexcept {
  if (UNLIKELY(new_len > StringVal::MAX_LENGTH)) {
    ctx->SetError("String length larger than allowed limit of 1 GB character data.");
    len = 0;
    is_null = true;
    return false;
  }
  auto* new_ptr = ctx->impl()->ReallocateLocal(ptr, new_len);
  if (new_ptr != nullptr) {
    ptr = new_ptr;
    len = new_len;
    return true;
  }
  return false;
}

// TODO: why doesn't libudasample.so build if this in udf-ir.cc?
// Returns the type of argument 'arg_idx', or NULL if the index is out of range.
const FunctionContext::TypeDesc* FunctionContext::GetArgType(int arg_idx) const {
  if (arg_idx < 0 || arg_idx >= impl_->arg_types_.size()) return NULL;
  return &impl_->arg_types_[arg_idx];
}
// Byte size of a value of 'type'; 0 in the SDK build, where the column-type
// machinery is unavailable.
static int GetTypeByteSize(const FunctionContext::TypeDesc& type) {
#if defined(IMPALA_UDF_SDK_BUILD) && IMPALA_UDF_SDK_BUILD
  return 0;
#else
  return AnyValUtil::TypeDescToColumnType(type).GetByteSize();
#endif
}

// Instance wrapper over the static overload below (this symbol is also invoked
// by codegen; see GET_CONST_FN_ATTR_SYMBOL).
int FunctionContextImpl::GetConstFnAttr(FunctionContextImpl::ConstFnAttr t, int i) {
  return GetConstFnAttr(state_, return_type_, arg_types_, t, i);
}

// Resolves a compile-time-constant attribute of the function's signature.
// 'i' selects the argument for ARG_TYPE_* attributes and must be -1 for the
// RETURN_TYPE_* attributes; precision/scale are only valid for DECIMAL.
int FunctionContextImpl::GetConstFnAttr(const RuntimeState* state,
    const FunctionContext::TypeDesc& return_type,
    const vector<FunctionContext::TypeDesc>& arg_types,
    ConstFnAttr t, int i) {
  switch (t) {
    case RETURN_TYPE_SIZE:
      assert(i == -1);
      return GetTypeByteSize(return_type);
    case RETURN_TYPE_PRECISION:
      assert(i == -1);
      assert(return_type.type == FunctionContext::TYPE_DECIMAL);
      return return_type.precision;
    case RETURN_TYPE_SCALE:
      assert(i == -1);
      assert(return_type.type == FunctionContext::TYPE_DECIMAL);
      return return_type.scale;
    case ARG_TYPE_SIZE:
      assert(i >= 0);
      assert(i < arg_types.size());
      return GetTypeByteSize(arg_types[i]);
    case ARG_TYPE_PRECISION:
      assert(i >= 0);
      assert(i < arg_types.size());
      assert(arg_types[i].type == FunctionContext::TYPE_DECIMAL);
      return arg_types[i].precision;
    case ARG_TYPE_SCALE:
      assert(i >= 0);
      assert(i < arg_types.size());
      assert(arg_types[i].type == FunctionContext::TYPE_DECIMAL);
      return arg_types[i].scale;
    case DECIMAL_V2:
      return state->decimal_v2();
    default:
      assert(false);
      return -1;
  }
}
| michaelhkw/incubator-impala | be/src/udf/udf.cc | C++ | apache-2.0 | 19,483 |
<?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v9/common/criteria.proto
namespace Google\Ads\GoogleAds\V9\Common;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
 * A placement criterion. This can be used to modify bids for sites when
 * targeting the content network.
 *
 * Generated from protobuf message <code>google.ads.googleads.v9.common.PlacementInfo</code>
 */
class PlacementInfo extends \Google\Protobuf\Internal\Message
{
    /**
     * URL of the placement.
     * For example, "http://www.domain.com".
     *
     * Generated from protobuf field <code>optional string url = 2;</code>
     */
    protected $url = null;

    /**
     * Constructor.
     *
     * @param array $data {
     *     Optional. Data for populating the Message object.
     *
     *     @type string $url
     *           URL of the placement.
     *           For example, "http://www.domain.com".
     * }
     */
    public function __construct($data = NULL) {
        // Registers the descriptor pool for criteria.proto before the parent
        // Message constructor consumes $data.
        \GPBMetadata\Google\Ads\GoogleAds\V9\Common\Criteria::initOnce();
        parent::__construct($data);
    }

    /**
     * URL of the placement.
     * For example, "http://www.domain.com".
     *
     * Generated from protobuf field <code>optional string url = 2;</code>
     * @return string
     */
    public function getUrl()
    {
        // Proto3 optional field: returns '' when unset (use hasUrl() to distinguish).
        return isset($this->url) ? $this->url : '';
    }

    /**
     * Returns true if the optional `url` field has been explicitly set.
     * @return bool
     */
    public function hasUrl()
    {
        return isset($this->url);
    }

    /**
     * Clears the optional `url` field so it reads as unset.
     * @return void
     */
    public function clearUrl()
    {
        unset($this->url);
    }

    /**
     * URL of the placement.
     * For example, "http://www.domain.com".
     *
     * Generated from protobuf field <code>optional string url = 2;</code>
     * @param string $var
     * @return $this
     */
    public function setUrl($var)
    {
        GPBUtil::checkString($var, True);
        $this->url = $var;

        return $this;
    }

}
| googleads/google-ads-php | src/Google/Ads/GoogleAds/V9/Common/PlacementInfo.php | PHP | apache-2.0 | 1,992 |
/*
* ../../../..//extensions/a11y/mathmaps/es/symbols/latin-upper-double-accent.js
*
* Copyright (c) 2009-2018 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Locale mapping table: the first entry names the locale, every other entry
// maps a Unicode code point (hex "key") to its spoken Spanish description.
// FIX: several entries still contained untranslated English fragments
// ("with cedilla and breve", "with horn and ...", "y acute"); they are now
// consistently in Spanish like the rest of the table.
[
  { locale: "es" },
  { key: "1E08", mappings: { default: { default: "mayúscula C con cedilla y agudo" } }, category: "Lu" },
  { key: "1E14", mappings: { default: { default: "mayúscula E con macrón y grave" } }, category: "Lu" },
  { key: "1E16", mappings: { default: { default: "mayúscula E con macrón y agudo" } }, category: "Lu" },
  { key: "1E1C", mappings: { default: { default: "mayúscula E con cedilla y breve" } }, category: "Lu" },
  { key: "1E2E", mappings: { default: { default: "mayúscula I con diéresis y agudo" } }, category: "Lu" },
  { key: "1E38", mappings: { default: { default: "mayúscula L con punto debajo y macrón" } }, category: "Lu" },
  { key: "1E4C", mappings: { default: { default: "mayúscula O con tilde y agudo" } }, category: "Lu" },
  { key: "1E4E", mappings: { default: { default: "mayúscula O con tilde y diéresis" } }, category: "Lu" },
  { key: "1E50", mappings: { default: { default: "mayúscula O con macrón y grave" } }, category: "Lu" },
  { key: "1E52", mappings: { default: { default: "mayúscula O con macrón y agudo" } }, category: "Lu" },
  { key: "1E5C", mappings: { default: { default: "mayúscula R con punto debajo y macrón" } }, category: "Lu" },
  { key: "1E64", mappings: { default: { default: "mayúscula S con agudo y punto arriba" } }, category: "Lu" },
  { key: "1E66", mappings: { default: { default: "mayúscula S con carón y punto arriba" } }, category: "Lu" },
  { key: "1E68", mappings: { default: { default: "mayúscula S con punto debajo y punto arriba" } }, category: "Lu" },
  { key: "1E78", mappings: { default: { default: "mayúscula U con tilde y agudo" } }, category: "Lu" },
  { key: "1E7A", mappings: { default: { default: "mayúscula U con macrón y diéresis" } }, category: "Lu" },
  { key: "1EA4", mappings: { default: { default: "mayúscula A con acento circunflejo y agudo" } }, category: "Lu" },
  { key: "1EA6", mappings: { default: { default: "mayúscula A con acento circunflejo y grave" } }, category: "Lu" },
  { key: "1EA8", mappings: { default: { default: "mayúscula A con acento circunflejo y gancho arriba" } }, category: "Lu" },
  { key: "1EAA", mappings: { default: { default: "mayúscula A con acento circunflejo y tilde" } }, category: "Lu" },
  { key: "1EAC", mappings: { default: { default: "mayúscula A con acento circunflejo y punto debajo" } }, category: "Lu" },
  { key: "1EAE", mappings: { default: { default: "mayúscula A con breve y agudo" } }, category: "Lu" },
  { key: "1EB0", mappings: { default: { default: "mayúscula A con breve y grave" } }, category: "Lu" },
  { key: "1EB2", mappings: { default: { default: "mayúscula A con breve y gancho arriba" } }, category: "Lu" },
  { key: "1EB4", mappings: { default: { default: "mayúscula A con breve y tilde" } }, category: "Lu" },
  { key: "1EB6", mappings: { default: { default: "mayúscula A con breve y punto debajo" } }, category: "Lu" },
  { key: "1EBE", mappings: { default: { default: "mayúscula E con acento circunflejo y agudo" } }, category: "Lu" },
  { key: "1EC0", mappings: { default: { default: "mayúscula E con acento circunflejo y grave" } }, category: "Lu" },
  { key: "1EC2", mappings: { default: { default: "mayúscula E con acento circunflejo y gancho arriba" } }, category: "Lu" },
  { key: "1EC4", mappings: { default: { default: "mayúscula E con acento circunflejo y tilde" } }, category: "Lu" },
  { key: "1EC6", mappings: { default: { default: "mayúscula E con acento circunflejo y punto debajo" } }, category: "Lu" },
  { key: "1ED0", mappings: { default: { default: "mayúscula O con acento circunflejo y agudo" } }, category: "Lu" },
  { key: "1ED2", mappings: { default: { default: "mayúscula O con acento circunflejo y grave" } }, category: "Lu" },
  { key: "1ED4", mappings: { default: { default: "mayúscula O con acento circunflejo y gancho arriba" } }, category: "Lu" },
  { key: "1ED6", mappings: { default: { default: "mayúscula O con acento circunflejo y tilde" } }, category: "Lu" },
  { key: "1ED8", mappings: { default: { default: "mayúscula O con acento circunflejo y punto debajo" } }, category: "Lu" },
  { key: "1EDA", mappings: { default: { default: "mayúscula O con cuerno y agudo" } }, category: "Lu" },
  { key: "1EDC", mappings: { default: { default: "mayúscula O con cuerno y grave" } }, category: "Lu" },
  { key: "1EDE", mappings: { default: { default: "mayúscula O con cuerno y gancho arriba" } }, category: "Lu" },
  { key: "1EE0", mappings: { default: { default: "mayúscula O con cuerno y tilde" } }, category: "Lu" },
  { key: "1EE2", mappings: { default: { default: "mayúscula O con cuerno y punto debajo" } }, category: "Lu" },
  { key: "1EE8", mappings: { default: { default: "mayúscula U con cuerno y agudo" } }, category: "Lu" },
  { key: "1EEA", mappings: { default: { default: "mayúscula U con cuerno y grave" } }, category: "Lu" },
  { key: "1EEC", mappings: { default: { default: "mayúscula U con cuerno y gancho arriba" } }, category: "Lu" },
  { key: "1EEE", mappings: { default: { default: "mayúscula U con cuerno y tilde" } }, category: "Lu" },
  { key: "1EF0", mappings: { default: { default: "mayúscula U con cuerno y punto debajo" } }, category: "Lu" }
];
| GerHobbelt/MathJax | extensions/a11y/mathmaps/es/symbols/latin-upper-double-accent.js | JavaScript | apache-2.0 | 6,021 |
using System;
using System.Collections.Generic;
using System.Text;
using System.Data;
using System.Linq;
using System.Data.SqlClient;
using Softv.Entities;
using Softv.Providers;
using SoftvConfiguration;
using Globals;
namespace Softv.DAO
{
/// <summary>
/// Class : Softv.DAO.CiudadServidorData
/// Generated by : Class Generator (c) 2014
/// Description : CiudadServidor Data Access Object
/// File : CiudadServidorDAO.cs
/// Creation date : 02/08/2016
/// Creation time : 12:53 p. m.
///</summary>
public class CiudadServidorData : CiudadServidorProvider
{
    /// <summary>
    /// Inserts a new CiudadServidor via the Softv_CiudadServidorAdd stored
    /// procedure and returns the database-generated identity.
    /// </summary>
    /// <param name="entity_CiudadServidor">Entity holding the values to insert.</param>
    /// <returns>Id of the newly inserted row (output parameter @Id).</returns>
    public override int AddCiudadServidor(CiudadServidorEntity entity_CiudadServidor)
    {
        int result = 0;
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorAdd", connection);
            AssingParameter(comandoSql, "@Id", null, pd: ParameterDirection.Output, IsKey: true);
            AssingParameter(comandoSql, "@Ciudad", entity_CiudadServidor.Ciudad);
            AssingParameter(comandoSql, "@IdPlaza", entity_CiudadServidor.IdPlaza);
            try
            {
                if (connection.State == ConnectionState.Closed)
                    connection.Open();
                ExecuteNonQuery(comandoSql);
                // BUG FIX: the output parameter is registered above as "@Id";
                // the previous code read Parameters["@IdCiudadServidor"], a name
                // that is never added to the collection, so every successful
                // insert ended in an IndexOutOfRangeException.
                result = (int)comandoSql.Parameters["@Id"].Value;
            }
            catch (Exception ex)
            {
                throw new Exception("Error adding CiudadServidor " + ex.Message, ex);
            }
            finally
            {
                connection.Close();
            }
        }
        return result;
    }

    /// <summary>
    /// Deletes the CiudadServidor identified by <paramref name="Id"/>.
    /// </summary>
    /// <param name="Id">Id of the row to delete.</param>
    /// <returns>Rows affected as reported by the stored procedure.</returns>
    public override int DeleteCiudadServidor(int? Id)
    {
        int result = 0;
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorDelete", connection);
            AssingParameter(comandoSql, "@Id", Id);
            try
            {
                if (connection.State == ConnectionState.Closed)
                    connection.Open();
                result = ExecuteNonQuery(comandoSql);
            }
            catch (Exception ex)
            {
                throw new Exception("Error deleting CiudadServidor " + ex.Message, ex);
            }
            finally
            {
                if (connection != null)
                    connection.Close();
            }
        }
        return result;
    }

    /// <summary>
    /// Updates an existing CiudadServidor row.
    /// </summary>
    /// <param name="entity_CiudadServidor">Entity with the new values; Id selects the row.</param>
    /// <returns>Rows affected as reported by the stored procedure.</returns>
    public override int EditCiudadServidor(CiudadServidorEntity entity_CiudadServidor)
    {
        int result = 0;
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorEdit", connection);
            AssingParameter(comandoSql, "@Id", entity_CiudadServidor.Id);
            AssingParameter(comandoSql, "@Ciudad", entity_CiudadServidor.Ciudad);
            AssingParameter(comandoSql, "@IdPlaza", entity_CiudadServidor.IdPlaza);
            try
            {
                if (connection.State == ConnectionState.Closed)
                    connection.Open();
                // ExecuteNonQuery already returns int; the old
                // int.Parse(...ToString()) round-trip was redundant.
                result = ExecuteNonQuery(comandoSql);
            }
            catch (Exception ex)
            {
                throw new Exception("Error updating CiudadServidor " + ex.Message, ex);
            }
            finally
            {
                if (connection != null)
                    connection.Close();
            }
        }
        return result;
    }

    /// <summary>
    /// Executes <paramref name="comandoSql"/> on <paramref name="connection"/>
    /// and materializes every row via GetCiudadServidorFromReader.
    /// Shared by all list-returning queries; opens the connection if needed and
    /// always closes connection and reader.
    /// </summary>
    private List<CiudadServidorEntity> ReadList(SqlConnection connection, SqlCommand comandoSql)
    {
        List<CiudadServidorEntity> list = new List<CiudadServidorEntity>();
        IDataReader rd = null;
        try
        {
            if (connection.State == ConnectionState.Closed)
                connection.Open();
            rd = ExecuteReader(comandoSql);
            while (rd.Read())
            {
                list.Add(GetCiudadServidorFromReader(rd));
            }
        }
        catch (Exception ex)
        {
            throw new Exception("Error getting data CiudadServidor " + ex.Message, ex);
        }
        finally
        {
            if (connection != null)
                connection.Close();
            if (rd != null)
                rd.Close();
        }
        return list;
    }

    /// <summary>
    /// Gets all CiudadServidor rows.
    /// </summary>
    public override List<CiudadServidorEntity> GetCiudadServidor()
    {
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGet", connection);
            return ReadList(connection, comandoSql);
        }
    }

    /// <summary>
    /// Gets the CiudadServidor rows whose ids appear in <paramref name="lid"/>.
    /// </summary>
    /// <param name="lid">Ids to fetch; passed as a table-valued parameter.</param>
    public override List<CiudadServidorEntity> GetCiudadServidor(List<int> lid)
    {
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            DataTable IdDT = BuildTableID(lid);
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGetByIds", connection);
            AssingParameter(comandoSql, "@IdTable", IdDT);
            return ReadList(connection, comandoSql);
        }
    }

    /// <summary>
    /// Gets a single CiudadServidor by id, or null when no row matches.
    /// </summary>
    public override CiudadServidorEntity GetCiudadServidorById(int? Id)
    {
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGetById", connection);
            CiudadServidorEntity entity_CiudadServidor = null;
            AssingParameter(comandoSql, "@Id", Id);
            IDataReader rd = null;
            try
            {
                if (connection.State == ConnectionState.Closed)
                    connection.Open();
                // SingleRow: at most one row is expected for a primary-key lookup.
                rd = ExecuteReader(comandoSql, CommandBehavior.SingleRow);
                if (rd.Read())
                    entity_CiudadServidor = GetCiudadServidorFromReader(rd);
            }
            catch (Exception ex)
            {
                throw new Exception("Error getting data CiudadServidor " + ex.Message, ex);
            }
            finally
            {
                if (connection != null)
                    connection.Close();
                if (rd != null)
                    rd.Close();
            }
            return entity_CiudadServidor;
        }
    }

    /// <summary>
    /// Gets the CiudadServidor rows that belong to the given plaza/connection.
    /// </summary>
    public override List<CiudadServidorEntity> GetCiudadServidorByIdConexion(int? IdPlaza)
    {
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGetByIdConexion", connection);
            AssingParameter(comandoSql, "@IdPlaza", IdPlaza);
            return ReadList(connection, comandoSql);
        }
    }

    /// <summary>
    /// Gets one page of CiudadServidor rows; totalCount carries the unpaged row count.
    /// </summary>
    /// <param name="pageIndex">Zero-based (as interpreted by the stored procedure) page index.</param>
    /// <param name="pageSize">Rows per page.</param>
    public override SoftvList<CiudadServidorEntity> GetPagedList(int pageIndex, int pageSize)
    {
        SoftvList<CiudadServidorEntity> entities = new SoftvList<CiudadServidorEntity>();
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGetPaged", connection);
            AssingParameter(comandoSql, "@pageIndex", pageIndex);
            AssingParameter(comandoSql, "@pageSize", pageSize);
            foreach (CiudadServidorEntity entity in ReadList(connection, comandoSql))
            {
                entities.Add(entity);
            }
            entities.totalCount = GetCiudadServidorCount();
            return entities;
        }
    }

    /// <summary>
    /// Gets one page of CiudadServidor rows filtered by an XML criteria blob;
    /// totalCount carries the filtered, unpaged row count.
    /// </summary>
    public override SoftvList<CiudadServidorEntity> GetPagedList(int pageIndex, int pageSize, String xml)
    {
        SoftvList<CiudadServidorEntity> entities = new SoftvList<CiudadServidorEntity>();
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGetPagedXml", connection);
            AssingParameter(comandoSql, "@pageSize", pageSize);
            AssingParameter(comandoSql, "@pageIndex", pageIndex);
            AssingParameter(comandoSql, "@xml", xml);
            foreach (CiudadServidorEntity entity in ReadList(connection, comandoSql))
            {
                entities.Add(entity);
            }
            entities.totalCount = GetCiudadServidorCount(xml);
            return entities;
        }
    }

    /// <summary>
    /// Gets the total CiudadServidor row count.
    /// </summary>
    public int GetCiudadServidorCount()
    {
        int result = 0;
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGetCount", connection);
            try
            {
                if (connection.State == ConnectionState.Closed)
                    connection.Open();
                result = (int)ExecuteScalar(comandoSql);
            }
            catch (Exception ex)
            {
                throw new Exception("Error getting data CiudadServidor " + ex.Message, ex);
            }
            finally
            {
                if (connection != null)
                    connection.Close();
            }
        }
        return result;
    }

    /// <summary>
    /// Gets the CiudadServidor row count matching the given XML criteria blob.
    /// </summary>
    public int GetCiudadServidorCount(String xml)
    {
        int result = 0;
        using (SqlConnection connection = new SqlConnection(SoftvSettings.Settings.CiudadServidor.ConnectionString))
        {
            SqlCommand comandoSql = CreateCommand("Softv_CiudadServidorGetCountXml", connection);
            AssingParameter(comandoSql, "@xml", xml);
            try
            {
                if (connection.State == ConnectionState.Closed)
                    connection.Open();
                result = (int)ExecuteScalar(comandoSql);
            }
            catch (Exception ex)
            {
                throw new Exception("Error getting data CiudadServidor " + ex.Message, ex);
            }
            finally
            {
                if (connection != null)
                    connection.Close();
            }
        }
        return result;
    }

    #region Customs Methods
    #endregion
}
}
| isfon/CallCenter | Encuestas/Softv/Softv.SQL/CiudadServidorData.cs | C# | apache-2.0 | 12,032 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.api.management.mbean;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.OpenDataException;
import javax.management.openmbean.OpenType;
import javax.management.openmbean.SimpleType;
import javax.management.openmbean.TabularType;
/**
* Various JMX openmbean types used by Camel.
*/
public final class CamelOpenMBeanTypes {

    // Utility class: static factories only, never instantiated.
    private CamelOpenMBeanTypes() {
    }

    /**
     * Tabular type for the rest services registered in a CamelContext,
     * indexed by url + method.
     *
     * @throws OpenDataException if the open type cannot be constructed
     */
    public static TabularType listRestServicesTabularType() throws OpenDataException {
        CompositeType ct = listRestServicesCompositeType();
        return new TabularType(
                "listRestServices", "Lists all the rest services in the registry", ct, new String[] { "url", "method" });
    }

    /**
     * Row type for {@link #listRestServicesTabularType()}: url/path/method
     * details plus in/out types, state, route id and description.
     */
    public static CompositeType listRestServicesCompositeType() throws OpenDataException {
        return new CompositeType(
                "rests", "Rest Services",
                new String[] {
                        "url", "baseUrl", "basePath", "uriTemplate", "method", "consumes", "produces", "inType", "outType",
                        "state", "routeId", "description" },
                new String[] {
                        "Url", "Base Url", "Base Path", "Uri Template", "Method", "Consumes", "Produces", "Input Type",
                        "Output Type", "State", "Route Id", "Description" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.STRING,
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.STRING });
    }

    /**
     * Tabular type for the endpoints in the registry, indexed by url.
     */
    public static TabularType listEndpointsTabularType() throws OpenDataException {
        CompositeType ct = listEndpointsCompositeType();
        return new TabularType("listEndpoints", "Lists all the endpoints in the registry", ct, new String[] { "url" });
    }

    /**
     * Row type for {@link #listEndpointsTabularType()}: url plus whether the
     * endpoint is static and/or dynamic.
     */
    public static CompositeType listEndpointsCompositeType() throws OpenDataException {
        return new CompositeType(
                "endpoints", "Endpoints",
                new String[] { "url", "static", "dynamic" },
                new String[] { "Url", "Static", "Dynamic" },
                new OpenType[] { SimpleType.STRING, SimpleType.BOOLEAN, SimpleType.BOOLEAN });
    }

    /**
     * Tabular type for exchange factory statistics, indexed by url.
     */
    public static TabularType listExchangeFactoryTabularType() throws OpenDataException {
        CompositeType ct = listExchangeFactoryCompositeType();
        return new TabularType("listExchangeFactory", "Lists all the exchange factories", ct, new String[] { "url" });
    }

    /**
     * Row type for {@link #listExchangeFactoryTabularType()}: pool capacity and
     * created/acquired/released/discarded counters per factory.
     */
    public static CompositeType listExchangeFactoryCompositeType() throws OpenDataException {
        return new CompositeType(
                "factories", "Factories",
                new String[] { "url", "routeId", "capacity", "pooled", "created", "acquired", "released", "discarded" },
                new String[] { "Url", "RouteId", "Capacity", "Pooled", "Created", "Acquired", "Released", "Discarded" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.INTEGER, SimpleType.INTEGER, SimpleType.LONG,
                        SimpleType.LONG, SimpleType.LONG, SimpleType.LONG });
    }

    /**
     * Tabular type for runtime-gathered input/output endpoints, indexed by a
     * numeric index (the same url can appear for several routes/directions).
     */
    public static TabularType listRuntimeEndpointsTabularType() throws OpenDataException {
        CompositeType ct = listRuntimeEndpointsCompositeType();
        return new TabularType(
                "listRuntimeEndpoints", "Lists all the input and output endpoints gathered during runtime", ct,
                new String[] { "index" });
    }

    /**
     * Row type for {@link #listRuntimeEndpointsTabularType()}: endpoint url,
     * owning route, direction and usage (hits) counter.
     */
    public static CompositeType listRuntimeEndpointsCompositeType() throws OpenDataException {
        return new CompositeType(
                "endpoints", "Endpoints",
                new String[] { "index", "url", "routeId", "direction", "static", "dynamic", "hits" },
                new String[] { "Index", "Url", "Route Id", "Direction", "Static", "Dynamic", "Hits" },
                new OpenType[] {
                        SimpleType.INTEGER, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.BOOLEAN,
                        SimpleType.BOOLEAN, SimpleType.LONG });
    }

    /**
     * Tabular type for the components, indexed by component name.
     */
    public static TabularType listComponentsTabularType() throws OpenDataException {
        CompositeType ct = listComponentsCompositeType();
        return new TabularType("listComponents", "Lists all the components", ct, new String[] { "name" });
    }

    /**
     * Row type for {@link #listComponentsTabularType()}: component metadata
     * including Maven coordinates (groupId/artifactId/version).
     */
    public static CompositeType listComponentsCompositeType() throws OpenDataException {
        return new CompositeType(
                "components", "Components",
                new String[] {
                        "name", "title", "syntax", "description", "label", "deprecated", "secret", "status", "type", "groupId",
                        "artifactId", "version" },
                new String[] {
                        "Name", "Title", "Syntax", "Description", "Label", "Deprecated", "Secret", "Status", "Type", "GroupId",
                        "ArtifactId", "Version" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.STRING,
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.STRING });
    }

    /**
     * Tabular type for threads currently blocked by the routing engine,
     * indexed by thread id.
     */
    public static TabularType listAwaitThreadsTabularType() throws OpenDataException {
        CompositeType ct = listAwaitThreadsCompositeType();
        return new TabularType("listAwaitThreads", "Lists blocked threads by the routing engine", ct, new String[] { "id" });
    }

    /**
     * Row type for {@link #listAwaitThreadsTabularType()}: thread identity plus
     * the exchange/route/node it is blocked on and for how long.
     */
    public static CompositeType listAwaitThreadsCompositeType() throws OpenDataException {
        return new CompositeType(
                "threads", "Threads",
                new String[] { "id", "name", "exchangeId", "routeId", "nodeId", "duration" },
                new String[] { "Thread Id", "Thread name", "ExchangeId", "RouteId", "NodeId", "Duration" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.STRING });
    }

    /**
     * Tabular type for the EIP catalog, indexed by EIP name.
     */
    public static TabularType listEipsTabularType() throws OpenDataException {
        CompositeType ct = listEipsCompositeType();
        return new TabularType("listEips", "Lists all the EIPs", ct, new String[] { "name" });
    }

    /**
     * Row type for {@link #listEipsTabularType()}: EIP metadata.
     */
    public static CompositeType listEipsCompositeType() throws OpenDataException {
        return new CompositeType(
                "eips", "EIPs",
                new String[] { "name", "title", "description", "label", "status", "type" },
                new String[] { "Name", "Title", "Description", "Label", "Status", "Type" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.STRING });
    }

    /**
     * Tabular type for in-flight exchanges, indexed by exchange id.
     */
    public static TabularType listInflightExchangesTabularType() throws OpenDataException {
        CompositeType ct = listInflightExchangesCompositeType();
        return new TabularType("listInflightExchanges", "Lists inflight exchanges", ct, new String[] { "exchangeId" });
    }

    /**
     * Row type for {@link #listInflightExchangesTabularType()}: where the
     * exchange currently is (route/node) and its elapsed/total duration.
     */
    public static CompositeType listInflightExchangesCompositeType() throws OpenDataException {
        return new CompositeType(
                "exchanges", "Exchanges",
                new String[] { "exchangeId", "fromRouteId", "routeId", "nodeId", "elapsed", "duration" },
                new String[] { "Exchange Id", "From RouteId", "RouteId", "NodeId", "Elapsed", "Duration" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.STRING });
    }

    /**
     * Tabular type for Choice EIP statistics, indexed by predicate.
     */
    public static TabularType choiceTabularType() throws OpenDataException {
        CompositeType ct = choiceCompositeType();
        return new TabularType("choice", "Choice statistics", ct, new String[] { "predicate" });
    }

    /**
     * Row type for {@link #choiceTabularType()}: predicate text, its language,
     * and how many exchanges matched it.
     */
    public static CompositeType choiceCompositeType() throws OpenDataException {
        return new CompositeType(
                "predicates", "Predicates",
                new String[] { "predicate", "language", "matches" },
                new String[] { "Predicate", "Language", "Matches" },
                new OpenType[] { SimpleType.STRING, SimpleType.STRING, SimpleType.LONG });
    }

    /**
     * Tabular type for load balancer failure statistics, indexed by exception.
     */
    public static TabularType loadbalancerExceptionsTabularType() throws OpenDataException {
        CompositeType ct = loadbalancerExceptionsCompositeType();
        return new TabularType("exception", "Exception statistics", ct, new String[] { "exception" });
    }

    /**
     * Row type for {@link #loadbalancerExceptionsTabularType()}: exception
     * class name and failure count.
     */
    public static CompositeType loadbalancerExceptionsCompositeType() throws OpenDataException {
        return new CompositeType(
                "exceptions", "Exceptions",
                new String[] { "exception", "failures" },
                new String[] { "Exception", "Failures" },
                new OpenType[] { SimpleType.STRING, SimpleType.LONG });
    }

    /**
     * Tabular type for endpoint usage statistics, indexed by url.
     */
    public static TabularType endpointsUtilizationTabularType() throws OpenDataException {
        CompositeType ct = endpointsUtilizationCompositeType();
        return new TabularType("endpointsUtilization", "Endpoint utilization statistics", ct, new String[] { "url" });
    }

    /**
     * Row type for {@link #endpointsUtilizationTabularType()}: url and hit count.
     */
    public static CompositeType endpointsUtilizationCompositeType() throws OpenDataException {
        return new CompositeType(
                "endpoints", "Endpoints",
                new String[] { "url", "hits" },
                new String[] { "Url", "Hits" },
                new OpenType[] { SimpleType.STRING, SimpleType.LONG });
    }

    /**
     * Tabular type for the registered transformers, indexed by scheme + from + to.
     */
    public static TabularType listTransformersTabularType() throws OpenDataException {
        CompositeType ct = listTransformersCompositeType();
        return new TabularType(
                "listTransformers", "Lists all the transformers in the registry", ct, new String[] { "scheme", "from", "to" });
    }

    /**
     * Row type for {@link #listTransformersTabularType()}: transformer key
     * (scheme/from/to) plus static/dynamic flags and description.
     */
    public static CompositeType listTransformersCompositeType() throws OpenDataException {
        return new CompositeType(
                "transformers", "Transformers",
                new String[] { "scheme", "from", "to", "static", "dynamic", "description" },
                new String[] { "Scheme", "From", "To", "Static", "Dynamic", "Description" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING,
                        SimpleType.BOOLEAN, SimpleType.BOOLEAN, SimpleType.STRING });
    }

    /**
     * Tabular type for the registered validators, indexed by type.
     */
    public static TabularType listValidatorsTabularType() throws OpenDataException {
        CompositeType ct = listValidatorsCompositeType();
        return new TabularType("listValidators", "Lists all the validators in the registry", ct, new String[] { "type" });
    }

    /**
     * Row type for {@link #listValidatorsTabularType()}.
     */
    public static CompositeType listValidatorsCompositeType() throws OpenDataException {
        return new CompositeType(
                "validators", "Validators",
                new String[] { "type", "static", "dynamic", "description" },
                new String[] { "Type", "Static", "Dynamic", "Description" },
                new OpenType[] { SimpleType.STRING, SimpleType.BOOLEAN, SimpleType.BOOLEAN, SimpleType.STRING });
    }

    /**
     * Row type for health check details: state, failure info and
     * readiness/liveness flags plus check thresholds.
     */
    public static CompositeType camelHealthDetailsCompositeType() throws OpenDataException {
        return new CompositeType(
                "healthDetails", "Health Details",
                new String[] {
                        "id", "group", "state", "enabled", "message", "failureUri", "failureCount", "failureStackTrace",
                        "readiness", "liveness",
                        "interval", "successThreshold", "failureThreshold" },
                new String[] {
                        "ID", "Group", "State", "Enabled", "Message", "Failure Uri", "Failure Count", "Failure StackTrace",
                        "Readiness", "Liveness",
                        "Interval", "Success Threshold", "Failure Threshold" },
                new OpenType[] {
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.BOOLEAN, SimpleType.STRING,
                        SimpleType.STRING, SimpleType.INTEGER, SimpleType.STRING, SimpleType.BOOLEAN, SimpleType.BOOLEAN,
                        SimpleType.LONG, SimpleType.INTEGER, SimpleType.INTEGER });
    }

    /**
     * Tabular type for health check details, indexed by check id.
     */
    public static TabularType camelHealthDetailsTabularType() throws OpenDataException {
        CompositeType ct = camelHealthDetailsCompositeType();
        return new TabularType("healthDetails", "Health Details", ct, new String[] { "id" });
    }

    /**
     * Row type for route properties: simple key/value pairs.
     */
    public static CompositeType camelRoutePropertiesCompositeType() throws OpenDataException {
        return new CompositeType(
                "routeProperties", "Route Properties",
                new String[] { "key", "value" },
                new String[] { "Key", "Value" },
                new OpenType[] { SimpleType.STRING, SimpleType.STRING });
    }

    /**
     * Tabular type for route properties, indexed by key.
     */
    public static TabularType camelRoutePropertiesTabularType() throws OpenDataException {
        CompositeType ct = camelRoutePropertiesCompositeType();
        return new TabularType("routeProperties", "Route Properties", ct, new String[] { "key" });
    }

    /**
     * Tabular type for the supervising route controller's per-route status
     * (including failure details for routes that failed to start), indexed by
     * a numeric index.
     */
    public static TabularType supervisingRouteControllerRouteStatusTabularType() throws OpenDataException {
        CompositeType ct = supervisingRouteControllerRouteStatusCompositeType();
        return new TabularType(
                "routeStatus", "Lists detailed status about all the routes (incl failure details for routes failed to start)",
                ct, new String[] { "index" });
    }

    /**
     * Row type for {@link #supervisingRouteControllerRouteStatusTabularType()}:
     * restart attempts, timing, and the last error with stacktrace.
     */
    public static CompositeType supervisingRouteControllerRouteStatusCompositeType() throws OpenDataException {
        return new CompositeType(
                "routes", "Routes",
                new String[] {
                        "index", "routeId", "status", "supervising", "attempts", "elapsed", "last", "error", "stacktrace" },
                new String[] {
                        "Index", "Route Id", "Status", "Supervising", "Attempts", "Elapsed", "Since Last Attempt", "Error",
                        "Stacktrace" },
                new OpenType[] {
                        SimpleType.INTEGER, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.LONG,
                        SimpleType.STRING, SimpleType.STRING, SimpleType.STRING, SimpleType.STRING });
    }
}
| pax95/camel | core/camel-management-api/src/main/java/org/apache/camel/api/management/mbean/CamelOpenMBeanTypes.java | Java | apache-2.0 | 15,425 |
/*
* Copyright 2012 Open Source Robotics Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef _MATERIAL_HH_
#define _MATERIAL_HH_
#include <string>
#include <iostream>
#include "common/Color.hh"
namespace gazebo
{
namespace common
{
/// \addtogroup gazebo_common Common
/// \{
/// \class Material Material.hh common/common.hh
/// \brief Encapsulates description of a material
class Material
{
public: enum ShadeMode {FLAT, GOURAUD, PHONG, BLINN, SHADE_COUNT};
public: static std::string ShadeModeStr[SHADE_COUNT];
public: enum BlendMode {ADD, MODULATE, REPLACE, BLEND_COUNT};
public: static std::string BlendModeStr[BLEND_COUNT];
/// \brief Constructor
public: Material();
/// \brief Destructor
public: virtual ~Material();
/// \brief Create a material with a default color
/// \param[in] _clr Color of the material
public: Material(const Color &_clr);
/// \brief Get the name of the material
/// \return The name of the material
public: std::string GetName() const;
/// \brief Set a texture image
/// \param[in] _tex The name of the texture, which must be in Gazebo's
/// resource path
public: void SetTextureImage(const std::string &_tex);
/// \brief Set a texture image
/// \param[in] _tex The name of the texture
/// \param[in] _resourcePath Path which contains _tex
public: void SetTextureImage(const std::string &_tex,
const std::string &_resourcePath);
/// \brief Get a texture image
/// \return The name of the texture image (if one exists) or an empty
/// string
public: std::string GetTextureImage() const;
/// \brief Set the ambient color
/// \param[in] _clr The ambient color
public: void SetAmbient(const Color &_clr);
/// \brief Get the ambient color
/// \return The ambient color
public: Color GetAmbient() const;
/// \brief Set the diffuse color
/// \param[in] _clr The diffuse color
public: void SetDiffuse(const Color &_clr);
/// \brief Get the diffuse color
/// \return The diffuse color
public: Color GetDiffuse() const;
/// \brief Set the specular color
/// \param[in] _clr The specular color
public: void SetSpecular(const Color &_clr);
/// \brief Get the specular color
/// \return The specular color
public: Color GetSpecular() const;
/// \brief Set the emissive color
/// \param[in] _clr The emissive color
public: void SetEmissive(const Color &_clr);
/// \brief Get the emissive color
/// \return The emissive color
public: Color GetEmissive() const;
/// \brief Set the transparency percentage (0..1)
/// \param[in] _t The amount of transparency (0..1)
public: void SetTransparency(double _t);
/// \brief Get the transparency percentage (0..1)
/// \return The transparency percentage
public: double GetTransparency() const;
/// \brief Set the shininess
/// \param[in] _t The shininess value
public: void SetShininess(double _t);
/// \brief Get the shininess
/// \return The shininess value
public: double GetShininess() const;
/// \brief Set the blend factors. Will be interpreted as:
/// (texture * _srcFactor) + (scene_pixel * _dstFactor)
/// \param[in] _srcFactor The source factor
/// \param[in] _dstFactor The destination factor
public: void SetBlendFactors(double _srcFactor, double _dstFactor);
/// \brief Get the blend factors
/// \param[out] _srcFactor Source factor is returned in this variable
/// \param[out] _dstFactor Destination factor is returned in this variable
public: void GetBlendFactors(double &_srcFactor, double &_dstFactor);
/// \brief Set the blending mode
/// \param[in] _b The blend mode
public: void SetBlendMode(BlendMode _b);
/// \brief Get the blending mode
/// \return The blend mode
public: BlendMode GetBlendMode() const;
/// \brief Set the shading mode
/// \param[in] _b The shading mode
public: void SetShadeMode(ShadeMode _b);
/// \brief Get the shading mode
/// \return The shading mode
public: ShadeMode GetShadeMode() const;
/// \brief Set the point size
/// \param[in] _size The point size
public: void SetPointSize(double _size);
/// \brief Get the point size
/// \return The point size
public: double GetPointSize() const;
/// \brief Set depth write
/// \param[in] _value the depth write enabled state
public: void SetDepthWrite(bool _value);
/// \brief Get depth write
/// \return the depth write enabled state
public: bool GetDepthWrite() const;
/// \brief Set lighting enabled
/// \param[in] _value the lighting enabled state
public: void SetLighting(bool _value);
/// \brief Get lighting enabled
/// \return the lighting enabled state
public: bool GetLighting() const;
/// \brief Stream insertion operator
/// \param[in] _out The output stream to write to
/// \param[in] _m The material to be written
/// \return The output stream, to allow chaining
/// \note Point size, blend factors and the lighting flag are not printed.
public: friend std::ostream &operator<<(std::ostream &_out,
            const gazebo::common::Material &_m)
        {
          _out << "Material:\n";
          _out << " Name: " << _m.name << "\n";
          _out << " Texture: " << _m.texImage << "\n";
          _out << " Ambient: " << _m.ambient << "\n";
          _out << " Diffuse: " << _m.diffuse << "\n";
          _out << " Specular: " << _m.specular << "\n";
          _out << " Emissive: " << _m.emissive << "\n";
          _out << " Transparency: " << _m.transparency << "\n";
          _out << " Shininess: " << _m.shininess << "\n";
          _out << " BlendMode: " << BlendModeStr[_m.blendMode] << "\n";
          _out << " ShadeMode: " << ShadeModeStr[_m.shadeMode] << "\n";
          _out << " DepthWrite: " << _m.depthWrite << "\n";
          return _out;
        }
/// \brief the name of the material
protected: std::string name;
/// \brief the texture image file name
protected: std::string texImage;
/// \brief the ambient light color
protected: Color ambient;
/// \brief the diffuse light color
protected: Color diffuse;
/// \brief the specular light color
protected: Color specular;
/// \brief the emissive light color
protected: Color emissive;
/// \brief transparency value in the range 0 to 1
protected: double transparency;
/// \brief shininess value (0 to 1)
protected: double shininess;
/// \brief point size
protected: double pointSize;
/// \brief blend mode
protected: BlendMode blendMode;
/// \brief the shade mode
protected: ShadeMode shadeMode;
/// \brief the total number of instantiated Material instances
private: static unsigned int counter;
/// \brief flag to perform depth buffer write
private: bool depthWrite;
/// \brief flag to enable lighting (see SetLighting)
private: bool lighting;
/// \brief source blend factor
private: double srcBlendFactor;
/// \brief destination blend factor
private: double dstBlendFactor;
};
/// \}
}
}
#endif
| thomas-moulard/gazebo-deb | gazebo/common/Material.hh | C++ | apache-2.0 | 8,040 |
from __future__ import division, print_function, absolute_import, unicode_literals
import os
from mog_commons.case_class import CaseClass
from mog_commons.functional import omap
from javactl.util import normalize_path
class AppSetting(CaseClass):
    """Validated application launch settings.

    Exactly one of ``jar`` or ``command`` must be given, ``entry_point``
    is only meaningful together with ``jar``, and relative paths are
    normalized against the (absolute) application home directory.
    """

    def __init__(self, name=None, home=None, jar=None, entry_point=None, command=None, pid_file=None):
        # constraints
        assert name is not None, 'app.name is required'
        assert home is not None, 'app.home is required'
        assert os.path.isabs(home), 'app.home must be an absolute path'
        assert (jar is None) != (command is None), 'Either app.jar or app.command but not both must be given'
        assert jar is not None or entry_point is None, 'app.entry_point must be used with app.jar'

        # PEP 8 (E731): use a def rather than a lambda bound to a name.
        def normalize(p):
            # Resolve a possibly-relative path against the app home directory.
            return normalize_path(p, home)

        CaseClass.__init__(
            self,
            ('name', name),
            ('home', home),
            ('jar', omap(normalize, jar)),
            ('entry_point', entry_point),
            ('command', omap(normalize, command)),
            ('pid_file', omap(normalize, pid_file))
        )

    def is_duplicate_allowed(self):
        """Return True when concurrent instances are allowed.

        Duplicates are permitted only if a pid file is configured to tell
        the instances apart.
        """
        return self.pid_file is not None

    def get_args(self, java_args):
        """Build the argument list appended to the ``java`` executable.

        :param java_args: list of JVM arguments (e.g. -Xmx, -D flags)
        :return: full argument list: ``-cp JAR ENTRY_POINT`` when an entry
                 point is set, ``-jar JAR`` otherwise, or the bare command
                 when ``app.command`` was configured.
        """
        if self.jar is not None:
            if self.entry_point is not None:
                return java_args + ['-cp', self.jar, self.entry_point]
            return java_args + ['-jar', self.jar]
        return [self.command]
| mogproject/javactl | src/javactl/setting/app_setting.py | Python | apache-2.0 | 1,507 |
// Copyright 2016 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package integration
import (
"context"
"crypto/tls"
"fmt"
"io/ioutil"
"log"
"math/rand"
"net"
"net/http"
"net/http/httptest"
"os"
"reflect"
"sort"
"strings"
"sync"
"sync/atomic"
"testing"
"time"
"go.etcd.io/etcd/client"
"go.etcd.io/etcd/clientv3"
"go.etcd.io/etcd/embed"
"go.etcd.io/etcd/etcdserver"
"go.etcd.io/etcd/etcdserver/api/etcdhttp"
"go.etcd.io/etcd/etcdserver/api/rafthttp"
"go.etcd.io/etcd/etcdserver/api/v2http"
"go.etcd.io/etcd/etcdserver/api/v3client"
"go.etcd.io/etcd/etcdserver/api/v3election"
epb "go.etcd.io/etcd/etcdserver/api/v3election/v3electionpb"
"go.etcd.io/etcd/etcdserver/api/v3lock"
lockpb "go.etcd.io/etcd/etcdserver/api/v3lock/v3lockpb"
"go.etcd.io/etcd/etcdserver/api/v3rpc"
pb "go.etcd.io/etcd/etcdserver/etcdserverpb"
"go.etcd.io/etcd/pkg/logutil"
"go.etcd.io/etcd/pkg/testutil"
"go.etcd.io/etcd/pkg/tlsutil"
"go.etcd.io/etcd/pkg/transport"
"go.etcd.io/etcd/pkg/types"
"github.com/soheilhy/cmux"
"go.uber.org/zap"
"golang.org/x/crypto/bcrypt"
"google.golang.org/grpc"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/keepalive"
)
const (
// RequestWaitTimeout is the time duration to wait for a request to go through or detect leader loss.
RequestWaitTimeout = 3 * time.Second
tickDuration = 10 * time.Millisecond
requestTimeout = 20 * time.Second
clusterName = "etcd"
basePort = 21000
URLScheme = "unix"
URLSchemeTLS = "unixs"
)
var (
electionTicks = 10
// integration test uses unique ports, counting up, to listen for each
// member, ensuring restarted members can listen on the same port again.
localListenCount = int64(0)
testTLSInfo = transport.TLSInfo{
KeyFile: "./fixtures/server.key.insecure",
CertFile: "./fixtures/server.crt",
TrustedCAFile: "./fixtures/ca.crt",
ClientCertAuth: true,
}
testTLSInfoIP = transport.TLSInfo{
KeyFile: "./fixtures/server-ip.key.insecure",
CertFile: "./fixtures/server-ip.crt",
TrustedCAFile: "./fixtures/ca.crt",
ClientCertAuth: true,
}
testTLSInfoExpired = transport.TLSInfo{
KeyFile: "./fixtures-expired/server.key.insecure",
CertFile: "./fixtures-expired/server.crt",
TrustedCAFile: "./fixtures-expired/ca.crt",
ClientCertAuth: true,
}
testTLSInfoExpiredIP = transport.TLSInfo{
KeyFile: "./fixtures-expired/server-ip.key.insecure",
CertFile: "./fixtures-expired/server-ip.crt",
TrustedCAFile: "./fixtures-expired/ca.crt",
ClientCertAuth: true,
}
defaultTokenJWT = "jwt,pub-key=./fixtures/server.crt,priv-key=./fixtures/server.key.insecure,sign-method=RS256,ttl=1s"
lg = zap.NewNop()
)
func init() {
if os.Getenv("CLUSTER_DEBUG") != "" {
lg, _ = zap.NewProduction()
}
}
type ClusterConfig struct {
Size int
PeerTLS *transport.TLSInfo
ClientTLS *transport.TLSInfo
DiscoveryURL string
AuthToken string
UseGRPC bool
QuotaBackendBytes int64
MaxTxnOps uint
MaxRequestBytes uint
SnapshotCount uint64
SnapshotCatchUpEntries uint64
GRPCKeepAliveMinTime time.Duration
GRPCKeepAliveInterval time.Duration
GRPCKeepAliveTimeout time.Duration
// SkipCreatingClient to skip creating clients for each member.
SkipCreatingClient bool
ClientMaxCallSendMsgSize int
ClientMaxCallRecvMsgSize int
// UseIP is true to use only IP for gRPC requests.
UseIP bool
LeaseCheckpointInterval time.Duration
}
type cluster struct {
cfg *ClusterConfig
Members []*member
}
// schemeFromTLSInfo maps a TLS configuration to the URL scheme used by the
// test cluster: "unixs" when TLS is configured, plain "unix" otherwise.
func schemeFromTLSInfo(tls *transport.TLSInfo) string {
	if tls != nil {
		return URLSchemeTLS
	}
	return URLScheme
}
// fillClusterForMembers computes the static bootstrap string ("name=url,...")
// from every member's peer listeners and stores the resulting URL map on each
// member. It is a no-op when the cluster is bootstrapped via a discovery URL.
func (c *cluster) fillClusterForMembers() error {
	if c.cfg.DiscoveryURL != "" {
		// cluster will be discovered
		return nil
	}
	entries := make([]string, 0)
	for _, m := range c.Members {
		scheme := schemeFromTLSInfo(m.PeerTLSInfo)
		for _, l := range m.PeerListeners {
			entries = append(entries, fmt.Sprintf("%s=%s://%s", m.Name, scheme, l.Addr().String()))
		}
	}
	initialCluster := strings.Join(entries, ",")
	for _, m := range c.Members {
		urlsMap, err := types.NewURLsMap(initialCluster)
		if err != nil {
			return err
		}
		m.InitialPeerURLsMap = urlsMap
	}
	return nil
}
// newCluster builds an unlaunched cluster of cfg.Size members and wires up
// the static bootstrap URL map, failing the test if the map cannot be built.
func newCluster(t testing.TB, cfg *ClusterConfig) *cluster {
	c := &cluster{cfg: cfg}
	members := make([]*member, cfg.Size)
	for i := range members {
		members[i] = c.mustNewMember(t)
	}
	c.Members = members
	if err := c.fillClusterForMembers(); err != nil {
		t.Fatal(err)
	}
	return c
}
// NewCluster returns an unlaunched cluster of the given size which has been
// set to use static bootstrap. Call Launch to actually start the members.
func NewCluster(t testing.TB, size int) *cluster {
	return newCluster(t, &ClusterConfig{Size: size})
}
// NewClusterByConfig returns an unlaunched cluster defined by a cluster
// configuration; use it when non-default sizes, TLS or quotas are needed.
func NewClusterByConfig(t testing.TB, cfg *ClusterConfig) *cluster {
	return newCluster(t, cfg)
}
func (c *cluster) Launch(t testing.TB) {
errc := make(chan error)
for _, m := range c.Members {
// Members are launched in separate goroutines because if they boot
// using discovery url, they have to wait for others to register to continue.
go func(m *member) {
errc <- m.Launch()
}(m)
}
for range c.Members {
if err := <-errc; err != nil {
t.Fatalf("error setting up member: %v", err)
}
}
// wait cluster to be stable to receive future client requests
c.waitMembersMatch(t, c.HTTPMembers())
c.waitVersion()
}
// URL returns the first advertised client URL of member i.
func (c *cluster) URL(i int) string {
	return c.Members[i].ClientURLs[0].String()
}
// URLs returns a list of all active client URLs in the cluster
// (stopped members are excluded).
func (c *cluster) URLs() []string {
	return getMembersURLs(c.Members)
}
// getMembersURLs collects the client URLs of every member whose server has
// not been stopped yet.
func getMembersURLs(members []*member) []string {
	urls := make([]string, 0)
	for _, m := range members {
		// Skip members whose etcdserver has already stopped.
		select {
		case <-m.s.StopNotify():
			continue
		default:
		}
		for _, u := range m.ClientURLs {
			urls = append(urls, u.String())
		}
	}
	return urls
}
// HTTPMembers returns a list of all active members as client.Members.
// Peer and client URLs are rebuilt from the live listeners (with the
// scheme derived from each member's TLS setup) rather than read from
// cached configuration.
func (c *cluster) HTTPMembers() []client.Member {
	ms := []client.Member{}
	for _, m := range c.Members {
		pScheme := schemeFromTLSInfo(m.PeerTLSInfo)
		cScheme := schemeFromTLSInfo(m.ClientTLSInfo)
		cm := client.Member{Name: m.Name}
		for _, ln := range m.PeerListeners {
			cm.PeerURLs = append(cm.PeerURLs, pScheme+"://"+ln.Addr().String())
		}
		for _, ln := range m.ClientListeners {
			cm.ClientURLs = append(cm.ClientURLs, cScheme+"://"+ln.Addr().String())
		}
		ms = append(ms, cm)
	}
	return ms
}
func (c *cluster) mustNewMember(t testing.TB) *member {
m := mustNewMember(t,
memberConfig{
name: c.name(rand.Int()),
authToken: c.cfg.AuthToken,
peerTLS: c.cfg.PeerTLS,
clientTLS: c.cfg.ClientTLS,
quotaBackendBytes: c.cfg.QuotaBackendBytes,
maxTxnOps: c.cfg.MaxTxnOps,
maxRequestBytes: c.cfg.MaxRequestBytes,
snapshotCount: c.cfg.SnapshotCount,
snapshotCatchUpEntries: c.cfg.SnapshotCatchUpEntries,
grpcKeepAliveMinTime: c.cfg.GRPCKeepAliveMinTime,
grpcKeepAliveInterval: c.cfg.GRPCKeepAliveInterval,
grpcKeepAliveTimeout: c.cfg.GRPCKeepAliveTimeout,
clientMaxCallSendMsgSize: c.cfg.ClientMaxCallSendMsgSize,
clientMaxCallRecvMsgSize: c.cfg.ClientMaxCallRecvMsgSize,
useIP: c.cfg.UseIP,
leaseCheckpointInterval: c.cfg.LeaseCheckpointInterval,
})
m.DiscoveryURL = c.cfg.DiscoveryURL
if c.cfg.UseGRPC {
if err := m.listenGRPC(); err != nil {
t.Fatal(err)
}
}
return m
}
func (c *cluster) addMember(t testing.TB) {
m := c.mustNewMember(t)
scheme := schemeFromTLSInfo(c.cfg.PeerTLS)
// send add request to the cluster
var err error
for i := 0; i < len(c.Members); i++ {
clientURL := c.URL(i)
peerURL := scheme + "://" + m.PeerListeners[0].Addr().String()
if err = c.addMemberByURL(t, clientURL, peerURL); err == nil {
break
}
}
if err != nil {
t.Fatalf("add member failed on all members error: %v", err)
}
m.InitialPeerURLsMap = types.URLsMap{}
for _, mm := range c.Members {
m.InitialPeerURLsMap[mm.Name] = mm.PeerURLs
}
m.InitialPeerURLsMap[m.Name] = m.PeerURLs
m.NewCluster = false
if err := m.Launch(); err != nil {
t.Fatal(err)
}
c.Members = append(c.Members, m)
// wait cluster to be stable to receive future client requests
c.waitMembersMatch(t, c.HTTPMembers())
}
func (c *cluster) addMemberByURL(t testing.TB, clientURL, peerURL string) error {
cc := MustNewHTTPClient(t, []string{clientURL}, c.cfg.ClientTLS)
ma := client.NewMembersAPI(cc)
ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
_, err := ma.Add(ctx, peerURL)
cancel()
if err != nil {
return err
}
// wait for the add node entry applied in the cluster
members := append(c.HTTPMembers(), client.Member{PeerURLs: []string{peerURL}, ClientURLs: []string{}})
c.waitMembersMatch(t, members)
return nil
}
// AddMember adds a fresh member to the running cluster and waits until all
// members agree on the new membership.
func (c *cluster) AddMember(t testing.TB) {
	c.addMember(t)
}

// RemoveMember removes the member with the given raft ID, failing the test
// on error.
func (c *cluster) RemoveMember(t testing.TB, id uint64) {
	if err := c.removeMember(t, id); err != nil {
		t.Fatal(err)
	}
}
func (c *cluster) removeMember(t testing.TB, id uint64) error {
// send remove request to the cluster
cc := MustNewHTTPClient(t, c.URLs(), c.cfg.ClientTLS)
ma := client.NewMembersAPI(cc)
ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
err := ma.Remove(ctx, types.ID(id).String())
cancel()
if err != nil {
return err
}
newMembers := make([]*member, 0)
for _, m := range c.Members {
if uint64(m.s.ID()) != id {
newMembers = append(newMembers, m)
} else {
select {
case <-m.s.StopNotify():
m.Terminate(t)
// 1s stop delay + election timeout + 1s disk and network delay + connection write timeout
// TODO: remove connection write timeout by selecting on http response closeNotifier
// blocking on https://github.com/golang/go/issues/9524
case <-time.After(time.Second + time.Duration(electionTicks)*tickDuration + time.Second + rafthttp.ConnWriteTimeout):
t.Fatalf("failed to remove member %s in time", m.s.ID())
}
}
}
c.Members = newMembers
c.waitMembersMatch(t, c.HTTPMembers())
return nil
}
func (c *cluster) Terminate(t testing.TB) {
var wg sync.WaitGroup
wg.Add(len(c.Members))
for _, m := range c.Members {
go func(mm *member) {
defer wg.Done()
mm.Terminate(t)
}(m)
}
wg.Wait()
}
// waitMembersMatch polls every member's client endpoint until the member
// list it reports equals membs (IDs ignored; see isMembersEqual). It never
// times out on its own — the surrounding test deadline bounds it.
func (c *cluster) waitMembersMatch(t testing.TB, membs []client.Member) {
	for _, u := range c.URLs() {
		cc := MustNewHTTPClient(t, []string{u}, c.cfg.ClientTLS)
		ma := client.NewMembersAPI(cc)
		for {
			ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
			ms, err := ma.List(ctx)
			cancel()
			if err == nil && isMembersEqual(ms, membs) {
				break
			}
			time.Sleep(tickDuration)
		}
	}
}
func (c *cluster) WaitLeader(t testing.TB) int { return c.waitLeader(t, c.Members) }
// waitLeader waits until given members agree on the same leader.
func (c *cluster) waitLeader(t testing.TB, membs []*member) int {
possibleLead := make(map[uint64]bool)
var lead uint64
for _, m := range membs {
possibleLead[uint64(m.s.ID())] = true
}
cc := MustNewHTTPClient(t, getMembersURLs(membs), nil)
kapi := client.NewKeysAPI(cc)
// ensure leader is up via linearizable get
for {
ctx, cancel := context.WithTimeout(context.Background(), 10*tickDuration+time.Second)
_, err := kapi.Get(ctx, "0", &client.GetOptions{Quorum: true})
cancel()
if err == nil || strings.Contains(err.Error(), "Key not found") {
break
}
}
for lead == 0 || !possibleLead[lead] {
lead = 0
for _, m := range membs {
select {
case <-m.s.StopNotify():
continue
default:
}
if lead != 0 && lead != m.s.Lead() {
lead = 0
time.Sleep(10 * tickDuration)
break
}
lead = m.s.Lead()
}
}
for i, m := range membs {
if uint64(m.s.ID()) == lead {
return i
}
}
return -1
}
func (c *cluster) WaitNoLeader() { c.waitNoLeader(c.Members) }
// waitNoLeader waits until given members lose leader.
// waitNoLeader blocks until none of the given (still running) members
// reports a raft leader.
func (c *cluster) waitNoLeader(membs []*member) {
	for {
		leaderless := true
		for _, m := range membs {
			// Ignore members whose server has stopped.
			select {
			case <-m.s.StopNotify():
				continue
			default:
			}
			if m.s.Lead() != 0 {
				leaderless = false
				time.Sleep(10 * tickDuration)
				break
			}
		}
		if leaderless {
			return
		}
	}
}
// waitVersion blocks until every member has resolved the cluster version.
func (c *cluster) waitVersion() {
	for _, m := range c.Members {
		for m.s.ClusterVersion() == nil {
			time.Sleep(tickDuration)
		}
	}
}
// name renders the integer i as the member name; callers pass a random int,
// which makes names effectively unique within a test run.
func (c *cluster) name(i int) string {
	return fmt.Sprint(i)
}
// isMembersEqual checks whether two members equal except ID field.
// The given wmembs should always set ID field to empty string.
// NOTE: both slices are sorted in place and the ID fields of membs are
// cleared as a side effect.
func isMembersEqual(membs []client.Member, wmembs []client.Member) bool {
	sort.Sort(SortableMemberSliceByPeerURLs(membs))
	sort.Sort(SortableMemberSliceByPeerURLs(wmembs))
	for i := range membs {
		membs[i].ID = ""
	}
	return reflect.DeepEqual(membs, wmembs)
}
// newLocalListener creates a unix-domain listener on a process-unique
// address built from a global counter plus the pid, so concurrently
// running tests never collide.
func newLocalListener(t testing.TB) net.Listener {
	c := atomic.AddInt64(&localListenCount, 1)
	// Go 1.8+ allows only numbers in port
	addr := fmt.Sprintf("127.0.0.1:%05d%05d", c+basePort, os.Getpid())
	return NewListenerWithAddr(t, addr)
}
// NewListenerWithAddr creates a unix-domain listener bound to addr,
// failing the test immediately on error.
func NewListenerWithAddr(t testing.TB, addr string) net.Listener {
	l, err := transport.NewUnixListener(addr)
	if err != nil {
		t.Fatal(err)
	}
	return l
}
type member struct {
etcdserver.ServerConfig
PeerListeners, ClientListeners []net.Listener
grpcListener net.Listener
// PeerTLSInfo enables peer TLS when set
PeerTLSInfo *transport.TLSInfo
// ClientTLSInfo enables client TLS when set
ClientTLSInfo *transport.TLSInfo
DialOptions []grpc.DialOption
raftHandler *testutil.PauseableHandler
s *etcdserver.EtcdServer
serverClosers []func()
grpcServerOpts []grpc.ServerOption
grpcServer *grpc.Server
grpcServerPeer *grpc.Server
grpcAddr string
grpcBridge *bridge
// serverClient is a clientv3 that directly calls the etcdserver.
serverClient *clientv3.Client
keepDataDirTerminate bool
clientMaxCallSendMsgSize int
clientMaxCallRecvMsgSize int
useIP bool
}
func (m *member) GRPCAddr() string { return m.grpcAddr }
type memberConfig struct {
name string
peerTLS *transport.TLSInfo
clientTLS *transport.TLSInfo
authToken string
quotaBackendBytes int64
maxTxnOps uint
maxRequestBytes uint
snapshotCount uint64
snapshotCatchUpEntries uint64
grpcKeepAliveMinTime time.Duration
grpcKeepAliveInterval time.Duration
grpcKeepAliveTimeout time.Duration
clientMaxCallSendMsgSize int
clientMaxCallRecvMsgSize int
useIP bool
leaseCheckpointInterval time.Duration
}
// mustNewMember return an inited member with the given name. If peerTLS is
// set, it will use https scheme to communicate between peers.
func mustNewMember(t testing.TB, mcfg memberConfig) *member {
var err error
m := &member{}
peerScheme := schemeFromTLSInfo(mcfg.peerTLS)
clientScheme := schemeFromTLSInfo(mcfg.clientTLS)
pln := newLocalListener(t)
m.PeerListeners = []net.Listener{pln}
m.PeerURLs, err = types.NewURLs([]string{peerScheme + "://" + pln.Addr().String()})
if err != nil {
t.Fatal(err)
}
m.PeerTLSInfo = mcfg.peerTLS
cln := newLocalListener(t)
m.ClientListeners = []net.Listener{cln}
m.ClientURLs, err = types.NewURLs([]string{clientScheme + "://" + cln.Addr().String()})
if err != nil {
t.Fatal(err)
}
m.ClientTLSInfo = mcfg.clientTLS
m.Name = mcfg.name
m.DataDir, err = ioutil.TempDir(os.TempDir(), "etcd")
if err != nil {
t.Fatal(err)
}
clusterStr := fmt.Sprintf("%s=%s://%s", mcfg.name, peerScheme, pln.Addr().String())
m.InitialPeerURLsMap, err = types.NewURLsMap(clusterStr)
if err != nil {
t.Fatal(err)
}
m.InitialClusterToken = clusterName
m.NewCluster = true
m.BootstrapTimeout = 10 * time.Millisecond
if m.PeerTLSInfo != nil {
m.ServerConfig.PeerTLSInfo = *m.PeerTLSInfo
}
m.ElectionTicks = electionTicks
m.InitialElectionTickAdvance = true
m.TickMs = uint(tickDuration / time.Millisecond)
m.QuotaBackendBytes = mcfg.quotaBackendBytes
m.MaxTxnOps = mcfg.maxTxnOps
if m.MaxTxnOps == 0 {
m.MaxTxnOps = embed.DefaultMaxTxnOps
}
m.MaxRequestBytes = mcfg.maxRequestBytes
if m.MaxRequestBytes == 0 {
m.MaxRequestBytes = embed.DefaultMaxRequestBytes
}
m.SnapshotCount = etcdserver.DefaultSnapshotCount
if mcfg.snapshotCount != 0 {
m.SnapshotCount = mcfg.snapshotCount
}
m.SnapshotCatchUpEntries = etcdserver.DefaultSnapshotCatchUpEntries
if mcfg.snapshotCatchUpEntries != 0 {
m.SnapshotCatchUpEntries = mcfg.snapshotCatchUpEntries
}
// for the purpose of integration testing, simple token is enough
m.AuthToken = "simple"
if mcfg.authToken != "" {
m.AuthToken = mcfg.authToken
}
m.BcryptCost = uint(bcrypt.MinCost) // use min bcrypt cost to speedy up integration testing
m.grpcServerOpts = []grpc.ServerOption{}
if mcfg.grpcKeepAliveMinTime > time.Duration(0) {
m.grpcServerOpts = append(m.grpcServerOpts, grpc.KeepaliveEnforcementPolicy(keepalive.EnforcementPolicy{
MinTime: mcfg.grpcKeepAliveMinTime,
PermitWithoutStream: false,
}))
}
if mcfg.grpcKeepAliveInterval > time.Duration(0) &&
mcfg.grpcKeepAliveTimeout > time.Duration(0) {
m.grpcServerOpts = append(m.grpcServerOpts, grpc.KeepaliveParams(keepalive.ServerParameters{
Time: mcfg.grpcKeepAliveInterval,
Timeout: mcfg.grpcKeepAliveTimeout,
}))
}
m.clientMaxCallSendMsgSize = mcfg.clientMaxCallSendMsgSize
m.clientMaxCallRecvMsgSize = mcfg.clientMaxCallRecvMsgSize
m.useIP = mcfg.useIP
m.LeaseCheckpointInterval = mcfg.leaseCheckpointInterval
m.InitialCorruptCheck = true
lcfg := logutil.DefaultZapLoggerConfig
m.LoggerConfig = &lcfg
m.LoggerConfig.OutputPaths = []string{"/dev/null"}
m.LoggerConfig.ErrorOutputPaths = []string{"/dev/null"}
if os.Getenv("CLUSTER_DEBUG") != "" {
m.LoggerConfig.OutputPaths = []string{"stderr"}
m.LoggerConfig.ErrorOutputPaths = []string{"stderr"}
}
m.Logger, err = m.LoggerConfig.Build()
if err != nil {
t.Fatal(err)
}
return m
}
// listenGRPC starts a grpc server over a unix domain socket on the member
func (m *member) listenGRPC() error {
// prefix with localhost so cert has right domain
m.grpcAddr = "localhost:" + m.Name
if m.useIP { // for IP-only TLS certs
m.grpcAddr = "127.0.0.1:" + m.Name
}
l, err := transport.NewUnixListener(m.grpcAddr)
if err != nil {
return fmt.Errorf("listen failed on grpc socket %s (%v)", m.grpcAddr, err)
}
m.grpcBridge, err = newBridge(m.grpcAddr)
if err != nil {
l.Close()
return err
}
m.grpcAddr = schemeFromTLSInfo(m.ClientTLSInfo) + "://" + m.grpcBridge.inaddr
m.grpcListener = l
return nil
}
func (m *member) ElectionTimeout() time.Duration {
return time.Duration(m.s.Cfg.ElectionTicks*int(m.s.Cfg.TickMs)) * time.Millisecond
}
// ID returns the etcdserver ID of this member.
func (m *member) ID() types.ID { return m.s.ID() }

// The following helpers delegate to the test grpc bridge sitting between
// clients and the member; see the bridge implementation for exact semantics.
func (m *member) DropConnections() { m.grpcBridge.Reset() }
func (m *member) PauseConnections() { m.grpcBridge.Pause() }
func (m *member) UnpauseConnections() { m.grpcBridge.Unpause() }
func (m *member) Blackhole() { m.grpcBridge.Blackhole() }
func (m *member) Unblackhole() { m.grpcBridge.Unblackhole() }
// NewClientV3 creates a new grpc client connection to the member
func NewClientV3(m *member) (*clientv3.Client, error) {
if m.grpcAddr == "" {
return nil, fmt.Errorf("member not configured for grpc")
}
cfg := clientv3.Config{
Endpoints: []string{m.grpcAddr},
DialTimeout: 5 * time.Second,
DialOptions: []grpc.DialOption{grpc.WithBlock()},
MaxCallSendMsgSize: m.clientMaxCallSendMsgSize,
MaxCallRecvMsgSize: m.clientMaxCallRecvMsgSize,
}
if m.ClientTLSInfo != nil {
tls, err := m.ClientTLSInfo.ClientConfig()
if err != nil {
return nil, err
}
cfg.TLS = tls
}
if m.DialOptions != nil {
cfg.DialOptions = append(cfg.DialOptions, m.DialOptions...)
}
return newClientV3(cfg)
}
// Clone returns a member with the same server configuration. The returned
// member will not set PeerListeners and ClientListeners.
func (m *member) Clone(t testing.TB) *member {
mm := &member{}
mm.ServerConfig = m.ServerConfig
var err error
clientURLStrs := m.ClientURLs.StringSlice()
mm.ClientURLs, err = types.NewURLs(clientURLStrs)
if err != nil {
// this should never fail
panic(err)
}
peerURLStrs := m.PeerURLs.StringSlice()
mm.PeerURLs, err = types.NewURLs(peerURLStrs)
if err != nil {
// this should never fail
panic(err)
}
clusterStr := m.InitialPeerURLsMap.String()
mm.InitialPeerURLsMap, err = types.NewURLsMap(clusterStr)
if err != nil {
// this should never fail
panic(err)
}
mm.InitialClusterToken = m.InitialClusterToken
mm.ElectionTicks = m.ElectionTicks
mm.PeerTLSInfo = m.PeerTLSInfo
mm.ClientTLSInfo = m.ClientTLSInfo
return mm
}
// Launch starts a member based on ServerConfig, PeerListeners
// and ClientListeners.
func (m *member) Launch() error {
lg.Info(
"launching a member",
zap.String("name", m.Name),
zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
zap.String("grpc-address", m.grpcAddr),
)
var err error
if m.s, err = etcdserver.NewServer(m.ServerConfig); err != nil {
return fmt.Errorf("failed to initialize the etcd server: %v", err)
}
m.s.SyncTicker = time.NewTicker(500 * time.Millisecond)
m.s.Start()
var peerTLScfg *tls.Config
if m.PeerTLSInfo != nil && !m.PeerTLSInfo.Empty() {
if peerTLScfg, err = m.PeerTLSInfo.ServerConfig(); err != nil {
return err
}
}
if m.grpcListener != nil {
var (
tlscfg *tls.Config
)
if m.ClientTLSInfo != nil && !m.ClientTLSInfo.Empty() {
tlscfg, err = m.ClientTLSInfo.ServerConfig()
if err != nil {
return err
}
}
m.grpcServer = v3rpc.Server(m.s, tlscfg, m.grpcServerOpts...)
m.grpcServerPeer = v3rpc.Server(m.s, peerTLScfg)
m.serverClient = v3client.New(m.s)
lockpb.RegisterLockServer(m.grpcServer, v3lock.NewLockServer(m.serverClient))
epb.RegisterElectionServer(m.grpcServer, v3election.NewElectionServer(m.serverClient))
go m.grpcServer.Serve(m.grpcListener)
}
m.raftHandler = &testutil.PauseableHandler{Next: etcdhttp.NewPeerHandler(m.Logger, m.s)}
h := (http.Handler)(m.raftHandler)
if m.grpcListener != nil {
h = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.ProtoMajor == 2 && strings.Contains(r.Header.Get("Content-Type"), "application/grpc") {
m.grpcServerPeer.ServeHTTP(w, r)
} else {
m.raftHandler.ServeHTTP(w, r)
}
})
}
for _, ln := range m.PeerListeners {
cm := cmux.New(ln)
// don't hang on matcher after closing listener
cm.SetReadTimeout(time.Second)
if m.grpcServer != nil {
grpcl := cm.Match(cmux.HTTP2())
go m.grpcServerPeer.Serve(grpcl)
}
// serve http1/http2 rafthttp/grpc
ll := cm.Match(cmux.Any())
if peerTLScfg != nil {
if ll, err = transport.NewTLSListener(ll, m.PeerTLSInfo); err != nil {
return err
}
}
hs := &httptest.Server{
Listener: ll,
Config: &http.Server{
Handler: h,
TLSConfig: peerTLScfg,
ErrorLog: log.New(ioutil.Discard, "net/http", 0),
},
TLS: peerTLScfg,
}
hs.Start()
donec := make(chan struct{})
go func() {
defer close(donec)
cm.Serve()
}()
closer := func() {
ll.Close()
hs.CloseClientConnections()
hs.Close()
<-donec
}
m.serverClosers = append(m.serverClosers, closer)
}
for _, ln := range m.ClientListeners {
hs := &httptest.Server{
Listener: ln,
Config: &http.Server{
Handler: v2http.NewClientHandler(
m.Logger,
m.s,
m.ServerConfig.ReqTimeout(),
),
ErrorLog: log.New(ioutil.Discard, "net/http", 0),
},
}
if m.ClientTLSInfo == nil {
hs.Start()
} else {
info := m.ClientTLSInfo
hs.TLS, err = info.ServerConfig()
if err != nil {
return err
}
// baseConfig is called on initial TLS handshake start.
//
// Previously,
// 1. Server has non-empty (*tls.Config).Certificates on client hello
// 2. Server calls (*tls.Config).GetCertificate iff:
// - Server's (*tls.Config).Certificates is not empty, or
// - Client supplies SNI; non-empty (*tls.ClientHelloInfo).ServerName
//
// When (*tls.Config).Certificates is always populated on initial handshake,
// client is expected to provide a valid matching SNI to pass the TLS
// verification, thus trigger server (*tls.Config).GetCertificate to reload
// TLS assets. However, a cert whose SAN field does not include domain names
// but only IP addresses, has empty (*tls.ClientHelloInfo).ServerName, thus
// it was never able to trigger TLS reload on initial handshake; first
// ceritifcate object was being used, never being updated.
//
// Now, (*tls.Config).Certificates is created empty on initial TLS client
// handshake, in order to trigger (*tls.Config).GetCertificate and populate
// rest of the certificates on every new TLS connection, even when client
// SNI is empty (e.g. cert only includes IPs).
//
// This introduces another problem with "httptest.Server":
// when server initial certificates are empty, certificates
// are overwritten by Go's internal test certs, which have
// different SAN fields (e.g. example.com). To work around,
// re-overwrite (*tls.Config).Certificates before starting
// test server.
tlsCert, err := tlsutil.NewCert(info.CertFile, info.KeyFile, nil)
if err != nil {
return err
}
hs.TLS.Certificates = []tls.Certificate{*tlsCert}
hs.StartTLS()
}
closer := func() {
ln.Close()
hs.CloseClientConnections()
hs.Close()
}
m.serverClosers = append(m.serverClosers, closer)
}
lg.Info(
"launched a member",
zap.String("name", m.Name),
zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
zap.String("grpc-address", m.grpcAddr),
)
return nil
}
func (m *member) WaitOK(t testing.TB) {
m.WaitStarted(t)
for m.s.Leader() == 0 {
time.Sleep(tickDuration)
}
}
func (m *member) WaitStarted(t testing.TB) {
cc := MustNewHTTPClient(t, []string{m.URL()}, m.ClientTLSInfo)
kapi := client.NewKeysAPI(cc)
for {
ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
_, err := kapi.Get(ctx, "/", nil)
if err != nil {
time.Sleep(tickDuration)
continue
}
cancel()
break
}
}
// WaitClientV3 retries a linearizable Get through kv until it succeeds or
// requestTimeout elapses; on timeout the test is failed with the last error.
func WaitClientV3(t testing.TB, kv clientv3.KV) {
	timeout := time.Now().Add(requestTimeout)
	var err error
	for time.Now().Before(timeout) {
		ctx, cancel := context.WithTimeout(context.Background(), requestTimeout)
		_, err = kv.Get(ctx, "/")
		cancel()
		if err == nil {
			return
		}
		time.Sleep(tickDuration)
	}
	if err != nil {
		t.Fatalf("timed out waiting for client: %v", err)
	}
}
// URL returns the first advertised client URL of this member.
func (m *member) URL() string { return m.ClientURLs[0].String() }

// Pause stops serving peer (raft) HTTP traffic and pauses outbound sends.
func (m *member) Pause() {
	m.raftHandler.Pause()
	m.s.PauseSending()
}

// Resume re-enables peer traffic and outbound sends after a Pause.
func (m *member) Resume() {
	m.raftHandler.Resume()
	m.s.ResumeSending()
}
// Close stops the member's etcdserver and closes its connections:
// the grpc bridge, the in-process client, both grpc servers, then the
// server itself (HardStop: no leadership transfer), and finally any
// registered listener/http-server closers.
func (m *member) Close() {
	if m.grpcBridge != nil {
		m.grpcBridge.Close()
		m.grpcBridge = nil
	}
	if m.serverClient != nil {
		m.serverClient.Close()
		m.serverClient = nil
	}
	if m.grpcServer != nil {
		// NOTE(review): Stop() before GracefulStop() appears intended to
		// abort in-flight RPCs so GracefulStop returns promptly — confirm.
		m.grpcServer.Stop()
		m.grpcServer.GracefulStop()
		m.grpcServer = nil
		m.grpcServerPeer.Stop()
		m.grpcServerPeer.GracefulStop()
		m.grpcServerPeer = nil
	}
	m.s.HardStop()
	for _, f := range m.serverClosers {
		f()
	}
}
// Stop stops the member, but the data dir of the member is preserved.
func (m *member) Stop(t testing.TB) {
lg.Info(
"stopping a member",
zap.String("name", m.Name),
zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
zap.String("grpc-address", m.grpcAddr),
)
m.Close()
m.serverClosers = nil
lg.Info(
"stopped a member",
zap.String("name", m.Name),
zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
zap.String("grpc-address", m.grpcAddr),
)
}
// checkLeaderTransition waits for leader transition, returning the new leader ID.
// It polls once per raft tick until the member reports a nonzero leader that
// differs from oldLead; it never times out on its own.
func checkLeaderTransition(m *member, oldLead uint64) uint64 {
	interval := time.Duration(m.s.Cfg.TickMs) * time.Millisecond
	for m.s.Lead() == 0 || (m.s.Lead() == oldLead) {
		time.Sleep(interval)
	}
	return m.s.Lead()
}
// StopNotify unblocks when a member stop completes.
// The returned channel is closed by the underlying etcd server once it has
// fully shut down.
func (m *member) StopNotify() <-chan struct{} {
	return m.s.StopNotify()
}
// Restart starts the member using the preserved data dir.
// Listeners cannot be reused after a stop, so fresh peer and client
// listeners are re-bound on the same addresses before relaunching; the gRPC
// listener is likewise recreated if the member was serving gRPC. Returns the
// error from Launch, if any.
func (m *member) Restart(t testing.TB) error {
	lg.Info(
		"restarting a member",
		zap.String("name", m.Name),
		zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
		zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
		zap.String("grpc-address", m.grpcAddr),
	)
	// Re-bind peer listeners on the addresses of the old (now closed) ones.
	newPeerListeners := make([]net.Listener, 0)
	for _, ln := range m.PeerListeners {
		newPeerListeners = append(newPeerListeners, NewListenerWithAddr(t, ln.Addr().String()))
	}
	m.PeerListeners = newPeerListeners
	// Same for client listeners.
	newClientListeners := make([]net.Listener, 0)
	for _, ln := range m.ClientListeners {
		newClientListeners = append(newClientListeners, NewListenerWithAddr(t, ln.Addr().String()))
	}
	m.ClientListeners = newClientListeners
	// Only members that were serving gRPC need their gRPC listener rebuilt.
	if m.grpcListener != nil {
		if err := m.listenGRPC(); err != nil {
			t.Fatal(err)
		}
	}
	err := m.Launch()
	lg.Info(
		"restarted a member",
		zap.String("name", m.Name),
		zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
		zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
		zap.String("grpc-address", m.grpcAddr),
		zap.Error(err),
	)
	return err
}
// Terminate stops the member and removes the data dir.
// If keepDataDirTerminate is set, the data directory is preserved so a test
// can inspect or reuse it after termination.
func (m *member) Terminate(t testing.TB) {
	lg.Info(
		"terminating a member",
		zap.String("name", m.Name),
		zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
		zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
		zap.String("grpc-address", m.grpcAddr),
	)
	m.Close()
	if !m.keepDataDirTerminate {
		if err := os.RemoveAll(m.ServerConfig.DataDir); err != nil {
			t.Fatal(err)
		}
	}
	lg.Info(
		"terminated a member",
		zap.String("name", m.Name),
		zap.Strings("advertise-peer-urls", m.PeerURLs.StringSlice()),
		zap.Strings("listen-client-urls", m.ClientURLs.StringSlice()),
		zap.String("grpc-address", m.grpcAddr),
	)
}
// Metric gets the metric value for a member.
// It scrapes the member's /metrics endpoint over plain HTTP and returns the
// value (second whitespace-separated field) of the first line whose name
// starts with metricName. Returns "" with a nil error when no line matches.
func (m *member) Metric(metricName string) (string, error) {
	cfgtls := transport.TLSInfo{}
	tr, err := transport.NewTimeoutTransport(cfgtls, time.Second, time.Second, time.Second)
	if err != nil {
		return "", err
	}
	cli := &http.Client{Transport: tr}
	resp, err := cli.Get(m.ClientURLs[0].String() + "/metrics")
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	b, rerr := ioutil.ReadAll(resp.Body)
	if rerr != nil {
		return "", rerr
	}
	for _, l := range strings.Split(string(b), "\n") {
		if !strings.HasPrefix(l, metricName) {
			continue
		}
		// Guard against malformed lines: the original indexed [1] of a
		// space-split unconditionally and would panic on a line with no
		// value field.
		fields := strings.Fields(l)
		if len(fields) < 2 {
			continue
		}
		return fields[1], nil
	}
	return "", nil
}
// InjectPartition drops connections from m to others, vice versa.
// The cut is symmetric — each side severs its peer link to the other — so
// the pair behaves as if fully network-partitioned.
func (m *member) InjectPartition(t testing.TB, others ...*member) {
	for _, other := range others {
		m.s.CutPeer(other.s.ID())
		other.s.CutPeer(m.s.ID())
	}
}

// RecoverPartition recovers connections from m to others, vice versa,
// mending the symmetric cut made by InjectPartition.
func (m *member) RecoverPartition(t testing.TB, others ...*member) {
	for _, other := range others {
		m.s.MendPeer(other.s.ID())
		other.s.MendPeer(m.s.ID())
	}
}
// MustNewHTTPClient builds an etcd v2 HTTP client for the given endpoints and
// fails the test on any construction error. A nil tls yields a plain
// (non-TLS) transport.
func MustNewHTTPClient(t testing.TB, eps []string, tls *transport.TLSInfo) client.Client {
	var tlsInfo transport.TLSInfo
	if tls != nil {
		tlsInfo = *tls
	}
	c, err := client.New(client.Config{
		Transport: mustNewTransport(t, tlsInfo),
		Endpoints: eps,
	})
	if err != nil {
		t.Fatal(err)
	}
	return c
}
// mustNewTransport builds an HTTP transport with timeouts suited to the
// integration tests, failing the test on error.
func mustNewTransport(t testing.TB, tlsInfo transport.TLSInfo) *http.Transport {
	// tick in integration test is short, so 1s dial timeout could play well.
	tr, err := transport.NewTimeoutTransport(tlsInfo, time.Second, rafthttp.ConnReadTimeout, rafthttp.ConnWriteTimeout)
	if err != nil {
		t.Fatal(err)
	}
	return tr
}
// SortableMemberSliceByPeerURLs implements sort.Interface for
// []client.Member, ordering members by their first advertised peer URL.
type SortableMemberSliceByPeerURLs []client.Member

func (p SortableMemberSliceByPeerURLs) Len() int { return len(p) }
func (p SortableMemberSliceByPeerURLs) Less(i, j int) bool {
	return p[i].PeerURLs[0] < p[j].PeerURLs[0]
}
func (p SortableMemberSliceByPeerURLs) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
// ClusterV3 wraps a cluster with one pre-built clientv3 client per member.
// mu guards clients, whose slots may be released concurrently via TakeClient.
type ClusterV3 struct {
	*cluster
	mu      sync.Mutex
	clients []*clientv3.Client
}
// NewClusterV3 returns a launched cluster with a grpc client connection
// for each cluster member.
// Setting the CLIENT_DEBUG environment variable enables verbose clientv3
// logging; cfg.SkipCreatingClient leaves clus.clients empty for tests that
// construct their own clients.
func NewClusterV3(t testing.TB, cfg *ClusterConfig) *ClusterV3 {
	cfg.UseGRPC = true
	if os.Getenv("CLIENT_DEBUG") != "" {
		clientv3.SetLogger(grpclog.NewLoggerV2WithVerbosity(os.Stderr, os.Stderr, os.Stderr, 4))
	}
	clus := &ClusterV3{
		cluster: NewClusterByConfig(t, cfg),
	}
	clus.Launch(t)
	if !cfg.SkipCreatingClient {
		for _, m := range clus.Members {
			client, err := NewClientV3(m)
			if err != nil {
				t.Fatalf("cannot create client: %v", err)
			}
			clus.clients = append(clus.clients, client)
		}
	}
	return clus
}
// TakeClient transfers ownership of the idx-th client to the caller: the
// slot is set to nil so Terminate will not close that client.
func (c *ClusterV3) TakeClient(idx int) {
	c.mu.Lock()
	c.clients[idx] = nil
	c.mu.Unlock()
}

// Terminate closes every client still owned by the cluster (skipping slots
// released via TakeClient), then terminates the underlying cluster.
func (c *ClusterV3) Terminate(t testing.TB) {
	c.mu.Lock()
	for _, client := range c.clients {
		if client == nil {
			continue
		}
		if err := client.Close(); err != nil {
			t.Error(err)
		}
	}
	c.mu.Unlock()
	c.cluster.Terminate(t)
}

// RandClient returns the client of a randomly chosen member.
func (c *ClusterV3) RandClient() *clientv3.Client {
	return c.clients[rand.Intn(len(c.clients))]
}

// Client returns the client connected to member i.
func (c *ClusterV3) Client(i int) *clientv3.Client {
	return c.clients[i]
}
// grpcAPI bundles the raw gRPC service clients backing a single client
// connection, one field per etcd service.
type grpcAPI struct {
	// Cluster is the cluster API for the client's connection.
	Cluster pb.ClusterClient
	// KV is the keyvalue API for the client's connection.
	KV pb.KVClient
	// Lease is the lease API for the client's connection.
	Lease pb.LeaseClient
	// Watch is the watch API for the client's connection.
	Watch pb.WatchClient
	// Maintenance is the maintenance API for the client's connection.
	Maintenance pb.MaintenanceClient
	// Auth is the authentication API for the client's connection.
	Auth pb.AuthClient
	// Lock is the lock API for the client's connection.
	Lock lockpb.LockClient
	// Election is the election API for the client's connection.
	Election epb.ElectionClient
}
| mkumatag/etcd | integration/cluster.go | GO | apache-2.0 | 35,192 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.activiti.engine.ActivitiIllegalArgumentException;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.delegate.event.ActivitiEvent;
import org.activiti.engine.delegate.event.ActivitiEventListener;
import org.activiti.engine.delegate.event.ActivitiEventType;
import org.activiti.engine.form.FormData;
import org.activiti.engine.impl.cmd.ActivateProcessInstanceCmd;
import org.activiti.engine.impl.cmd.AddEventListenerCommand;
import org.activiti.engine.impl.cmd.AddIdentityLinkForProcessInstanceCmd;
import org.activiti.engine.impl.cmd.DeleteIdentityLinkForProcessInstanceCmd;
import org.activiti.engine.impl.cmd.DeleteProcessInstanceCmd;
import org.activiti.engine.impl.cmd.DispatchEventCommand;
import org.activiti.engine.impl.cmd.FindActiveActivityIdsCmd;
import org.activiti.engine.impl.cmd.GetExecutionVariableCmd;
import org.activiti.engine.impl.cmd.GetExecutionVariablesCmd;
import org.activiti.engine.impl.cmd.GetIdentityLinksForProcessInstanceCmd;
import org.activiti.engine.impl.cmd.GetProcessInstanceEventsCmd;
import org.activiti.engine.impl.cmd.GetStartFormCmd;
import org.activiti.engine.impl.cmd.HasExecutionVariableCmd;
import org.activiti.engine.impl.cmd.MessageEventReceivedCmd;
import org.activiti.engine.impl.cmd.RemoveEventListenerCommand;
import org.activiti.engine.impl.cmd.RemoveExecutionVariablesCmd;
import org.activiti.engine.impl.cmd.SetExecutionVariablesCmd;
import org.activiti.engine.impl.cmd.SetProcessInstanceBusinessKeyCmd;
import org.activiti.engine.impl.cmd.SetProcessInstanceNameCmd;
import org.activiti.engine.impl.cmd.SignalCmd;
import org.activiti.engine.impl.cmd.SignalEventReceivedCmd;
import org.activiti.engine.impl.cmd.StartProcessInstanceByMessageCmd;
import org.activiti.engine.impl.cmd.StartProcessInstanceCmd;
import org.activiti.engine.impl.cmd.SuspendProcessInstanceCmd;
import org.activiti.engine.impl.runtime.ProcessInstanceBuilderImpl;
import org.activiti.engine.runtime.ExecutionQuery;
import org.activiti.engine.runtime.NativeExecutionQuery;
import org.activiti.engine.runtime.NativeProcessInstanceQuery;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.runtime.ProcessInstanceQuery;
import org.activiti.engine.runtime.ProcessInstanceBuilder;
import org.activiti.engine.task.Event;
import org.activiti.engine.task.IdentityLink;
import org.activiti.engine.task.IdentityLinkType;
/**
 * Default {@link RuntimeService} implementation.
 *
 * <p>This class is a thin facade: every public operation is translated into a
 * command object and dispatched through the inherited {@code commandExecutor},
 * so no state is held here and all behavior lives in the command classes.
 *
 * @author Tom Baeyens
 * @author Daniel Meyer
 */
public class RuntimeServiceImpl extends ServiceImpl implements RuntimeService {

  // ------------------------------------------------------------------------
  // Process instance start — by process definition key, with optional
  // business key, variables and tenant id.
  // ------------------------------------------------------------------------

  public ProcessInstance startProcessInstanceByKey(String processDefinitionKey) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, null, null));
  }

  public ProcessInstance startProcessInstanceByKey(String processDefinitionKey, String businessKey) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, businessKey, null));
  }

  public ProcessInstance startProcessInstanceByKey(String processDefinitionKey, Map<String, Object> variables) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, null, variables));
  }

  public ProcessInstance startProcessInstanceByKey(String processDefinitionKey, String businessKey, Map<String, Object> variables) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, businessKey, variables));
  }

  public ProcessInstance startProcessInstanceByKeyAndTenantId(String processDefinitionKey, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, null, null, tenantId));
  }

  public ProcessInstance startProcessInstanceByKeyAndTenantId(String processDefinitionKey, String businessKey, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, businessKey, null, tenantId));
  }

  public ProcessInstance startProcessInstanceByKeyAndTenantId(String processDefinitionKey, Map<String, Object> variables, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, null, variables, tenantId));
  }

  public ProcessInstance startProcessInstanceByKeyAndTenantId(String processDefinitionKey, String businessKey, Map<String, Object> variables, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processDefinitionKey, null, businessKey, variables, tenantId));
  }

  // ------------------------------------------------------------------------
  // Process instance start — by concrete process definition id.
  // ------------------------------------------------------------------------

  public ProcessInstance startProcessInstanceById(String processDefinitionId) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(null, processDefinitionId, null, null));
  }

  public ProcessInstance startProcessInstanceById(String processDefinitionId, String businessKey) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(null, processDefinitionId, businessKey, null));
  }

  public ProcessInstance startProcessInstanceById(String processDefinitionId, Map<String, Object> variables) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(null, processDefinitionId, null, variables));
  }

  public ProcessInstance startProcessInstanceById(String processDefinitionId, String businessKey, Map<String, Object> variables) {
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(null, processDefinitionId, businessKey, variables));
  }

  // ------------------------------------------------------------------------
  // Process instance lifecycle and queries.
  // ------------------------------------------------------------------------

  public void deleteProcessInstance(String processInstanceId, String deleteReason) {
    commandExecutor.execute(new DeleteProcessInstanceCmd(processInstanceId, deleteReason));
  }

  public ExecutionQuery createExecutionQuery() {
    return new ExecutionQueryImpl(commandExecutor);
  }

  public NativeExecutionQuery createNativeExecutionQuery() {
    return new NativeExecutionQueryImpl(commandExecutor);
  }

  public NativeProcessInstanceQuery createNativeProcessInstanceQuery() {
    return new NativeProcessInstanceQueryImpl(commandExecutor);
  }

  public void updateBusinessKey(String processInstanceId, String businessKey) {
    commandExecutor.execute(new SetProcessInstanceBusinessKeyCmd(processInstanceId, businessKey));
  }

  // ------------------------------------------------------------------------
  // Execution variables. The boolean flag passed to the commands selects
  // execution-local scope (true) versus the full scope chain (false).
  // ------------------------------------------------------------------------

  public Map<String, Object> getVariables(String executionId) {
    return commandExecutor.execute(new GetExecutionVariablesCmd(executionId, null, false));
  }

  public Map<String, Object> getVariablesLocal(String executionId) {
    return commandExecutor.execute(new GetExecutionVariablesCmd(executionId, null, true));
  }

  public Map<String, Object> getVariables(String executionId, Collection<String> variableNames) {
    return commandExecutor.execute(new GetExecutionVariablesCmd(executionId, variableNames, false));
  }

  public Map<String, Object> getVariablesLocal(String executionId, Collection<String> variableNames) {
    return commandExecutor.execute(new GetExecutionVariablesCmd(executionId, variableNames, true));
  }

  public Object getVariable(String executionId, String variableName) {
    return commandExecutor.execute(new GetExecutionVariableCmd(executionId, variableName, false));
  }

  @Override
  public <T> T getVariable(String executionId, String variableName, Class<T> variableClass) {
    // Typed convenience overload: delegates to the untyped getter and casts.
    return variableClass.cast(getVariable(executionId, variableName));
  }

  @Override
  public boolean hasVariable(String executionId, String variableName) {
    return commandExecutor.execute(new HasExecutionVariableCmd(executionId, variableName, false));
  }

  public Object getVariableLocal(String executionId, String variableName) {
    return commandExecutor.execute(new GetExecutionVariableCmd(executionId, variableName, true));
  }

  @Override
  public <T> T getVariableLocal(String executionId, String variableName, Class<T> variableClass) {
    return variableClass.cast(getVariableLocal(executionId, variableName));
  }

  @Override
  public boolean hasVariableLocal(String executionId, String variableName) {
    return commandExecutor.execute(new HasExecutionVariableCmd(executionId, variableName, true));
  }

  public void setVariable(String executionId, String variableName, Object value) {
    if (variableName == null) {
      throw new ActivitiIllegalArgumentException("variableName is null");
    }
    // Single-entry map: the command API only accepts variables in bulk.
    Map<String, Object> variables = new HashMap<String, Object>();
    variables.put(variableName, value);
    commandExecutor.execute(new SetExecutionVariablesCmd(executionId, variables, false));
  }

  public void setVariableLocal(String executionId, String variableName, Object value) {
    if (variableName == null) {
      throw new ActivitiIllegalArgumentException("variableName is null");
    }
    Map<String, Object> variables = new HashMap<String, Object>();
    variables.put(variableName, value);
    commandExecutor.execute(new SetExecutionVariablesCmd(executionId, variables, true));
  }

  public void setVariables(String executionId, Map<String, ? extends Object> variables) {
    commandExecutor.execute(new SetExecutionVariablesCmd(executionId, variables, false));
  }

  public void setVariablesLocal(String executionId, Map<String, ? extends Object> variables) {
    commandExecutor.execute(new SetExecutionVariablesCmd(executionId, variables, true));
  }

  public void removeVariable(String executionId, String variableName) {
    Collection<String> variableNames = new ArrayList<String>();
    variableNames.add(variableName);
    commandExecutor.execute(new RemoveExecutionVariablesCmd(executionId, variableNames, false));
  }

  public void removeVariableLocal(String executionId, String variableName) {
    Collection<String> variableNames = new ArrayList<String>();
    variableNames.add(variableName);
    commandExecutor.execute(new RemoveExecutionVariablesCmd(executionId, variableNames, true));
  }

  public void removeVariables(String executionId, Collection<String> variableNames) {
    commandExecutor.execute(new RemoveExecutionVariablesCmd(executionId, variableNames, false));
  }

  public void removeVariablesLocal(String executionId, Collection<String> variableNames) {
    commandExecutor.execute(new RemoveExecutionVariablesCmd(executionId, variableNames, true));
  }

  // ------------------------------------------------------------------------
  // Signalling a waiting execution (BPMN "signal" in the trigger sense).
  // ------------------------------------------------------------------------

  public void signal(String executionId) {
    commandExecutor.execute(new SignalCmd(executionId, null, null, null));
  }

  public void signal(String executionId, Map<String, Object> processVariables) {
    commandExecutor.execute(new SignalCmd(executionId, null, null, processVariables));
  }

  // ------------------------------------------------------------------------
  // Identity links (user/group associations on a process instance). The
  // addParticipant*/deleteParticipant* variants are shorthands for the
  // PARTICIPANT link type.
  // ------------------------------------------------------------------------

  public void addUserIdentityLink(String processInstanceId, String userId, String identityLinkType) {
    commandExecutor.execute(new AddIdentityLinkForProcessInstanceCmd(processInstanceId, userId, null, identityLinkType));
  }

  public void addGroupIdentityLink(String processInstanceId, String groupId, String identityLinkType) {
    commandExecutor.execute(new AddIdentityLinkForProcessInstanceCmd(processInstanceId, null, groupId, identityLinkType));
  }

  public void addParticipantUser(String processInstanceId, String userId) {
    commandExecutor.execute(new AddIdentityLinkForProcessInstanceCmd(processInstanceId, userId, null, IdentityLinkType.PARTICIPANT));
  }

  public void addParticipantGroup(String processInstanceId, String groupId) {
    commandExecutor.execute(new AddIdentityLinkForProcessInstanceCmd(processInstanceId, null, groupId, IdentityLinkType.PARTICIPANT));
  }

  public void deleteParticipantUser(String processInstanceId, String userId) {
    commandExecutor.execute(new DeleteIdentityLinkForProcessInstanceCmd(processInstanceId, userId, null, IdentityLinkType.PARTICIPANT));
  }

  public void deleteParticipantGroup(String processInstanceId, String groupId) {
    commandExecutor.execute(new DeleteIdentityLinkForProcessInstanceCmd(processInstanceId, null, groupId, IdentityLinkType.PARTICIPANT));
  }

  public void deleteUserIdentityLink(String processInstanceId, String userId, String identityLinkType) {
    commandExecutor.execute(new DeleteIdentityLinkForProcessInstanceCmd(processInstanceId, userId, null, identityLinkType));
  }

  public void deleteGroupIdentityLink(String processInstanceId, String groupId, String identityLinkType) {
    commandExecutor.execute(new DeleteIdentityLinkForProcessInstanceCmd(processInstanceId, null, groupId, identityLinkType));
  }

  public List<IdentityLink> getIdentityLinksForProcessInstance(String processInstanceId) {
    return commandExecutor.execute(new GetIdentityLinksForProcessInstanceCmd(processInstanceId));
  }

  // ------------------------------------------------------------------------
  // Queries, forms, suspension.
  // ------------------------------------------------------------------------

  public ProcessInstanceQuery createProcessInstanceQuery() {
    return new ProcessInstanceQueryImpl(commandExecutor);
  }

  public List<String> getActiveActivityIds(String executionId) {
    return commandExecutor.execute(new FindActiveActivityIdsCmd(executionId));
  }

  // NOTE(review): despite the name, this takes a process *definition* id and
  // returns the definition's start form data (GetStartFormCmd). The name is
  // fixed by the RuntimeService interface.
  public FormData getFormInstanceById(String processDefinitionId) {
    return commandExecutor.execute(new GetStartFormCmd(processDefinitionId));
  }

  public void suspendProcessInstanceById(String processInstanceId) {
    commandExecutor.execute(new SuspendProcessInstanceCmd(processInstanceId));
  }

  public void activateProcessInstanceById(String processInstanceId) {
    commandExecutor.execute(new ActivateProcessInstanceCmd(processInstanceId));
  }

  // ------------------------------------------------------------------------
  // Message start events.
  // ------------------------------------------------------------------------

  public ProcessInstance startProcessInstanceByMessage(String messageName) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, null, null, null));
  }

  public ProcessInstance startProcessInstanceByMessageAndTenantId(String messageName, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, null, null, tenantId));
  }

  public ProcessInstance startProcessInstanceByMessage(String messageName, String businessKey) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, businessKey, null, null));
  }

  public ProcessInstance startProcessInstanceByMessageAndTenantId(String messageName, String businessKey, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, businessKey, null, tenantId));
  }

  public ProcessInstance startProcessInstanceByMessage(String messageName, Map<String, Object> processVariables) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, null, processVariables, null));
  }

  public ProcessInstance startProcessInstanceByMessageAndTenantId(String messageName, Map<String, Object> processVariables, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, null, processVariables, tenantId));
  }

  public ProcessInstance startProcessInstanceByMessage(String messageName, String businessKey, Map<String, Object> processVariables) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, businessKey, processVariables, null));
  }

  @Override
  public ProcessInstance startProcessInstanceByMessageAndTenantId(String messageName, String businessKey,
      Map<String, Object> processVariables, String tenantId) {
    return commandExecutor.execute(new StartProcessInstanceByMessageCmd(messageName, businessKey, processVariables, tenantId));
  }

  // ------------------------------------------------------------------------
  // Signal events. The *Async variants pass a "true" flag so delivery is
  // deferred to the job executor instead of happening in this transaction.
  // ------------------------------------------------------------------------

  public void signalEventReceived(String signalName) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, null, null, null));
  }

  public void signalEventReceivedWithTenantId(String signalName, String tenantId) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, null, null, tenantId));
  }

  public void signalEventReceivedAsync(String signalName) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, null, true, null));
  }

  public void signalEventReceivedAsyncWithTenantId(String signalName, String tenantId) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, null, true, tenantId));
  }

  public void signalEventReceived(String signalName, Map<String, Object> processVariables) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, null, processVariables, null));
  }

  public void signalEventReceivedWithTenantId(String signalName,
      Map<String, Object> processVariables, String tenantId) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, null, processVariables, tenantId));
  }

  public void signalEventReceived(String signalName, String executionId) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, executionId, null, null));
  }

  public void signalEventReceived(String signalName, String executionId, Map<String, Object> processVariables) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, executionId, processVariables, null));
  }

  public void signalEventReceivedAsync(String signalName, String executionId) {
    commandExecutor.execute(new SignalEventReceivedCmd(signalName, executionId, true, null));
  }

  // ------------------------------------------------------------------------
  // Message events on a specific waiting execution.
  // ------------------------------------------------------------------------

  public void messageEventReceived(String messageName, String executionId) {
    commandExecutor.execute(new MessageEventReceivedCmd(messageName, executionId, null));
  }

  public void messageEventReceived(String messageName, String executionId, Map<String, Object> processVariables) {
    commandExecutor.execute(new MessageEventReceivedCmd(messageName, executionId, processVariables));
  }

  public void messageEventReceivedAsync(String messageName, String executionId) {
    commandExecutor.execute(new MessageEventReceivedCmd(messageName, executionId, true));
  }

  // ------------------------------------------------------------------------
  // Engine event listeners and event dispatching.
  // ------------------------------------------------------------------------

  @Override
  public void addEventListener(ActivitiEventListener listenerToAdd) {
    commandExecutor.execute(new AddEventListenerCommand(listenerToAdd));
  }

  @Override
  public void addEventListener(ActivitiEventListener listenerToAdd, ActivitiEventType... types) {
    commandExecutor.execute(new AddEventListenerCommand(listenerToAdd, types));
  }

  @Override
  public void removeEventListener(ActivitiEventListener listenerToRemove) {
    commandExecutor.execute(new RemoveEventListenerCommand(listenerToRemove));
  }

  @Override
  public void dispatchEvent(ActivitiEvent event) {
    commandExecutor.execute(new DispatchEventCommand(event));
  }

  @Override
  public void setProcessInstanceName(String processInstanceId, String name) {
    commandExecutor.execute(new SetProcessInstanceNameCmd(processInstanceId, name));
  }

  @Override
  public List<Event> getProcessInstanceEvents(String processInstanceId) {
    return commandExecutor.execute(new GetProcessInstanceEventsCmd(processInstanceId));
  }

  // ------------------------------------------------------------------------
  // Fluent process instance builder.
  // ------------------------------------------------------------------------

  @Override
  public ProcessInstanceBuilder createProcessInstanceBuilder() {
    return new ProcessInstanceBuilderImpl(this);
  }

  public ProcessInstance startProcessInstance(ProcessInstanceBuilderImpl processInstanceBuilder){
    return commandExecutor.execute(new StartProcessInstanceCmd<ProcessInstance>(processInstanceBuilder));
  }
}
| ahwxl/deep | src/main/java/org/activiti/engine/impl/RuntimeServiceImpl.java | Java | apache-2.0 | 19,618 |
/*
Copyright 2017 WALLIX
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"fmt"
"os"
"path/filepath"
"strconv"
"github.com/wallix/awless/aws/services"
"github.com/wallix/awless/database"
)
// Filesystem layout of the awless working tree, rooted under $HOME/.awless.
var (
	AwlessHome = filepath.Join(os.Getenv("HOME"), ".awless")
	DBPath     = filepath.Join(AwlessHome, database.Filename)
	Dir        = filepath.Join(AwlessHome, "aws")
	KeysDir    = filepath.Join(AwlessHome, "keys")
	// AwlessFirstInstall reports whether this run is a first install; it is
	// set by InitAwlessEnv when no database file exists yet.
	AwlessFirstInstall bool
)
// init exports the awless directories into the process environment so that
// subcommands and templates can locate them.
func init() {
	os.Setenv("__AWLESS_HOME", AwlessHome)
	os.Setenv("__AWLESS_CACHE", filepath.Join(AwlessHome, "cache"))
	os.Setenv("__AWLESS_KEYS_DIR", KeysDir)
}
// InitAwlessEnv prepares the awless home directory and loads configuration.
// On a first install (detected by the absence of the database file) it also
// resolves required config values from the environment and records the
// current version in the database.
func InitAwlessEnv() error {
	_, err := os.Stat(DBPath)
	AwlessFirstInstall = os.IsNotExist(err)

	os.Setenv("__AWLESS_FIRST_INSTALL", strconv.FormatBool(AwlessFirstInstall))

	// 0700: the keys dir holds private material, restrict it to the owner.
	// The original silently ignored this error, which only surfaced later as
	// confusing failures when keys were written.
	if mkErr := os.MkdirAll(KeysDir, 0700); mkErr != nil {
		return fmt.Errorf("cannot create keys directory %s: %s", KeysDir, mkErr)
	}

	if AwlessFirstInstall {
		fmt.Fprintln(os.Stderr, AWLESS_ASCII_LOGO)
		fmt.Fprintln(os.Stderr, "Welcome! Resolving environment data...")
		fmt.Fprintln(os.Stderr)
		if err = InitConfig(resolveRequiredConfigFromEnv()); err != nil {
			return err
		}
		err = database.Execute(func(db *database.DB) error {
			return db.SetStringValue("current.version", Version)
		})
		if err != nil {
			// Non-fatal: the version stamp is informational only.
			fmt.Fprintf(os.Stderr, "cannot store current version in db: %s\n", err)
		}
	}

	if err = LoadConfig(); err != nil {
		return err
	}
	return nil
}
// resolveRequiredConfigFromEnv collects required configuration values that
// can be derived from the process environment (currently only the AWS
// region). Absent values are simply omitted from the returned map.
func resolveRequiredConfigFromEnv() map[string]string {
	resolved := make(map[string]string)
	if region := awsservices.ResolveRegionFromEnv(); region != "" {
		resolved[RegionConfigKey] = region
	}
	return resolved
}
| wallix/awless | config/init.go | GO | apache-2.0 | 2,106 |
/*
* Copyright Siemens AG, 2014
*
* Licensed under the Apache License, Version 2.0 the "License";
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siemens.oss.omniproperties.builders;
import java.io.File;
import java.io.IOException;
import com.siemens.oss.omniproperties.ObjectBuilder;
/**
 * Builds a {@link File} from a path, verifying at build time that the file
 * actually exists on disk; the returned {@link File} is canonicalized.
 *
 * @author Markus Michael Geipel
 */
public class ExistingFile implements ObjectBuilder<File> {

    private final File file;

    public ExistingFile(final String path) {
        this(new File(path));
    }

    public ExistingFile(final File file) {
        this.file = file;
    }

    public ExistingFile(final File dir, final String path) {
        this(new File(dir, path));
    }

    /**
     * @return the canonicalized file
     * @throws IOException if the canonical path cannot be resolved
     * @throws IllegalArgumentException if the file does not exist
     */
    @Override
    public File build() throws IOException {
        if (!file.exists()) {
            throw new IllegalArgumentException("File '" + file
                    + "' does not exist");
        }
        return new File(file.getCanonicalPath());
    }
}
| siemens/omniproperties | src/main/java/com/siemens/oss/omniproperties/builders/ExistingFile.java | Java | apache-2.0 | 1,314 |
package natlab.tame.valueanalysis.aggrvalue;
import natlab.tame.valueanalysis.ValueSet;
import natlab.tame.valueanalysis.components.constant.Constant;
import natlab.tame.valueanalysis.components.isComplex.isComplexInfoFactory;
import natlab.tame.valueanalysis.components.shape.ShapeFactory;
import natlab.tame.valueanalysis.value.ValueFactory;
import natlab.toolkits.path.FunctionReference;
/**
 * Factory for aggregate values built on a concrete {@link MatrixValue}
 * implementation {@code D}. Besides matrix values it creates the composite
 * values of the aggregate lattice (function handles, structs, cell arrays)
 * and owns the shared shape and complex-ness info factories.
 *
 * @author ant6n
 *
 * Extended by XU to support symbolic info (March 9th, 2013).
 * TODO: newMatrixValue and getValuePropagator are re-declared here although
 * ValueFactory already declares similar abstract methods — consider removing
 * the duplication.
 */
public abstract class AggrValueFactory<D extends MatrixValue<D>> extends ValueFactory<AggrValue<D>> {

	// Shared component factories, created once per value factory instance.
	ShapeFactory shapeFactory;
	isComplexInfoFactory isComplexFactory;

	public AggrValueFactory(){
		this.shapeFactory = new ShapeFactory();
		this.isComplexFactory = new isComplexInfoFactory(); //added by Vineet
	}

	/**
	 * Constructs a new primitive matrix value from a constant, extended to
	 * carry a symbolic variable name alongside the constant.
	 * @param symbolic symbolic name associated with the value
	 * @param constant the constant to wrap
	 */
	abstract public D newMatrixValue(String symbolic, Constant constant);

	/**
	 * Returns a ValuePropagator.
	 * This should always be an AggrValuePropagator, containing a matrix value propagator.
	 */
	abstract public AggrValuePropagator<D> getValuePropagator();

	/**
	 * Creates a function handle value for the given function reference.
	 */
	public FunctionHandleValue<D> newFunctionHandleValue(FunctionReference f){
		return new FunctionHandleValue<D>(this, f);
	}

	/**
	 * Creates a function handle value with some arguments already supplied
	 * (partial application).
	 */
	public FunctionHandleValue<D> newFunctionHandlevalue(FunctionReference f,java.util.List<ValueSet<AggrValue<D>>> partialValues){
		return new FunctionHandleValue<D>(this,f,partialValues);
	}

	/**
	 * Creates an empty struct value.
	 */
	public StructValue<D> newStruct(){
		return new StructValue<D>(this);
	}

	/**
	 * Creates an empty cell array value.
	 */
	public CellValue<D> newCell(){
		return new CellValue<D>(this);
	}

	/**
	 * Returns the shared shape factory.
	 */
	public ShapeFactory getShapeFactory(){
		return shapeFactory;
	}

	/**
	 * Returns the shared complex-ness info factory.
	 */
	public isComplexInfoFactory getIsComplexInfoFactory(){
		return isComplexFactory;
	}
}
| Sable/mclab-core | languages/Natlab/src/natlab/tame/valueanalysis/aggrvalue/AggrValueFactory.java | Java | apache-2.0 | 2,482 |
package cc_messages
import (
"encoding/json"
"code.cloudfoundry.org/bbs/models"
)
type StagingErrorID string
const (
STAGING_ERROR StagingErrorID = "StagingError"
INSUFFICIENT_RESOURCES StagingErrorID = "InsufficientResources"
NO_COMPATIBLE_CELL StagingErrorID = "NoCompatibleCell"
CELL_COMMUNICATION_ERROR StagingErrorID = "CellCommunicationError"
BUILDPACK_DETECT_FAILED StagingErrorID = "NoAppDetectedError"
BUILDPACK_COMPILE_FAILED StagingErrorID = "BuildpackCompileFailed"
BUILDPACK_RELEASE_FAILED StagingErrorID = "BuildpackReleaseFailed"
)
type StagingError struct {
Id StagingErrorID `json:"id"`
Message string `json:"message"`
}
type StagingRequestFromCC struct {
AppId string `json:"app_id"`
FileDescriptors int `json:"file_descriptors"`
MemoryMB int `json:"memory_mb"`
DiskMB int `json:"disk_mb"`
Environment []*models.EnvironmentVariable `json:"environment"`
EgressRules []*models.SecurityGroupRule `json:"egress_rules,omitempty"`
Timeout int `json:"timeout"`
LogGuid string `json:"log_guid"`
Lifecycle string `json:"lifecycle"`
LifecycleData *json.RawMessage `json:"lifecycle_data,omitempty"`
CompletionCallback string `json:"completion_callback"`
}
// BuildpackStagingData is the lifecycle-specific payload for buildpack-based
// staging: where to fetch app bits and caches, and where to upload results.
type BuildpackStagingData struct {
	AppBitsDownloadUri             string      `json:"app_bits_download_uri"`
	BuildArtifactsCacheDownloadUri string      `json:"build_artifacts_cache_download_uri,omitempty"`
	BuildArtifactsCacheUploadUri   string      `json:"build_artifacts_cache_upload_uri"`
	Buildpacks                     []Buildpack `json:"buildpacks"`
	DropletUploadUri               string      `json:"droplet_upload_uri"`
	Stack                          string      `json:"stack"`
}
// DockerStagingData is the lifecycle-specific payload for docker-based staging:
// the image reference plus optional registry credentials.
type DockerStagingData struct {
	DockerImageUrl    string `json:"docker_image"`
	DockerLoginServer string `json:"docker_login_server,omitempty"`
	DockerUser        string `json:"docker_user,omitempty"`
	DockerPassword    string `json:"docker_password,omitempty"`
	DockerEmail       string `json:"docker_email,omitempty"`
}
// CUSTOM_BUILDPACK is the sentinel Key value identifying a user-provided
// (URL-specified) buildpack rather than an admin buildpack.
const CUSTOM_BUILDPACK = "custom"

// Buildpack describes one buildpack to try during staging; SkipDetect forces
// its use without running the detect phase.
type Buildpack struct {
	Name       string `json:"name"`
	Key        string `json:"key"`
	Url        string `json:"url"`
	SkipDetect bool   `json:"skip_detect"`
}
// StagingResponseForCC is the reply sent back to the Cloud Controller: either
// an Error or a raw lifecycle-specific Result, not both.
type StagingResponseForCC struct {
	Error  *StagingError    `json:"error,omitempty"`
	Result *json.RawMessage `json:"result,omitempty"`
}

// StagingTaskAnnotation is metadata attached to the staging task so the
// completion handler knows the lifecycle and where to deliver the result.
type StagingTaskAnnotation struct {
	Lifecycle          string `json:"lifecycle"`
	CompletionCallback string `json:"completion_callback"`
}
| cf-furnace/k8s-stager | vendor/code.cloudfoundry.org/runtimeschema/cc_messages/staging_messages.go | GO | apache-2.0 | 2,818 |
// Copyright 2007 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Base class for UI MDL controls.
* @author buntarb@gmail.com (Artem Lytvynov)
*/
/**********************************************************************************************************************
* Provide section *
**********************************************************************************************************************/
goog.provide( 'zz.ui.mdl.ControlRenderer' );
/**********************************************************************************************************************
* Dependencies section *
**********************************************************************************************************************/
goog.require( 'goog.ui.registry' );
goog.require( 'goog.ui.ControlRenderer' );
/**********************************************************************************************************************
* Renderer definition section *
**********************************************************************************************************************/
/**
 * Default renderer for {@link zz.ui.mdl.Control}s. Extends the superclass to support checkbox states.
 * Registered as a singleton via {@code goog.addSingletonGetter}.
 * @constructor
 * @extends {goog.ui.ControlRenderer}
 */
zz.ui.mdl.ControlRenderer = function( ){

	zz.ui.mdl.ControlRenderer.base( this, 'constructor' );
};
goog.inherits( zz.ui.mdl.ControlRenderer, goog.ui.ControlRenderer );
goog.addSingletonGetter( zz.ui.mdl.ControlRenderer );
/**********************************************************************************************************************
* Life cycle methods *
**********************************************************************************************************************/
/**
 * Creates the control's DOM by delegating to the superclass implementation.
 * @override
 */
zz.ui.mdl.ControlRenderer.prototype.createDom = function( ){

	goog.base( this, 'createDom' );
};
/**
 * Determines whether the renderer can decorate the given element.
 * Currently accepts any element unconditionally.
 * @override
 */
zz.ui.mdl.ControlRenderer.prototype.canDecorate = function( ){

	// TODO: add check of the element (every element is currently accepted).
	return true;
};
/**
 * Decorates an existing element: locates the first INPUT descendant, wires it
 * into the control, then delegates to the superclass.
 * NOTE(review): if the element contains no INPUT, the lookup yields undefined
 * and the control's input element will be unset — confirm callers guarantee
 * an INPUT child.
 * @param {zz.ui.mdl.Control} control
 * @param {Element} element
 * @override
 */
zz.ui.mdl.ControlRenderer.prototype.decorate = function( control, element ){

	// Input element.
	control.setInputElement( control.getDomHelper( ).getElementsByTagNameAndClass(

		goog.dom.TagName.INPUT,
		undefined,
		element )[ 0 ]
	);
	return goog.base( this, 'decorate', control, element );
};
/**********************************************************************************************************************
* Helpers methods *
**********************************************************************************************************************/
/**
 * Updates the appearance of the control in response to a state change.
 * Only ARIA attributes are updated; no CSS classes are applied yet.
 * @param {zz.ui.mdl.Control} control Control instance to update.
 * @param {goog.ui.Component.State} state State to enable or disable.
 * @param {boolean} enable Whether the control is entering or exiting the state.
 * @override
 */
zz.ui.mdl.ControlRenderer.prototype.setState = function( control, state, enable ){

	var element = control.getElement( );
	if( element ){

		// TODO (user): Here we can/must add necessary classes for state.
		this.updateAriaState(element, state, enable);
	}
};
/**
 * Returns the element within the component's DOM that should receive keyboard focus (null if none).
 * Unlike the default implementation (which returns the root element), this returns the
 * control's underlying input element.
 * @param {zz.ui.mdl.Control} control Control whose key event target is to be returned.
 * @return {Element} The key event target.
 * @override
 */
zz.ui.mdl.ControlRenderer.prototype.getKeyEventTarget = function( control ){

	return control.getInputElement( );
};
/**
 * Writes the given value into the control's underlying input element.
 * @param {zz.ui.mdl.Control} control Control whose input element receives the value.
 * @param {*} value Value to assign.
 */
zz.ui.mdl.ControlRenderer.prototype.setValue = function( control, value ){

	var input = control.getInputElement( );
	input.value = value;
};
/**
 * Reads the current value of the control's underlying input element.
 * @param {zz.ui.mdl.Control} control Control whose input element is queried.
 * @returns {*} The input element's value.
 */
zz.ui.mdl.ControlRenderer.prototype.getValue = function( control ){

	var input = control.getInputElement( );
	return input.value;
};
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bingo.lang;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import bingo.lang.exceptions.ParseException;
import bingo.lang.testing.junit.ConcurrentTestCase;
/**
 * Unit tests for {@link bingo.lang.Dates}.
 *
 * @author Calvin Chen
 */
public class DatesTest extends ConcurrentTestCase {

	/**
	 * @throws java.lang.Exception
	 */
	@Before
	public void setUp() throws Exception {
	}

	/**
	 * @throws java.lang.Exception
	 */
	@After
	public void tearDown() throws Exception {
	}

	/**
	 * Test method for {@link bingo.lang.Dates#format(java.util.Date)}.
	 */
	@Test
	public void testFormatDate() {
		// fail("Not yet implemented"); // TODO
	}

	/**
	 * Test method for {@link bingo.lang.Dates#format(java.util.Date, java.lang.String)}.
	 */
	@Test
	public void testFormatDateString() {
		// fail("Not yet implemented"); // TODO
	}

	/**
	 * Test method for {@link bingo.lang.Dates#parse(java.lang.String)}.
	 */
	@Test
	public void testParseString() {
		// Well-formed ISO-style dates parse successfully.
		assertNotNull(Dates.parse("2012-05-08"));
		assertNotNull(Dates.parse("2000-01-01"));

		// Out-of-range fields must be rejected with a ParseException.
		assertParseFails("2012-13-08"); // month > 12
		assertParseFails("2012-05-33"); // day > 31
		assertParseFails("0000-05-08"); // year 0000

		// Single-digit month/day is accepted.
		assertNotNull(Dates.parse("2012-5-8"));

		// A date missing its day part is rejected.
		assertParseFails("2012-05");

		// Time-only and combined date-time values parse successfully.
		assertNotNull(Dates.parse("11:16:01"));
		assertNotNull(Dates.parse("01:01:01"));
		assertNotNull(Dates.parse("2012-05-10 14:56:02"));
	}

	/**
	 * Asserts that {@link bingo.lang.Dates#parse(String)} rejects the given input
	 * by throwing a {@link ParseException}. Replaces the repeated
	 * try / parse / fail / catch boilerplate and reports the offending input on failure.
	 */
	private static void assertParseFails(String input) {
		try {
			Dates.parse(input);
			fail("Expected ParseException for input: " + input);
		} catch (ParseException expected) {
			// expected failure
		}
	}

	/**
	 * Test method for {@link bingo.lang.Dates#parseOrNull(java.lang.String)}.
	 */
	@Test
	public void testParseOrNullString() {
		// fail("Not yet implemented"); // TODO
	}

	/**
	 * Test method for {@link bingo.lang.Dates#parse(java.lang.String, java.lang.String[])}.
	 */
	@Test
	public void testParseStringStringArray() {
		// fail("Not yet implemented"); // TODO
	}

	/**
	 * Test method for {@link bingo.lang.Dates#parseOrNull(java.lang.String, java.lang.String[])}.
	 */
	@Test
	public void testParseOrNullStringStringArray() {
		// fail("Not yet implemented"); // TODO
	}
}
| bingo-open-source/bingo-core | core-lang/src/test/java/bingo/lang/DatesTest.java | Java | apache-2.0 | 2,905 |
/*
* Copyright 2006-2016 Edward Smith
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package root.data.verifiction;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import root.adt.SetCharArray;
import root.util.Root;
/**
 * This domain verifier validates a domain based upon its list of dot-separated DNS labels, each label being limited to a length of 63 characters and
 * containing only characters from the set:
 * <ul>
 * <li>-</li>
 * <li>0 thru 9</li>
 * <li>A thru Z and a thru z</li>
 * </ul>
 *
 * @author Edward Smith
 * @version 0.5
 * @since 0.5
 */
public final class DomainVerifier {

	// <><><><><><><><><><><><><><><> Constants <><><><><><><><><><><><><><><>

	/**
	 * ASCII lookup table of valid domain characters, indexed by character code (0-127):
	 * <ul>
	 * <li>-</li>
	 * <li>0 thru 9</li>
	 * <li>A thru Z and a thru z</li>
	 * </ul>
	 * <p>
	 * <b>NOTE:</b> Two periods cannot come next to each other ('.' itself is invalid here; labels are split on '.' before this table is consulted)
	 */
	private static final boolean[] validDomainChar = {
			// ASCII 0 thru 15
			false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false,
			// ASCII 16 thru 31
			false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false,
			// ASCII 32 thru 47
			false, false, false, false, false, false, false, false, false, false, false, false, false, true, false, false,
			// ASCII 48 thru 63
			true, true, true, true, true, true, true, true, true, true, false, false, false, false, false, false,
			// ASCII 64 thru 79
			false, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true,
			// ASCI 80 thru 95
			true, true, true, true, true, true, true, true, true, true, true, false, false, false, false, false,
			// ASCII 96 thru 111
			false, true, true, true, true, true, true, true, true, true, true, true, true, true, true, true,
			// ASCII 112 thru 127
			true, true, true, true, true, true, true, true, true, true, true, false, false, false, false, false };

	/**
	 * Set of known-valid top-level domains, loaded from the bundled tld.properties resource.
	 * <p>
	 * NOTE(review): this set is populated in the static initializer but never consulted by
	 * {@link #verify(String, String)} — TLD validation appears to be unimplemented.
	 */
	private static final SetCharArray validTLDSet = new SetCharArray();

	static {
		// Load the TLD list; lines starting with '#' are comments.
		final InputStream is = Root.getResourceAsStream(DomainVerifier.class, "tld.properties");
		try (final BufferedReader tld = Root.getBufferedReader(is)) {
			for (String line = tld.readLine(); line != null; line = tld.readLine()) {
				if (line.length() > 0 && line.charAt(0) != '#') {
					validTLDSet.add(line.toCharArray());
				}
			}
		} catch (final IOException e) {
			throw new RuntimeException(e);
		}
	}

	// <><><><><><><><><><><><><><> Public Methods <><><><><><><><><><><><><><>

	/**
	 * Verifies the syntax of the given domain name.
	 *
	 * @param domain    the domain name to verify
	 * @param fieldName the name of the field being verified, used in error reporting
	 * @return a {@code VerificationError} describing the first violation found, or {@code null} when the domain is valid
	 */
	public static final VerificationError verify(final String domain, final String fieldName) {
		final int domainLength = domain.length();

		// Minimum length of two characters (uses the cached length instead of recomputing it)
		if (domainLength < 2) {
			return new MinimumLengthError(domain, fieldName, 2);
		}

		// Maximum length of 253 characters
		if (domainLength > 253) {
			return new MaximumLengthError(domain, fieldName, 253);
		}

		// Domain cannot begin or end with a -
		if (domain.charAt(0) == '-' || domain.charAt(domainLength - 1) == '-') {
			return new FormatError(domain, fieldName, "Domain cannot begin or end with a dash");
		}

		final char[][] labels = Root.split(domain, '.');

		// Must have at least two labels to the domain
		if (labels.length < 2) {
			return new FormatError(domain, fieldName, "Domain must have at least two labels");
		}

		// Verify each label in the domain
		for (final char[] label : labels) {
			// Each label cannot be greater than 63 characters
			if (label.length > 63) {
				return new FormatError(domain, fieldName, "Domain label length is greater than 63 characters");
			}

			for (final char ch : label) {
				// Characters >= 128 are accepted unchecked — presumably to allow
				// internationalized domain names; TODO confirm this is intentional.
				if (ch < 128 && !validDomainChar[ch]) {
					return new FormatError(domain, fieldName, "Invalid character detected");
				}
			}
		}

		return null;
	}

} // End DomainVerifier
| macvelli/RootFramework | src/root/data/verifiction/DomainVerifier.java | Java | apache-2.0 | 4,412 |
//
// PropManager.cpp
// WittedMouse
//
// Created by wc on 15/2/25.
//
//
#include "PropManager.h"
#include "Prop.h"
#include <stdio.h>
// Initializes the manager sprite: creates the three prop containers
// (fertilizer, block, magnet), attaches them as children, anchors the
// manager at the origin and caches the visible screen size.
// Returns false when the base Sprite fails to initialize.
bool PropManager::init() {
	if (!Sprite::init())
		return false;
	fertilizers = PropContainer::createPropContainer(FERTILIZER);
	block = PropContainer::createPropContainer(BLOCK);
	magent = PropContainer::createPropContainer(MAGNET);
	this->addChild(fertilizers);
	this->addChild(block);
	this->addChild(magent);
	this->setPosition(0, 0);
	// No floating prop exists until propCreator() spawns one.
	prop = nullptr;
	size = Director::getInstance()->getVisibleSize();
	return true;
}
// Adds one prop to the container matching the given attribute, then
// refreshes all container displays.
void PropManager::addProp(Attribute attribute) {
	if (attribute == FERTILIZER) {
		this->fertilizers->addAProp();
	} else if (attribute == BLOCK) {
		this->block->addAProp();
	} else {
		// Anything else is treated as a magnet, matching the switch default.
		this->magent->addAProp();
	}
	update();
}
// Removes one prop from the container matching the given attribute, then
// refreshes all container displays.
void PropManager::deleteProp(Attribute attribute) {
	if (attribute == FERTILIZER) {
		this->fertilizers->deleteAProp();
	} else if (attribute == BLOCK) {
		this->block->deleteAProp();
	} else {
		// Anything else is treated as a magnet, matching the switch default.
		this->magent->deleteAProp();
	}
	update();
}
// Refreshes the visual state of all three prop containers.
void PropManager::update() {
	this->fertilizers->updateContainer();
	this->block->updateContainer();
	this->magent->updateContainer();
}
// Handles a touch on one of the three prop containers. Determines which
// container (if any) was hit, plays a small scale "pop" animation on it and
// consumes one prop of the matching attribute.
//
// The original triplicated the bounds test for each container; it is now
// expressed once as a local lambda with identical semantics (container
// bounds shrunk by a 2px margin on every side).
void PropManager::propContainerTouchEvent(Touch *touch, Event *event) {
	auto touchLocation = touch->getLocation();

	// True when the touch falls inside the container's bounds, shrunk by a
	// 2px margin on every side (matches the original per-container checks).
	auto isTouchInside = [&touchLocation](PropContainer* c) {
		auto position = c->getPosition();
		auto contentSize = c->getContentSize();
		return touchLocation.x >= position.x + 2 && touchLocation.x <= position.x + contentSize.width - 2
			&& touchLocation.y >= position.y + 2 && touchLocation.y <= position.y + contentSize.height - 2;
	};

	// Check the containers in the same order as before; first hit wins.
	PropContainer* container = nullptr;
	if (isTouchInside(fertilizers)) {
		container = fertilizers;
	} else if (isTouchInside(block)) {
		container = block;
	} else if (isTouchInside(magent)) {
		container = magent;
	}

	if (container != nullptr) {
		// Container does a quick scale-up / scale-down "pop" animation.
		container->stopAllActions();
		auto sacleTo1 = ScaleTo::create(0.2, 1.1);
		auto sacleTo2 = ScaleTo::create(0.2, 1);
		auto seq = Sequence::create(sacleTo1, sacleTo2, NULL);
		container->runAction(seq);
		// Consume one prop of this container's attribute.
		this->deleteProp(container->getTheAttribute());
	}
}
// Handles a touch on the currently floating prop. If the touch lands inside
// the prop's bounds and the prop hasn't already been collected, the prop is
// marked as touched, one prop of its attribute is added to the matching
// container, and the prop plays its "burst" animation toward that container.
void PropManager::propTouchEvent(Touch *touch, Event *event) {
	// Ignore when no prop is on screen or it was already collected.
	if (prop && prop->getIsTouch() == false) {
		auto touchLocation = touch->getLocation();
		auto propLocation = prop->getPosition();
		auto propSize = prop->getContentSize();
		// Axis-aligned bounds test against the prop's full rectangle.
		if (propLocation.x <= touchLocation.x && propLocation.x + propSize.width >= touchLocation.x) {
			if (propLocation.y <= touchLocation.y && propLocation.y + propSize.height >= touchLocation.y) {
				prop->setIsTouch(true);
				this->addProp(prop->getAttribute());
				// Fly the prop toward the container matching its attribute.
				if (prop->getAttribute() == FERTILIZER) {
					prop->burst(fertilizers->getPosition(), fertilizers->getContentSize());
				}
				else if (prop->getAttribute() == BLOCK) {
					prop->burst(block->getPosition(), block->getContentSize());
				}
				else {
					prop->burst(magent->getPosition(), magent->getContentSize());
				}
				// Drop our reference; the burst animation owns cleanup from here.
				prop = nullptr;
				this->update();
			}
		}
	}
}
// Scheduled callback that spawns a new floating prop, starts its movement
// and attaches it to this node.
// NOTE(review): the previous `prop` pointer is overwritten without checking
// whether an untouched prop is still alive — confirm the old prop removes
// itself as a child when its run/burst finishes, otherwise it lingers.
void PropManager::propCreator(float dt) {
	prop = Prop::create();
	prop->run();
	this->addChild(prop);
}
package org.simpleframework.common.buffer;
import junit.framework.TestCase;
/**
 * Test case for the file-backed byte queue.
 * <p>
 * NOTE(review): the entire test body is commented out, so this test currently
 * verifies nothing. The disabled code wrote the bytes 'a'..'z' into a
 * FileByteQueue and asserted they were read back in order — re-enable or
 * remove it.
 */
public class FileByteQueueTest extends TestCase {

	public void testQueue() throws Exception {
		/* Allocator allocator = new FileAllocator();
		FileByteQueue queue = new FileByteQueue(allocator);

		for(int i = 0; i < 26; i++) {
			queue.write(new byte[]{(byte)(i+'a')}, 0, 1);
			System.err.println("WRITE>>"+(char)(i+'a'));
		}
		for(int i = 0; i < 26; i++) {
			byte[] buffer = new byte[1];
			assertEquals(queue.read(buffer, 0, 1), 1);
			System.err.println("READ>>"+((char)buffer[0]));
			assertEquals(buffer[0], (byte)(i+'a'));
		}*/
	}
}
| ggeorg/chillverse | simple-common/src/test/java/org/simpleframework/common/buffer/FileByteQueueTest.java | Java | apache-2.0 | 690 |
#include "Globals.h" // NOTE: MSVC stupidness requires this to be the same across all modules
#include "BlockID.h"
#include "World.h"
#include "ChunkDef.h"
#include "ClientHandle.h"
#include "Server.h"
#include "Item.h"
#include "Root.h"
#include "IniFile.h"
#include "ChunkMap.h"
#include "Generating/ChunkDesc.h"
#include "SetChunkData.h"
// Serializers
#include "WorldStorage/ScoreboardSerializer.h"
// Entities (except mobs):
#include "Entities/ExpOrb.h"
#include "Entities/FallingBlock.h"
#include "Entities/Minecart.h"
#include "Entities/Pickup.h"
#include "Entities/Player.h"
#include "Entities/TNTEntity.h"
#include "BlockEntities/CommandBlockEntity.h"
#include "BlockEntities/BeaconEntity.h"
// Simulators:
#include "Simulator/SimulatorManager.h"
#include "Simulator/FloodyFluidSimulator.h"
#include "Simulator/FluidSimulator.h"
#include "Simulator/FireSimulator.h"
#include "Simulator/NoopFluidSimulator.h"
#include "Simulator/NoopRedstoneSimulator.h"
#include "Simulator/SandSimulator.h"
#include "Simulator/IncrementalRedstoneSimulator.h"
#include "Simulator/VanillaFluidSimulator.h"
#include "Simulator/VaporizeFluidSimulator.h"
// Mobs:
#include "Mobs/IncludeAllMonsters.h"
#include "MobCensus.h"
#include "MobSpawner.h"
#include "Generating/Trees.h"
#include "Bindings/PluginManager.h"
#include "Blocks/BlockHandler.h"
#include "Tracer.h"
// DEBUG: Test out the cLineBlockTracer class by tracing a few lines:
#include "LineBlockTracer.h"
#ifndef _WIN32
#include <stdlib.h>
#endif
#include "Broadcaster.h"
const int TIME_SUNSET = 12000;
const int TIME_NIGHT_START = 13187;
const int TIME_NIGHT_END = 22812;
const int TIME_SUNRISE = 23999;
const int TIME_SPAWN_DIVISOR = 148;
////////////////////////////////////////////////////////////////////////////////
// cSpawnPrepare:
/** Generates and lights the spawn area of the world. Runs as a separate thread. */
class cSpawnPrepare:
	public cIsThread,
	public cChunkCoordCallback
{
	typedef cIsThread super;

public:
	// Starts the preparation thread immediately; the constructor blocks only
	// until the thread has confirmed startup, not until preparation finishes.
	cSpawnPrepare(cWorld & a_World, int a_SpawnChunkX, int a_SpawnChunkZ, int a_PrepareDistance):
		super("SpawnPrepare"),
		m_World(a_World),
		m_SpawnChunkX(a_SpawnChunkX),
		m_SpawnChunkZ(a_SpawnChunkZ),
		m_PrepareDistance(a_PrepareDistance),
		m_MaxIdx(a_PrepareDistance * a_PrepareDistance),
		m_NumPrepared(0),
		m_LastReportChunkCount(0)
	{
		// Start the thread:
		Start();

		// Wait for start confirmation, so that the thread can be waited-upon after the constructor returns:
		m_EvtStarted.Wait();
	}

	// cIsThread override:
	// Queues an initial batch of chunks and then blocks until the Call()
	// callback reports that every chunk has been prepared.
	virtual void Execute(void) override
	{
		// Confirm thread start:
		m_EvtStarted.Set();

		// Queue the initial chunks (at most 100 in flight at once):
		m_MaxIdx = m_PrepareDistance * m_PrepareDistance;
		int maxQueue = std::min(m_MaxIdx - 1, 100);  // Number of chunks to queue at once
		m_NextIdx = maxQueue;
		m_LastReportTime = std::chrono::steady_clock::now();
		for (int i = 0; i < maxQueue; i++)
		{
			int chunkX, chunkZ;
			DecodeChunkCoords(i, chunkX, chunkZ);
			m_World.PrepareChunk(chunkX, chunkZ, this);
		}  // for i

		// Wait for the lighting thread to prepare everything. Event is set in the Call() callback:
		m_EvtFinished.Wait();
	}

protected:
	cWorld & m_World;
	int m_SpawnChunkX;
	int m_SpawnChunkZ;
	int m_PrepareDistance;

	/** The index of the next chunk to be queued in the lighting thread. */
	int m_NextIdx;

	/** The maximum index of the prepared chunks. Queueing stops when m_NextIdx reaches this number. */
	int m_MaxIdx;

	/** Total number of chunks already finished preparing. Preparation finishes when this number reaches m_MaxIdx. */
	int m_NumPrepared;

	/** Event used to signal that the thread has started. */
	cEvent m_EvtStarted;

	/** Event used to signal that the preparation is finished. */
	cEvent m_EvtFinished;

	/** The timestamp of the last progress report emitted. */
	std::chrono::steady_clock::time_point m_LastReportTime;

	/** Number of chunks prepared when the last progress report was emitted. */
	int m_LastReportChunkCount;

	// cChunkCoordCallback override:
	// Invoked once per finished chunk; queues the next chunk and emits a
	// once-per-second progress report.
	// NOTE(review): initial chunks are queued via m_World.PrepareChunk() but
	// follow-ups go straight to GetLightingThread().QueueChunk() — confirm the
	// two paths are equivalent for already-generated chunks.
	virtual void Call(int a_ChunkX, int a_ChunkZ) override
	{
		// Check if this was the last chunk:
		m_NumPrepared += 1;
		if (m_NumPrepared >= m_MaxIdx)
		{
			m_EvtFinished.Set();
			// Must return here, because "this" may have gotten deleted by the previous line
			return;
		}

		// Queue another chunk, if appropriate:
		if (m_NextIdx < m_MaxIdx)
		{
			int chunkX, chunkZ;
			DecodeChunkCoords(m_NextIdx, chunkX, chunkZ);
			m_World.GetLightingThread().QueueChunk(chunkX, chunkZ, this);
			m_NextIdx += 1;
		}

		// Report progress every 1 second:
		auto Now = std::chrono::steady_clock::now();
		if (Now - m_LastReportTime > std::chrono::seconds(1))
		{
			float PercentDone = static_cast<float>(m_NumPrepared * 100) / m_MaxIdx;
			float ChunkSpeed = static_cast<float>((m_NumPrepared - m_LastReportChunkCount) * 1000) / std::chrono::duration_cast<std::chrono::milliseconds>(Now - m_LastReportTime).count();
			LOG("Preparing spawn (%s): %.02f%% (%d/%d; %.02f chunks / sec)",
				m_World.GetName().c_str(), PercentDone, m_NumPrepared, m_MaxIdx, ChunkSpeed
			);
			m_LastReportTime = Now;
			m_LastReportChunkCount = m_NumPrepared;
		}
	}

	/** Decodes the index into chunk coords. Provides the specific chunk ordering. */
	void DecodeChunkCoords(int a_Idx, int & a_ChunkX, int & a_ChunkZ)
	{
		// A zigzag pattern from the top to bottom, each row alternating between forward-x and backward-x:
		int z = a_Idx / m_PrepareDistance;
		int x = a_Idx % m_PrepareDistance;
		if ((z & 1) == 0)
		{
			// Reverse every second row:
			x = m_PrepareDistance - 1 - x;
		}
		a_ChunkZ = m_SpawnChunkZ + z - m_PrepareDistance / 2;
		a_ChunkX = m_SpawnChunkX + x - m_PrepareDistance / 2;
	}
};
////////////////////////////////////////////////////////////////////////////////
// cWorld::cLock:
// Acquires the critical section guarding the world's chunk map for the
// lifetime of this lock object (RAII via the superclass).
cWorld::cLock::cLock(cWorld & a_World) :
	super(&(a_World.m_ChunkMap->GetCS()))
{
}
////////////////////////////////////////////////////////////////////////////////
// cWorld::cTickThread:
// Binds the tick thread to its world and names it after the world for
// easier identification in thread listings.
cWorld::cTickThread::cTickThread(cWorld & a_World) :
	super(Printf("WorldTickThread: %s", a_World.GetName().c_str())),
	m_World(a_World)
{
}
// Main loop of the world's tick thread: calls cWorld::Tick() repeatedly,
// passing the real elapsed time, and sleeps out the remainder of each tick
// so that ticks are never shorter than one nominal tick duration.
void cWorld::cTickThread::Execute(void)
{
	auto LastTime = std::chrono::steady_clock::now();
	// TickTime carries the measured duration of the previous tick into Tick().
	auto TickTime = std::chrono::duration_cast<std::chrono::milliseconds>(cTickTime(1));

	while (!m_ShouldTerminate)
	{
		auto NowTime = std::chrono::steady_clock::now();
		auto WaitTime = std::chrono::duration_cast<std::chrono::milliseconds>(NowTime - LastTime);
		m_World.Tick(WaitTime, TickTime);
		TickTime = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::steady_clock::now() - NowTime);

		if (TickTime < cTickTime(1))
		{
			// Stretch tick time until it's at least 1 tick
			std::this_thread::sleep_for(cTickTime(1) - TickTime);
		}

		LastTime = NowTime;
	}
}
////////////////////////////////////////////////////////////////////////////////
// cWorld:
// Constructs a world with default settings; Start() later overrides most of
// these from the world's ini file. Creates the world folder and loads the
// scoreboard immediately.
cWorld::cWorld(const AString & a_WorldName, eDimension a_Dimension, const AString & a_LinkedOverworldName) :
	m_WorldName(a_WorldName),
	m_LinkedOverworldName(a_LinkedOverworldName),
	m_IniFileName(m_WorldName + "/world.ini"),
	m_StorageSchema("Default"),
#ifdef __arm__
	// ARM builds default to no compression to save CPU:
	m_StorageCompressionFactor(0),
#else
	m_StorageCompressionFactor(6),
#endif
	m_Dimension(a_Dimension),
	m_IsSpawnExplicitlySet(false),
	m_SpawnX(0),
	m_SpawnY(0),
	m_SpawnZ(0),
	m_BroadcastDeathMessages(true),
	m_BroadcastAchievementMessages(true),
	m_IsDaylightCycleEnabled(true),
	m_WorldAge(0),
	m_TimeOfDay(0),
	m_LastTimeUpdate(0),
	m_LastUnload(0),
	m_LastSave(0),
	m_SkyDarkness(0),
	m_GameMode(gmNotSet),
	m_bEnabledPVP(false),
	m_IsDeepSnowEnabled(false),
	m_ShouldLavaSpawnFire(true),
	m_VillagersShouldHarvestCrops(true),
	m_SimulatorManager(),
	m_SandSimulator(),
	m_WaterSimulator(nullptr),
	m_LavaSimulator(nullptr),
	m_FireSimulator(),
	m_RedstoneSimulator(nullptr),
	m_MaxPlayers(10),
	m_ChunkMap(),
	m_bAnimals(true),
	m_Weather(eWeather_Sunny),
	m_WeatherInterval(24000),  // Guaranteed 1 day of sunshine at server start :)
	m_MaxCactusHeight(3),
	m_MaxSugarcaneHeight(4),
	m_IsCactusBonemealable(false),
	m_IsCarrotsBonemealable(true),
	m_IsCropsBonemealable(true),
	m_IsGrassBonemealable(true),
	m_IsMelonStemBonemealable(true),
	m_IsMelonBonemealable(true),
	m_IsPotatoesBonemealable(true),
	m_IsPumpkinStemBonemealable(true),
	m_IsPumpkinBonemealable(true),
	m_IsSaplingBonemealable(true),
	m_IsSugarcaneBonemealable(false),
	m_bCommandBlocksEnabled(true),
	m_bUseChatPrefixes(false),
	m_TNTShrapnelLevel(slNone),
	m_MaxViewDistance(12),
	m_Scoreboard(this),
	m_MapManager(this),
	m_GeneratorCallbacks(*this),
	m_TickThread(*this)
{
	LOGD("cWorld::cWorld(\"%s\")", a_WorldName.c_str());

	// Ensure the world's data folder exists:
	cFile::CreateFolder(FILE_IO_PREFIX + m_WorldName);

	// Load the scoreboard
	cScoreboardSerializer Serializer(m_WorldName, &m_Scoreboard);
	Serializer.Load();
}
// Tears down the world: frees the simulators, flushes chunks and storage,
// persists the scoreboard and map data, and destroys the chunk map last.
cWorld::~cWorld()
{
	delete m_WaterSimulator; m_WaterSimulator = nullptr;
	delete m_LavaSimulator; m_LavaSimulator = nullptr;
	delete m_RedstoneSimulator; m_RedstoneSimulator = nullptr;

	UnloadUnusedChunks();

	// Block until pending chunk saves have completed:
	m_Storage.WaitForFinish();

	// Unload the scoreboard
	cScoreboardSerializer Serializer(m_WorldName, &m_Scoreboard);
	Serializer.Save();

	m_MapManager.SaveMapData();

	// Explicitly destroy the chunkmap, so that it's guaranteed to be destroyed before the other internals
	// This fixes crashes on stopping the server, because chunk destructor deletes entities and those access the world.
	m_ChunkMap.reset();
}
// Spawns a lightning strike at the given block coords for all clients:
// broadcasts both the visual bolt and the thunder sound effect.
void cWorld::CastThunderbolt (int a_BlockX, int a_BlockY, int a_BlockZ)
{
	BroadcastThunderbolt(a_BlockX, a_BlockY, a_BlockZ);
	BroadcastSoundEffect("ambient.weather.thunder", a_BlockX, a_BlockY, a_BlockZ, 50, 1);
}
// Returns a randomized tick count for which the specified weather should last.
// Returns -1 (and logs a warning) for an unrecognized weather value.
int cWorld::GetDefaultWeatherInterval(eWeather a_Weather)
{
	switch (a_Weather)
	{
		case eWeather_Sunny:        return 14400 + (m_TickRand.randInt() % 4800);  // 12 - 16 minutes
		case eWeather_Rain:         return 9600 + (m_TickRand.randInt() % 7200);   // 8 - 14 minutes
		case eWeather_ThunderStorm: return 2400 + (m_TickRand.randInt() % 4800);   // 2 - 6 minutes
	}

	LOGWARNING("%s: Missing default weather interval for weather %d.", __FUNCTION__, a_Weather);
	return -1;
}
// Changes the world's weather, unless a plugin hook vetoes the change.
// On veto, only re-rolls the interval for the current weather. On success,
// broadcasts the new weather to clients and fires the weather-changed hook.
void cWorld::SetWeather(eWeather a_NewWeather)
{
	// Do the plugins agree? Do they want a different weather?
	if (cRoot::Get()->GetPluginManager()->CallHookWeatherChanging(*this, a_NewWeather))
	{
		m_WeatherInterval = GetDefaultWeatherInterval(m_Weather);
		return;
	}

	// Set new period for the selected weather:
	m_WeatherInterval = GetDefaultWeatherInterval(a_NewWeather);

	// The weather can't be found:
	if (m_WeatherInterval < 0)
	{
		return;
	}

	m_Weather = a_NewWeather;
	BroadcastWeather(m_Weather);

	// Let the plugins know about the change:
	cPluginManager::Get()->CallHookWeatherChanged(*this);
}
// Forces a weather change on the next tick by expiring the current interval.
void cWorld::ChangeWeather(void)
{
	// In the next tick the weather will be changed
	m_WeatherInterval = 0;
}
// Schedules the block at the given coords to be ticked next; delegates to the
// chunk map. (The `return` of a void call is legal but redundant.)
void cWorld::SetNextBlockTick(int a_BlockX, int a_BlockY, int a_BlockZ)
{
	return m_ChunkMap->SetNextBlockTick(a_BlockX, a_BlockY, a_BlockZ);
}
// Determines the world's spawn point (generating a random one if not set in
// world.ini), then pre-generates and lights an area of chunks around it via
// cSpawnPrepare. The pregeneration distance is configurable in world.ini.
void cWorld::InitializeSpawn(void)
{
	if (!m_IsSpawnExplicitlySet)
	{
		// Spawn position wasn't already explicitly set, generate random solid-land coordinate and then write it to the world configuration:
		GenerateRandomSpawn();
		cIniFile IniFile;
		IniFile.ReadFile(m_IniFileName);
		IniFile.SetValueF("SpawnPosition", "X", m_SpawnX);
		IniFile.SetValueF("SpawnPosition", "Y", m_SpawnY);
		IniFile.SetValueF("SpawnPosition", "Z", m_SpawnZ);
		IniFile.WriteFile(m_IniFileName);
	}

	int ChunkX = 0, ChunkZ = 0;
	cChunkDef::BlockToChunk((int)m_SpawnX, (int)m_SpawnZ, ChunkX, ChunkZ);

	// For the debugging builds, don't make the server build too much world upon start:
#if defined(_DEBUG) || defined(ANDROID_NDK)
	const int DefaultViewDist = 9;
#else
	const int DefaultViewDist = 20;  // Always prepare an area 20 chunks across, no matter what the actual cClientHandle::VIEWDISTANCE is
#endif  // _DEBUG

	cIniFile IniFile;
	IniFile.ReadFile(m_IniFileName);
	int ViewDist = IniFile.GetValueSetI("SpawnPosition", "PregenerateDistance", DefaultViewDist);
	IniFile.WriteFile(m_IniFileName);

	// Blocks until the whole spawn area has been generated and lit:
	cSpawnPrepare prep(*this, ChunkX, ChunkZ, ViewDist);
	prep.Wait();

#ifdef TEST_LINEBLOCKTRACER
	// DEBUG: Test out the cLineBlockTracer class by tracing a few lines:
	class cTracerCallbacks :
		public cBlockTracer::cCallbacks
	{
		virtual bool OnNextBlock(int a_BlockX, int a_BlockY, int a_BlockZ, BLOCKTYPE a_BlockType, NIBBLETYPE a_BlockMeta) override
		{
			LOGD("Block {%d, %d, %d}: %d:%d (%s)",
				a_BlockX, a_BlockY, a_BlockZ, a_BlockType, a_BlockMeta,
				ItemToString(cItem(a_BlockType, 1, a_BlockMeta)).c_str()
			);
			return false;
		}

		virtual bool OnNextBlockNoData(int a_BlockX, int a_BlockY, int a_BlockZ) override
		{
			LOGD("Block {%d, %d, %d}: no data available",
				a_BlockX, a_BlockY, a_BlockZ
			);
			return false;
		}

		virtual bool OnOutOfWorld(double a_BlockX, double a_BlockY, double a_BlockZ) override
		{
			LOGD("Out of world at {%f, %f, %f}", a_BlockX, a_BlockY, a_BlockZ);
			return false;
		}

		virtual bool OnIntoWorld(double a_BlockX, double a_BlockY, double a_BlockZ) override
		{
			LOGD("Into world at {%f, %f, %f}", a_BlockX, a_BlockY, a_BlockZ);
			return false;
		}

		virtual void OnNoMoreHits(void) override
		{
			LOGD("No more hits");
		}
	} Callbacks;
	LOGD("Spawn is at {%f, %f, %f}", m_SpawnX, m_SpawnY, m_SpawnZ);
	LOGD("Tracing a line along +X:");
	cLineBlockTracer::Trace(*this, Callbacks, m_SpawnX - 10, m_SpawnY, m_SpawnZ, m_SpawnX + 10, m_SpawnY, m_SpawnZ);
	LOGD("Tracing a line along -Z:");
	cLineBlockTracer::Trace(*this, Callbacks, m_SpawnX, m_SpawnY, m_SpawnZ + 10, m_SpawnX, m_SpawnY, m_SpawnZ - 10);
	LOGD("Tracing a line along -Y, out of world:");
	cLineBlockTracer::Trace(*this, Callbacks, m_SpawnX, 260, m_SpawnZ, m_SpawnX, -5, m_SpawnZ);
	LOGD("Tracing a line along XY:");
	cLineBlockTracer::Trace(*this, Callbacks, m_SpawnX - 10, m_SpawnY - 10, m_SpawnZ, m_SpawnX + 10, m_SpawnY + 10, m_SpawnZ);
	LOGD("Tracing a line in generic direction:");
	cLineBlockTracer::Trace(*this, Callbacks, m_SpawnX - 15, m_SpawnY - 5, m_SpawnZ + 7.5, m_SpawnX + 13, m_SpawnY - 10, m_SpawnZ + 8.5);
	LOGD("Tracing tests done");
#endif  // TEST_LINEBLOCKTRACER
}
void cWorld::Start(void)
{
	// Initialises the world from its per-world INI file and starts all worker threads
	// (lighting, storage, generator, chunk sender, tick thread).
	// NOTE(review): the startup order at the bottom looks intentional - confirm before reordering.

	// Defaults used when the INI provides no explicit values; spawn may be replaced later:
	m_SpawnX = 0;
	m_SpawnY = cChunkDef::Height;  // Top of the world; adjusted by spawn generation / explicit config
	m_SpawnZ = 0;
	m_GameMode = eGameMode_Creative;

	cIniFile IniFile;
	if (!IniFile.ReadFile(m_IniFileName))
	{
		// Missing / unreadable INI: warn and pre-seed the file's comments; defaults below still apply
		LOGWARNING("Cannot read world settings from \"%s\", defaults will be used.", m_IniFileName.c_str());

		// TODO: More descriptions for each key
		IniFile.AddHeaderComment(" This is the per-world configuration file, managing settings such as generators, simulators, and spawn points");
		IniFile.AddKeyComment(" LinkedWorlds", "This section governs portal world linkage; leave a value blank to disabled that associated method of teleportation");
	}

	// The presence of a configuration value overrides everything
	// If no configuration value is found, GetDimension() is written to file and the variable is written to again to ensure that cosmic rays haven't sneakily changed its value
	m_Dimension = StringToDimension(IniFile.GetValueSet("General", "Dimension", DimensionToString(GetDimension())));

	// Broadcasting options:
	m_BroadcastDeathMessages = IniFile.GetValueSetB("Broadcasting", "BroadcastDeathMessages", true);
	m_BroadcastAchievementMessages = IniFile.GetValueSetB("Broadcasting", "BroadcastAchievementMessages", true);

	SetMaxViewDistance(IniFile.GetValueSetI("SpawnPosition", "MaxViewDistance", 12));

	// Try to find the "SpawnPosition" key and coord values in the world configuration, set the flag if found
	int KeyNum = IniFile.FindKey("SpawnPosition");
	m_IsSpawnExplicitlySet =
	(
		(KeyNum >= 0) &&
		(
			(IniFile.FindValue(KeyNum, "X") >= 0) &&
			(IniFile.FindValue(KeyNum, "Y") >= 0) &&
			(IniFile.FindValue(KeyNum, "Z") >= 0)
		)
	);

	if (m_IsSpawnExplicitlySet)
	{
		// All three coords are present, use them as-is (skips random spawn generation):
		LOGD("Spawnpoint explicitly set!");
		m_SpawnX = IniFile.GetValueF("SpawnPosition", "X", m_SpawnX);
		m_SpawnY = IniFile.GetValueF("SpawnPosition", "Y", m_SpawnY);
		m_SpawnZ = IniFile.GetValueF("SpawnPosition", "Z", m_SpawnZ);
	}

	// Storage backend settings:
	m_StorageSchema               = IniFile.GetValueSet ("Storage", "Schema", m_StorageSchema);
	m_StorageCompressionFactor    = IniFile.GetValueSetI("Storage", "CompressionFactor", m_StorageCompressionFactor);

	// Plant growth limits and bonemeal permissions:
	m_MaxCactusHeight             = IniFile.GetValueSetI("Plants", "MaxCactusHeight", 3);
	m_MaxSugarcaneHeight          = IniFile.GetValueSetI("Plants", "MaxSugarcaneHeight", 3);
	m_IsCactusBonemealable        = IniFile.GetValueSetB("Plants", "IsCactusBonemealable", false);
	m_IsCarrotsBonemealable       = IniFile.GetValueSetB("Plants", "IsCarrotsBonemealable", true);
	m_IsCropsBonemealable         = IniFile.GetValueSetB("Plants", "IsCropsBonemealable", true);
	m_IsGrassBonemealable         = IniFile.GetValueSetB("Plants", "IsGrassBonemealable", true);
	m_IsMelonStemBonemealable     = IniFile.GetValueSetB("Plants", "IsMelonStemBonemealable", true);
	m_IsMelonBonemealable         = IniFile.GetValueSetB("Plants", "IsMelonBonemealable", false);
	m_IsPotatoesBonemealable      = IniFile.GetValueSetB("Plants", "IsPotatoesBonemealable", true);
	m_IsPumpkinStemBonemealable   = IniFile.GetValueSetB("Plants", "IsPumpkinStemBonemealable", true);
	m_IsPumpkinBonemealable       = IniFile.GetValueSetB("Plants", "IsPumpkinBonemealable", false);
	m_IsSaplingBonemealable       = IniFile.GetValueSetB("Plants", "IsSaplingBonemealable", true);
	m_IsSugarcaneBonemealable     = IniFile.GetValueSetB("Plants", "IsSugarcaneBonemealable", false);

	// Physics / mechanics / general gameplay settings:
	m_IsDeepSnowEnabled           = IniFile.GetValueSetB("Physics", "DeepSnow", true);
	m_ShouldLavaSpawnFire         = IniFile.GetValueSetB("Physics", "ShouldLavaSpawnFire", true);
	int TNTShrapnelLevel          = IniFile.GetValueSetI("Physics", "TNTShrapnelLevel", (int)slAll);  // Clamped into the enum range below
	m_bCommandBlocksEnabled       = IniFile.GetValueSetB("Mechanics", "CommandBlocksEnabled", false);
	m_bEnabledPVP                 = IniFile.GetValueSetB("Mechanics", "PVPEnabled", true);
	m_bUseChatPrefixes            = IniFile.GetValueSetB("Mechanics", "UseChatPrefixes", true);
	m_VillagersShouldHarvestCrops = IniFile.GetValueSetB("Monsters", "VillagersShouldHarvestCrops", true);
	m_IsDaylightCycleEnabled      = IniFile.GetValueSetB("General", "IsDaylightCycleEnabled", true);
	int GameMode                  = IniFile.GetValueSetI("General", "Gamemode", (int)m_GameMode);  // Clamped below
	int Weather                   = IniFile.GetValueSetI("General", "Weather", (int)m_Weather);    // Clamped below

	// Portal linkage: the Overworld links to a Nether and an End; other dimensions link back to an Overworld:
	if (GetDimension() == dimOverworld)
	{
		m_LinkedNetherWorldName = IniFile.GetValueSet("LinkedWorlds", "NetherWorldName", GetName() + "_nether");
		m_LinkedEndWorldName    = IniFile.GetValueSet("LinkedWorlds", "EndWorldName",    GetName() + "_end");
	}
	else
	{
		m_LinkedOverworldName = IniFile.GetValueSet("LinkedWorlds", "OverworldName", GetLinkedOverworldName());
	}

	// Adjust the enum-backed variables into their respective bounds:
	m_GameMode         = (eGameMode)     Clamp(GameMode,         (int)gmSurvival, (int)gmSpectator);
	m_TNTShrapnelLevel = (eShrapnelLevel)Clamp(TNTShrapnelLevel, (int)slNone,     (int)slAll);
	m_Weather          = (eWeather)      Clamp(Weather,          (int)wSunny,     (int)wStorm);

	InitialiseGeneratorDefaults(IniFile);
	InitialiseAndLoadMobSpawningValues(IniFile);
	SetTimeOfDay(IniFile.GetValueSetI("General", "TimeInTicks", GetTimeOfDay()));

	m_ChunkMap = cpp14::make_unique<cChunkMap>(this);

	// preallocate some memory for ticking blocks so we don't need to allocate that often
	m_BlockTickQueue.reserve(1000);
	m_BlockTickQueueCopy.reserve(1000);

	// Simulators:
	m_SimulatorManager  = cpp14::make_unique<cSimulatorManager>(*this);
	m_WaterSimulator    = InitializeFluidSimulator(IniFile, "Water", E_BLOCK_WATER, E_BLOCK_STATIONARY_WATER);
	m_LavaSimulator     = InitializeFluidSimulator(IniFile, "Lava",  E_BLOCK_LAVA,  E_BLOCK_STATIONARY_LAVA);
	m_SandSimulator     = cpp14::make_unique<cSandSimulator>(*this, IniFile);
	m_FireSimulator     = cpp14::make_unique<cFireSimulator>(*this, IniFile);
	m_RedstoneSimulator = InitializeRedstoneSimulator(IniFile);

	// Water, Lava and Redstone simulators get registered in their initialize function.
	m_SimulatorManager->RegisterSimulator(m_SandSimulator.get(), 1);
	m_SimulatorManager->RegisterSimulator(m_FireSimulator.get(), 1);

	// Start the worker threads:
	m_Lighting.Start(this);
	m_Storage.Start(this, m_StorageSchema, m_StorageCompressionFactor);
	m_Generator.Start(m_GeneratorCallbacks, m_GeneratorCallbacks, IniFile);
	m_ChunkSender.Start(this);
	m_TickThread.Start();

	// Init of the spawn monster time (as they are supposed to have different spawn rate)
	m_LastSpawnMonster.insert(std::map<cMonster::eFamily, cTickTimeLong>::value_type(cMonster::mfHostile, cTickTimeLong(0)));
	m_LastSpawnMonster.insert(std::map<cMonster::eFamily, cTickTimeLong>::value_type(cMonster::mfPassive, cTickTimeLong(0)));
	m_LastSpawnMonster.insert(std::map<cMonster::eFamily, cTickTimeLong>::value_type(cMonster::mfAmbient, cTickTimeLong(0)));
	m_LastSpawnMonster.insert(std::map<cMonster::eFamily, cTickTimeLong>::value_type(cMonster::mfWater,   cTickTimeLong(0)));

	m_MapManager.LoadMapData();

	// Save any changes that the defaults may have done to the ini file:
	if (!IniFile.WriteFile(m_IniFileName))
	{
		LOGWARNING("Could not write world config to %s", m_IniFileName.c_str());
	}
}
void cWorld::GenerateRandomSpawn(void)
{
	// Walks the map chunk-by-chunk from the current spawn coords, looking for an acceptable
	// spawnpoint: a non-ocean biome, dry terrain, and a clear player-sized space.
	// m_SpawnX / m_SpawnZ are mutated during the search; m_SpawnY is set from the heightmap at the end.
	LOGD("Generating random spawnpoint...");
	bool foundSpawnPoint = false;

	// Look for a spawn point at most 100 chunks away from map center:
	for (int i = 0; i < 100; i++)
	{
		EMCSBiome biome = GetBiomeAt((int)m_SpawnX, (int)m_SpawnZ);

		if (
			(biome != biOcean) && (biome != biFrozenOcean) &&  // The biome is acceptable (don't want a small ocean island)
			!IsBlockWaterOrIce(GetBlock((int)m_SpawnX, GetHeight((int)m_SpawnX, (int)m_SpawnZ), (int)m_SpawnZ))  // The terrain is acceptable (don't want to spawn inside a lake / river)
		)
		{
			if (CheckPlayerSpawnPoint((int)m_SpawnX, GetHeight((int)m_SpawnX, (int)m_SpawnZ), (int)m_SpawnZ))
			{
				// A good spawnpoint was found
				foundSpawnPoint = true;
				break;
			}
		}
		// Try a neighboring chunk:
		if ((GetTickRandomNumber(4) % 2) == 0)  // Randomise whether to increment X or Z coords
		{
			m_SpawnX += cChunkDef::Width;
		}
		else
		{
			m_SpawnZ += cChunkDef::Width;
		}
	}  // for i - 100 attempts

	// Even on failure the last-tried coords are used, with Y from the heightmap:
	m_SpawnY = (double)GetHeight((int)m_SpawnX, (int)m_SpawnZ) + 1.6f;  // 1.6f to accommodate player height

	if (foundSpawnPoint)
	{
		LOGINFO("Generated random spawnpoint position at {%i, %i, %i}", (int)m_SpawnX, (int)m_SpawnY, (int)m_SpawnZ);
	}
	else
	{
		LOGINFO("Did not find an acceptable spawnpoint. Generated a random spawnpoint position at {%i, %i, %i}", (int)m_SpawnX, (int)m_SpawnY, (int)m_SpawnZ);
	}  // TODO(review): maybe widen the search instead of settling for the last coords?
}
bool cWorld::CheckPlayerSpawnPoint(int a_PosX, int a_PosY, int a_PosZ)
{
	// Returns true if the specified block position is suitable as a player spawnpoint:
	// the spawn block and its four horizontal neighbors must be neither solid nor liquid,
	// the block below must be solid, and the entire column above must be non-solid.

	// Offsets of the spawn block and its four horizontal neighbors:
	static const struct
	{
		int x, z;
	} Coords[] =
	{
		{ 0, 0 },
		{ -1, 0 },
		{ 1, 0 },
		{ 0, -1 },
		{ 0, 1 },
	};
	for (size_t i = 0; i < ARRAYCOUNT(Coords); i++)
	{
		// FIXED: the Z offset previously used Coords[i].x by mistake, so the
		// { 0, -1 } and { 0, 1 } neighbors were never actually checked.
		BLOCKTYPE BlockType = GetBlock(a_PosX + Coords[i].x, a_PosY, a_PosZ + Coords[i].z);
		if (cBlockInfo::IsSolid(BlockType) || IsBlockLiquid(BlockType))
		{
			return false;
		}
	}  // for i - Coords[]

	// Check that the block below is solid:
	if (!cBlockInfo::IsSolid(GetBlock(a_PosX, a_PosY - 1, a_PosZ)))
	{
		return false;
	}

	// Check that all the blocks above the spawnpoint are not solid:
	for (int i = a_PosY; i < cChunkDef::Height; i++)
	{
		BLOCKTYPE BlockType = GetBlock(a_PosX, i, a_PosZ);
		if (cBlockInfo::IsSolid(BlockType))
		{
			return false;
		}
	}
	return true;
}
eWeather cWorld::ChooseNewWeather()
{
	// Picks the follow-up weather; only reasonable transitions are allowed:
	// sunny and thunderstorm always turn into rain, rain has a 1 / 8 chance of
	// escalating into a thunderstorm and otherwise clears up.
	if ((m_Weather == eWeather_Sunny) || (m_Weather == eWeather_ThunderStorm))
	{
		return eWeather_Rain;
	}
	if (m_Weather == eWeather_Rain)
	{
		bool ShouldStorm = ((m_TickRand.randInt() % 256) < 32);  // 32 / 256 == 1 / 8 chance
		return ShouldStorm ? eWeather_ThunderStorm : eWeather_Sunny;
	}

	// Shouldn't happen - m_Weather holds an unexpected value:
	LOGWARNING("Unknown current weather: %d. Setting sunny.", m_Weather);
	ASSERT(!"Unknown weather");
	return eWeather_Sunny;
}
void cWorld::InitialiseGeneratorDefaults(cIniFile & a_IniFile)
{
	// Seeds the [Generator] section of the world INI with per-dimension defaults.
	// GetValueSet() only writes the default when the key is missing, so existing
	// user configuration always wins.
	switch (GetDimension())
	{
		case dimEnd:
		{
			a_IniFile.GetValueSet("Generator", "Generator",      "Composable");
			a_IniFile.GetValueSet("Generator", "BiomeGen",       "Constant");
			a_IniFile.GetValueSet("Generator", "ConstantBiome",  "End");
			a_IniFile.GetValueSet("Generator", "ShapeGen",       "End");
			a_IniFile.GetValueSet("Generator", "CompositionGen", "End");
			break;
		}
		case dimOverworld:
		{
			a_IniFile.GetValueSet("Generator", "Generator",      "Composable");
			a_IniFile.GetValueSet("Generator", "BiomeGen",       "Grown");
			a_IniFile.GetValueSet("Generator", "ShapeGen",       "BiomalNoise3D");
			a_IniFile.GetValueSet("Generator", "CompositionGen", "Biomal");
			a_IniFile.GetValueSet("Generator", "Finishers",      "RoughRavines, WormNestCaves, WaterLakes, WaterSprings, LavaLakes, LavaSprings, OreNests, Mineshafts, Trees, Villages, TallGrass, SprinkleFoliage, Ice, Snow, Lilypads, BottomLava, DeadBushes, NaturalPatches, PreSimulator, Animals");
			break;
		}
		case dimNether:
		{
			a_IniFile.GetValueSet("Generator", "Generator",        "Composable");
			a_IniFile.GetValueSet("Generator", "BiomeGen",         "Constant");
			a_IniFile.GetValueSet("Generator", "ConstantBiome",    "Nether");
			a_IniFile.GetValueSet("Generator", "ShapeGen",         "HeightMap");
			a_IniFile.GetValueSet("Generator", "HeightGen",        "Flat");
			a_IniFile.GetValueSet("Generator", "FlatHeight",       "128");
			a_IniFile.GetValueSet("Generator", "CompositionGen",   "Nether");
			a_IniFile.GetValueSet("Generator", "Finishers",        "SoulsandRims, WormNestCaves, BottomLava, LavaSprings, NetherClumpFoliage, NetherOreNests, NetherForts, GlowStone, PreSimulator");
			a_IniFile.GetValueSet("Generator", "BottomLavaHeight", "30");
			break;
		}
		case dimNotSet:
		{
			// The dimension should have been resolved in Start() before this is called:
			ASSERT(!"Dimension not set");
			break;
		}
	}
}
void cWorld::InitialiseAndLoadMobSpawningValues(cIniFile & a_IniFile)
{
	// Reads the [Monsters] INI section and fills m_AllowedMobs with the mob types
	// that may spawn naturally in this world.

	// Per-dimension default list of allowed mobs:
	AString DefaultMonsters;
	switch (m_Dimension)
	{
		case dimOverworld: DefaultMonsters = "bat, cavespider, chicken, cow, creeper, enderman, guardian, horse, mooshroom, ocelot, pig, rabbit, sheep, silverfish, skeleton, slime, spider, squid, wolf, zombie"; break;
		case dimNether:    DefaultMonsters = "blaze, ghast, magmacube, skeleton, zombie, zombiepigman"; break;
		case dimEnd:       DefaultMonsters = "enderman"; break;
		case dimNotSet:    ASSERT(!"Dimension not set"); break;
	}

	m_bAnimals = a_IniFile.GetValueSetB("Monsters", "AnimalsOn", true);
	AString AllMonsters = a_IniFile.GetValueSet("Monsters", "Types", DefaultMonsters);

	if (!m_bAnimals)
	{
		// Mob spawning is disabled altogether, no need to parse the list:
		return;
	}

	// Parse the comma-separated list, accepting each recognized mob type:
	for (const auto & Name : StringSplitAndTrim(AllMonsters, ","))
	{
		eMonsterType ToAdd = cMonster::StringToMobType(Name);
		if (ToAdd != mtInvalidType)
		{
			m_AllowedMobs.insert(ToAdd);
			LOGD("Allowed mob: %s", Name.c_str());
		}
		else
		{
			LOG("World \"%s\": Unknown mob type: %s", m_WorldName.c_str(), Name.c_str());
		}
	}
}
void cWorld::Stop(void)
{
	// Shuts the world down: destroys clients, persists the runtime-changeable settings
	// back into the world INI, then stops the worker threads.
	// NOTE(review): the thread-stop order (tick, lighting, generator, chunk sender, storage)
	// looks deliberate - confirm dependencies before changing it.

	// Delete the clients that have been in this world:
	{
		cCSLock Lock(m_CSClients);
		for (auto itr = m_Clients.begin(); itr != m_Clients.end(); ++itr)
		{
			(*itr)->Destroy();
		}  // for itr - m_Clients[]
		m_Clients.clear();
	}

	// Write settings to file; these are all plugin changeable values - keep updated!
	cIniFile IniFile;
	IniFile.ReadFile(m_IniFileName);
	if (GetDimension() == dimOverworld)
	{
		IniFile.SetValue("LinkedWorlds", "NetherWorldName", m_LinkedNetherWorldName);
		IniFile.SetValue("LinkedWorlds", "EndWorldName",    m_LinkedEndWorldName);
	}
	else
	{
		IniFile.SetValue("LinkedWorlds", "OverworldName", m_LinkedOverworldName);
	}
	IniFile.SetValueI("Physics", "TNTShrapnelLevel", static_cast<int>(m_TNTShrapnelLevel));
	IniFile.SetValueB("Mechanics", "CommandBlocksEnabled", m_bCommandBlocksEnabled);
	IniFile.SetValueB("Mechanics", "UseChatPrefixes", m_bUseChatPrefixes);
	IniFile.SetValueB("General", "IsDaylightCycleEnabled", m_IsDaylightCycleEnabled);
	IniFile.SetValueI("General", "Weather", static_cast<int>(m_Weather));
	IniFile.SetValueI("General", "TimeInTicks", GetTimeOfDay());
	IniFile.WriteFile(m_IniFileName);

	// Stop the worker threads:
	m_TickThread.Stop();
	m_Lighting.Stop();
	m_Generator.Stop();
	m_ChunkSender.Stop();
	m_Storage.Stop();
}
void cWorld::Tick(std::chrono::milliseconds a_Dt, std::chrono::milliseconds a_LastTickDurationMSec)
{
	// Advances the whole world by one tick: applies queued chunk data, advances the day
	// cycle, adds queued entities / players, ticks chunks, clients, tasks, simulators,
	// weather and mobs, and periodically saves / unloads chunks.

	// Call the plugins
	cPluginManager::Get()->CallHookWorldTick(*this, a_Dt, a_LastTickDurationMSec);

	// Set any chunk data that has been queued for setting:
	cSetChunkDataPtrs SetChunkDataQueue;
	{
		// Swap under lock so SetChunkData() runs without holding the queue CS:
		cCSLock Lock(m_CSSetChunkDataQueue);
		std::swap(SetChunkDataQueue, m_SetChunkDataQueue);
	}
	for (cSetChunkDataPtrs::iterator itr = SetChunkDataQueue.begin(), end = SetChunkDataQueue.end(); itr != end; ++itr)
	{
		SetChunkData(**itr);
	}  // for itr - SetChunkDataQueue[]

	m_WorldAge += a_Dt;

	if (m_IsDaylightCycleEnabled)
	{
		// We need sub-tick precision here, that's why we store the time in milliseconds and calculate ticks off of it
		m_TimeOfDay += a_Dt;

		// Wrap time of day each 20 minutes (1200 seconds)
		if (m_TimeOfDay > std::chrono::minutes(20))
		{
			m_TimeOfDay -= std::chrono::minutes(20);
		}

		// Updates the sky darkness based on current time of day
		UpdateSkyDarkness();

		// Broadcast time update every 40 ticks (2 seconds)
		if (m_LastTimeUpdate < m_WorldAge - cTickTime(40))
		{
			BroadcastTimeUpdate();
			m_LastTimeUpdate = std::chrono::duration_cast<cTickTimeLong>(m_WorldAge);
		}
	}

	// Add entities waiting in the queue to be added:
	{
		cCSLock Lock(m_CSEntitiesToAdd);
		for (cEntityList::iterator itr = m_EntitiesToAdd.begin(), end = m_EntitiesToAdd.end(); itr != end; ++itr)
		{
			(*itr)->SetWorld(this);
			m_ChunkMap->AddEntity(*itr);
		}
		m_EntitiesToAdd.clear();
	}

	// Add players waiting in the queue to be added:
	AddQueuedPlayers();

	m_ChunkMap->Tick(a_Dt);

	TickClients(static_cast<float>(a_Dt.count()));
	TickQueuedBlocks();
	TickQueuedTasks();
	TickScheduledTasks();

	GetSimulatorManager()->Simulate(static_cast<float>(a_Dt.count()));

	TickWeather(static_cast<float>(a_Dt.count()));
	m_ChunkMap->FastSetQueuedBlocks();

	if (m_WorldAge - m_LastSave > std::chrono::minutes(5))  // Save each 5 minutes
	{
		// NOTE(review): m_LastSave is presumably refreshed inside SaveAllChunks() - confirm.
		SaveAllChunks();
	}

	// NOTE(review): comment / code mismatch in the original ("Unload every 10 seconds"
	// vs. a 5-minute threshold) - confirm the intended interval.
	if (m_WorldAge - m_LastUnload > std::chrono::minutes(5))
	{
		UnloadUnusedChunks();
	}

	TickMobs(a_Dt);
}
void cWorld::TickWeather(float a_Dt)
{
	// Advances the weather state machine by one tick; may also strike a thunderbolt.
	UNUSED(a_Dt);

	// There are no weather changes anywhere but in the Overworld:
	if (GetDimension() != dimOverworld)
	{
		return;
	}

	if (m_WeatherInterval <= 0)
	{
		// The current weather period has ended, pick the next weather:
		SetWeather(ChooseNewWeather());
	}
	else
	{
		// Not yet, wait for the weather period to end:
		m_WeatherInterval--;
	}

	if (m_Weather == eWeather_ThunderStorm)
	{
		// 0.5% chance per tick of thunderbolt
		bool CastBolt = ((m_TickRand.randInt() % 199) == 0);
		if (CastBolt)
		{
			CastThunderbolt(0, 0, 0);  // TODO: find random positions near players to cast thunderbolts.
		}
	}
}
void cWorld::TickMobs(std::chrono::milliseconds a_Dt)
{
	// Performs one tick of mob handling: takes a census of existing mobs, spawns new
	// ones per-family when the spawn delay has elapsed and the family isn't capped,
	// ticks mobs near players, and destroys mobs too far from any player.

	// _X 2013_10_22: This is a quick fix for #283 - the world needs to be locked while ticking mobs
	cWorld::cLock Lock(*this);

	// before every Mob action, we have to count them depending on the distance to players, on their family ...
	cMobCensus MobCensus;
	m_ChunkMap->CollectMobCensus(MobCensus);

	if (m_bAnimals)
	{
		// Spawning is enabled, spawn now:
		static const cMonster::eFamily AllFamilies[] =
		{
			cMonster::mfHostile,
			cMonster::mfPassive,
			cMonster::mfAmbient,
			cMonster::mfWater,
		} ;
		for (size_t i = 0; i < ARRAYCOUNT(AllFamilies); i++)
		{
			cMonster::eFamily Family = AllFamilies[i];
			cTickTime SpawnDelay = cTickTime(cMonster::GetSpawnDelay(Family));
			if (
				(m_LastSpawnMonster[Family] > m_WorldAge - SpawnDelay) ||  // Not reached the needed ticks before the next round
				MobCensus.IsCapped(Family)
			)
			{
				continue;
			}
			m_LastSpawnMonster[Family] = std::chrono::duration_cast<cTickTimeLong>(m_WorldAge);
			cMobSpawner Spawner(Family, m_AllowedMobs);
			if (Spawner.CanSpawnAnything())
			{
				m_ChunkMap->SpawnMobs(Spawner);
				// do the spawn
				for (cMobSpawner::tSpawnedContainer::const_iterator itr2 = Spawner.getSpawned().begin(); itr2 != Spawner.getSpawned().end(); ++itr2)
				{
					SpawnMobFinalize(*itr2);
				}
			}
		}  // for i - AllFamilies[]
	}  // if (Spawning enabled)

	// move close mobs
	cMobProximityCounter::sIterablePair allCloseEnoughToMoveMobs = MobCensus.GetProximityCounter().getMobWithinThosesDistances(-1, 64 * 16);// MG TODO : deal with this magic number (the 16 is the size of a block)
	for (cMobProximityCounter::tDistanceToMonster::const_iterator itr = allCloseEnoughToMoveMobs.m_Begin; itr != allCloseEnoughToMoveMobs.m_End; ++itr)
	{
		itr->second.m_Monster.Tick(a_Dt, itr->second.m_Chunk);
	}

	// remove too far mobs
	cMobProximityCounter::sIterablePair allTooFarMobs = MobCensus.GetProximityCounter().getMobWithinThosesDistances(128 * 16, -1);// MG TODO : deal with this magic number (the 16 is the size of a block)
	for (cMobProximityCounter::tDistanceToMonster::const_iterator itr = allTooFarMobs.m_Begin; itr != allTooFarMobs.m_End; ++itr)
	{
		itr->second.m_Monster.Destroy(true);
	}
}
void cWorld::TickQueuedTasks(void)
{
// Make a copy of the tasks to avoid deadlocks on accessing m_Tasks
cTasks Tasks;
{
cCSLock Lock(m_CSTasks);
std::swap(Tasks, m_Tasks);
}
// Execute and delete each task:
for (cTasks::iterator itr = Tasks.begin(), end = Tasks.end(); itr != end; ++itr)
{
(*itr)->Run(*this);
} // for itr - m_Tasks[]
}
void cWorld::TickScheduledTasks(void)
{
	// Executes all the scheduled tasks whose target tick has already passed.
	// Move the tasks to be executed to a separate vector to avoid deadlocks on accessing m_Tasks
	cScheduledTasks Tasks;
	{
		cCSLock Lock(m_CSScheduledTasks);
		auto WorldAge = m_WorldAge;

		// Move all the due tasks from m_ScheduledTasks into Tasks:
		for (auto itr = m_ScheduledTasks.begin(); itr != m_ScheduledTasks.end();)  // Cannot use range-based for, we're modifying the container
		{
			if ((*itr)->m_TargetTick < std::chrono::duration_cast<cTickTimeLong>(WorldAge).count())
			{
				Tasks.push_back(std::move(*itr));
				// erase() returns the iterator to the next element; this is valid for any
				// standard container, unlike the previous "save next, then erase" dance,
				// which is only safe for node-based containers such as std::list:
				itr = m_ScheduledTasks.erase(itr);
			}
			else
			{
				// All the eligible tasks have been moved, bail out now
				break;
			}
		}
	}

	// Execute and delete each task:
	for (cScheduledTasks::iterator itr = Tasks.begin(), end = Tasks.end(); itr != end; ++itr)
	{
		(*itr)->m_Task->Run(*this);
	}  // for itr - Tasks[]
}
void cWorld::TickClients(float a_Dt)
{
	// Processes the client handle lists: removes scheduled clients, adds queued ones,
	// ticks the rest, and collects destroyed clients for deletion outside the lock.
	cClientHandlePtrs RemoveClients;
	{
		cCSLock Lock(m_CSClients);

		// Remove clients scheduled for removal:
		for (auto itr = m_ClientsToRemove.begin(), end = m_ClientsToRemove.end(); itr != end; ++itr)
		{
			// m_ClientsToRemove holds raw pointers, so match against the stored smart pointers:
			for (auto itrC = m_Clients.begin(), endC = m_Clients.end(); itrC != endC; ++itrC)
			{
				if (itrC->get() == *itr)
				{
					m_Clients.erase(itrC);
					break;
				}
			}
		}  // for itr - m_ClientsToRemove[]
		m_ClientsToRemove.clear();

		// Add clients scheduled for adding:
		for (auto itr = m_ClientsToAdd.begin(), end = m_ClientsToAdd.end(); itr != end; ++itr)
		{
			ASSERT(std::find(m_Clients.begin(), m_Clients.end(), *itr) == m_Clients.end());
			m_Clients.push_back(*itr);
		}  // for itr - m_ClientsToAdd[]
		m_ClientsToAdd.clear();

		// Tick the clients, take out those that have been destroyed into RemoveClients
		for (auto itr = m_Clients.begin(); itr != m_Clients.end();)
		{
			if ((*itr)->IsDestroyed())
			{
				// Remove the client later, when CS is not held, to avoid deadlock
				RemoveClients.push_back(*itr);
				itr = m_Clients.erase(itr);
				continue;
			}
			(*itr)->Tick(a_Dt);
			++itr;
		}  // for itr - m_Clients[]
	}

	// Delete the clients queued for removal:
	// (dropping the last shared reference here, outside the m_CSClients lock)
	RemoveClients.clear();
}
void cWorld::UpdateSkyDarkness(void)
{
	// Recomputes m_SkyDarkness from the current time of day, in four phases
	// delimited by the TIME_* constants: full daylight (0), a transition ramp,
	// full night (8), and another transition ramp.
	// NOTE(review): the TIME_* constants and TIME_SPAWN_DIVISOR are defined elsewhere;
	// the claims below about ramp direction cannot be verified from this block alone.
	int TempTime = std::chrono::duration_cast<cTickTime>(m_TimeOfDay).count();
	if (TempTime <= TIME_SUNSET)
	{
		// Daytime - no sky darkness:
		m_SkyDarkness = 0;
	}
	else if (TempTime <= TIME_NIGHT_START)
	{
		// Transition between sunset and full night:
		m_SkyDarkness = static_cast<NIBBLETYPE>((TIME_NIGHT_START - TempTime) / TIME_SPAWN_DIVISOR);
	}
	else if (TempTime <= TIME_NIGHT_END)
	{
		// Full night:
		m_SkyDarkness = 8;
	}
	else
	{
		// Transition towards sunrise.
		// NOTE(review): assumes TIME_SUNRISE >= TempTime here so the cast result is
		// non-negative - confirm against the TIME_* constant definitions.
		m_SkyDarkness = static_cast<NIBBLETYPE>((TIME_SUNRISE - TempTime) / TIME_SPAWN_DIVISOR);
	}
}
void cWorld::WakeUpSimulators(int a_BlockX, int a_BlockY, int a_BlockZ)
{
	// Wakes up the simulators for the specified block; forwards to the chunkmap.
	// (The redundant "return" of a void expression was dropped; the call is unchanged.)
	m_ChunkMap->WakeUpSimulators(a_BlockX, a_BlockY, a_BlockZ);
}
/** Wakes up the simulators for the specified area of blocks; forwards to the chunkmap. */
void cWorld::WakeUpSimulatorsInArea(int a_MinBlockX, int a_MaxBlockX, int a_MinBlockY, int a_MaxBlockY, int a_MinBlockZ, int a_MaxBlockZ)
{
	m_ChunkMap->WakeUpSimulatorsInArea(a_MinBlockX, a_MaxBlockX, a_MinBlockY, a_MaxBlockY, a_MinBlockZ, a_MaxBlockZ);
}
// The following ForEach* accessors all delegate directly to the chunkmap, which owns the
// block-entity data. Each returns the chunkmap's result (per its contract elsewhere).

bool cWorld::ForEachBlockEntityInChunk(int a_ChunkX, int a_ChunkZ, cBlockEntityCallback & a_Callback)
{
	// Calls a_Callback for each block entity in the specified chunk:
	return m_ChunkMap->ForEachBlockEntityInChunk(a_ChunkX, a_ChunkZ, a_Callback);
}

bool cWorld::ForEachChestInChunk(int a_ChunkX, int a_ChunkZ, cChestCallback & a_Callback)
{
	// Calls a_Callback for each chest in the specified chunk:
	return m_ChunkMap->ForEachChestInChunk(a_ChunkX, a_ChunkZ, a_Callback);
}

bool cWorld::ForEachDispenserInChunk(int a_ChunkX, int a_ChunkZ, cDispenserCallback & a_Callback)
{
	// Calls a_Callback for each dispenser in the specified chunk:
	return m_ChunkMap->ForEachDispenserInChunk(a_ChunkX, a_ChunkZ, a_Callback);
}

bool cWorld::ForEachDropperInChunk(int a_ChunkX, int a_ChunkZ, cDropperCallback & a_Callback)
{
	// Calls a_Callback for each dropper in the specified chunk:
	return m_ChunkMap->ForEachDropperInChunk(a_ChunkX, a_ChunkZ, a_Callback);
}

bool cWorld::ForEachDropSpenserInChunk(int a_ChunkX, int a_ChunkZ, cDropSpenserCallback & a_Callback)
{
	// Calls a_Callback for each dropper / dispenser in the specified chunk:
	return m_ChunkMap->ForEachDropSpenserInChunk(a_ChunkX, a_ChunkZ, a_Callback);
}

bool cWorld::ForEachFurnaceInChunk(int a_ChunkX, int a_ChunkZ, cFurnaceCallback & a_Callback)
{
	// Calls a_Callback for each furnace in the specified chunk:
	return m_ChunkMap->ForEachFurnaceInChunk(a_ChunkX, a_ChunkZ, a_Callback);
}
void cWorld::DoExplosionAt(double a_ExplosionSize, double a_BlockX, double a_BlockY, double a_BlockZ, bool a_CanCauseFire, eExplosionSource a_Source, void * a_SourceData)
{
	// Performs an explosion at the specified coords: lets plugins veto it, destroys
	// blocks via the chunkmap, plays the sound, and sends each nearby player the
	// explosion packet with a knockback vector.

	// Plugins may cancel the explosion; zero / negative size is a no-op as well:
	if (cPluginManager::Get()->CallHookExploding(*this, a_ExplosionSize, a_CanCauseFire, a_BlockX, a_BlockY, a_BlockZ, a_Source, a_SourceData) || (a_ExplosionSize <= 0))
	{
		return;
	}

	// TODO: Implement block hardiness
	Vector3d explosion_pos = Vector3d(a_BlockX, a_BlockY, a_BlockZ);
	cVector3iArray BlocksAffected;
	m_ChunkMap->DoExplosionAt(a_ExplosionSize, a_BlockX, a_BlockY, a_BlockZ, BlocksAffected);
	BroadcastSoundEffect("random.explode", (double)a_BlockX, (double)a_BlockY, (double)a_BlockZ, 1.0f, 0.6f);
	{
		cCSLock Lock(m_CSPlayers);
		for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
		{
			cClientHandle * ch = (*itr)->GetClientHandle();
			if (ch == nullptr)
			{
				// Player has no connected client, nothing to send:
				continue;
			}

			Vector3d distance_explosion = (*itr)->GetPosition() - explosion_pos;
			if (distance_explosion.SqrLength() < 4096.0)  // Within 64 blocks of the explosion
			{
				// Knockback magnitude is explosion size over distance (clamped away from zero);
				// NOTE(review): "power <= 1 -> 0" zeroes out weak knockback - presumably intentional, confirm.
				double real_distance = std::max(0.004, distance_explosion.Length());
				double power = a_ExplosionSize / real_distance;
				if (power <= 1)
				{
					power = 0;
				}
				distance_explosion.Normalize();
				distance_explosion *= power;
				ch->SendExplosion(a_BlockX, a_BlockY, a_BlockZ, (float)a_ExplosionSize, BlocksAffected, distance_explosion);
			}
		}
	}

	cPluginManager::Get()->CallHookExploded(*this, a_ExplosionSize, a_CanCauseFire, a_BlockX, a_BlockY, a_BlockZ, a_Source, a_SourceData);
}
// The following DoWith* / lookup accessors all delegate directly to the chunkmap,
// which owns the block-entity data. Each returns the chunkmap's result.

bool cWorld::DoWithBlockEntityAt(int a_BlockX, int a_BlockY, int a_BlockZ, cBlockEntityCallback & a_Callback)
{
	// Calls a_Callback with the block entity at the specified coords, if any:
	return m_ChunkMap->DoWithBlockEntityAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithBeaconAt(int a_BlockX, int a_BlockY, int a_BlockZ, cBeaconCallback & a_Callback)
{
	return m_ChunkMap->DoWithBeaconAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithChestAt(int a_BlockX, int a_BlockY, int a_BlockZ, cChestCallback & a_Callback)
{
	return m_ChunkMap->DoWithChestAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithDispenserAt(int a_BlockX, int a_BlockY, int a_BlockZ, cDispenserCallback & a_Callback)
{
	return m_ChunkMap->DoWithDispenserAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithDropperAt(int a_BlockX, int a_BlockY, int a_BlockZ, cDropperCallback & a_Callback)
{
	return m_ChunkMap->DoWithDropperAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithDropSpenserAt(int a_BlockX, int a_BlockY, int a_BlockZ, cDropSpenserCallback & a_Callback)
{
	return m_ChunkMap->DoWithDropSpenserAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithFurnaceAt(int a_BlockX, int a_BlockY, int a_BlockZ, cFurnaceCallback & a_Callback)
{
	return m_ChunkMap->DoWithFurnaceAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithNoteBlockAt(int a_BlockX, int a_BlockY, int a_BlockZ, cNoteBlockCallback & a_Callback)
{
	return m_ChunkMap->DoWithNoteBlockAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithCommandBlockAt(int a_BlockX, int a_BlockY, int a_BlockZ, cCommandBlockCallback & a_Callback)
{
	return m_ChunkMap->DoWithCommandBlockAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithMobHeadAt(int a_BlockX, int a_BlockY, int a_BlockZ, cMobHeadCallback & a_Callback)
{
	return m_ChunkMap->DoWithMobHeadAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::DoWithFlowerPotAt(int a_BlockX, int a_BlockY, int a_BlockZ, cFlowerPotCallback & a_Callback)
{
	return m_ChunkMap->DoWithFlowerPotAt(a_BlockX, a_BlockY, a_BlockZ, a_Callback);
}

bool cWorld::GetSignLines(int a_BlockX, int a_BlockY, int a_BlockZ, AString & a_Line1, AString & a_Line2, AString & a_Line3, AString & a_Line4)
{
	// Retrieves the four text lines of the sign at the specified coords, if any:
	return m_ChunkMap->GetSignLines(a_BlockX, a_BlockY, a_BlockZ, a_Line1, a_Line2, a_Line3, a_Line4);
}

bool cWorld::DoWithChunk(int a_ChunkX, int a_ChunkZ, cChunkCallback & a_Callback)
{
	// Calls a_Callback with the chunk at the specified chunk coords:
	return m_ChunkMap->DoWithChunk(a_ChunkX, a_ChunkZ, a_Callback);
}

bool cWorld::DoWithChunkAt(Vector3i a_BlockPos, std::function<bool(cChunk &)> a_Callback)
{
	// Calls a_Callback with the chunk containing the specified block coords:
	return m_ChunkMap->DoWithChunkAt(a_BlockPos, a_Callback);
}
void cWorld::GrowTree(int a_X, int a_Y, int a_Z)
{
if (GetBlock(a_X, a_Y, a_Z) == E_BLOCK_SAPLING)
{
// There is a sapling here, grow a tree according to its type:
GrowTreeFromSapling(a_X, a_Y, a_Z, GetBlockMeta(a_X, a_Y, a_Z));
}
else
{
// There is nothing here, grow a tree based on the current biome here:
GrowTreeByBiome(a_X, a_Y, a_Z);
}
}
void cWorld::GrowTreeFromSapling(int a_X, int a_Y, int a_Z, NIBBLETYPE a_SaplingMeta)
{
	// Grows a tree of the type encoded in the sapling's meta at the specified coords.
	cNoise Noise(m_Generator.GetSeed());
	sSetBlockVector Logs, Other;
	// The world age (truncated to 32 bits) serves as an extra randomization seed:
	auto WorldAge = (int)(std::chrono::duration_cast<cTickTimeLong>(m_WorldAge).count() & 0xffffffff);
	switch (a_SaplingMeta & 0x07)
	{
		case E_META_SAPLING_APPLE:    GetAppleTreeImage  (a_X, a_Y, a_Z, Noise, WorldAge, Logs, Other); break;
		case E_META_SAPLING_BIRCH:    GetBirchTreeImage  (a_X, a_Y, a_Z, Noise, WorldAge, Logs, Other); break;
		case E_META_SAPLING_CONIFER:  GetConiferTreeImage(a_X, a_Y, a_Z, Noise, WorldAge, Logs, Other); break;
		case E_META_SAPLING_JUNGLE:   GetJungleTreeImage (a_X, a_Y, a_Z, Noise, WorldAge, Logs, Other); break;
		case E_META_SAPLING_ACACIA:   GetAcaciaTreeImage (a_X, a_Y, a_Z, Noise, WorldAge, Logs, Other); break;
		case E_META_SAPLING_DARK_OAK: GetDarkoakTreeImage(a_X, a_Y, a_Z, Noise, WorldAge, Logs, Other); break;
		// NOTE(review): masked meta values with no matching case leave Logs / Other empty,
		// so GrowTreeImage() below becomes a no-op - confirm that's the intended behavior.
	}

	// Put the logs in front of the other blocks (GrowTreeImage() checks log positions first):
	Other.insert(Other.begin(), Logs.begin(), Logs.end());
	Logs.clear();
	GrowTreeImage(Other);
}
void cWorld::GrowTreeByBiome(int a_X, int a_Y, int a_Z)
{
cNoise Noise(m_Generator.GetSeed());
sSetBlockVector Logs, Other;
GetTreeImageByBiome(a_X, a_Y, a_Z, Noise, (int)(std::chrono::duration_cast<cTickTimeLong>(m_WorldAge).count() & 0xffffffff), GetBiomeAt(a_X, a_Z), Logs, Other);
Other.insert(Other.begin(), Logs.begin(), Logs.end());
Logs.clear();
GrowTreeImage(Other);
}
void cWorld::GrowTreeImage(const sSetBlockVector & a_Blocks)
{
	// Places the given tree image into the world, but only if every log position
	// currently holds a block that trees are allowed to overwrite.
	// Non-log positions (leaves etc.) are not checked here.

	// Check that the tree has place to grow
	// Make a copy of the log blocks:
	sSetBlockVector b2;
	for (sSetBlockVector::const_iterator itr = a_Blocks.begin(); itr != a_Blocks.end(); ++itr)
	{
		if (itr->m_BlockType == E_BLOCK_LOG)
		{
			b2.push_back(*itr);
		}
	}  // for itr - a_Blocks[]

	// Query blocktypes and metas at those log blocks:
	if (!GetBlocks(b2, false))
	{
		// Some of the coords aren't loaded, bail out without growing:
		return;
	}

	// Check that at each log's coord there's a block allowed to be overwritten:
	for (sSetBlockVector::const_iterator itr = b2.begin(); itr != b2.end(); ++itr)
	{
		switch (itr->m_BlockType)
		{
			CASE_TREE_ALLOWED_BLOCKS:
			{
				break;
			}
			default:
			{
				// An obstructing block - refuse to grow the tree:
				return;
			}
		}
	}  // for itr - b2[]

	// All ok, replace blocks with the tree image:
	m_ChunkMap->ReplaceTreeBlocks(a_Blocks);
}
bool cWorld::GrowRipePlant(int a_BlockX, int a_BlockY, int a_BlockZ, bool a_IsByBonemeal)
{
	// Grows the plant at the specified coords to its ripe stage (or one stage, for cocoa).
	// When a_IsByBonemeal is true, the per-plant "Is*Bonemealable" world settings are honored.
	// Returns true if the block was a growable plant (even if already fully grown),
	// false if bonemeal is disallowed for it or the block isn't a plant.
	BLOCKTYPE BlockType;
	NIBBLETYPE BlockMeta;
	GetBlockTypeMeta(a_BlockX, a_BlockY, a_BlockZ, BlockType, BlockMeta);
	switch (BlockType)
	{
		case E_BLOCK_CARROTS:
		{
			if (a_IsByBonemeal && !m_IsCarrotsBonemealable)
			{
				return false;
			}
			if (BlockMeta < 7)  // 7 == fully grown
			{
				FastSetBlock(a_BlockX, a_BlockY, a_BlockZ, BlockType, 7);
				BroadcastSoundParticleEffect(2005, a_BlockX, a_BlockY, a_BlockZ, 0);  // 2005: bonemeal particles
			}
			return true;
		}
		case E_BLOCK_COCOA_POD:
		{
			// Meta layout: low 2 bits = facing, high bits = growth stage (0..2):
			NIBBLETYPE TypeMeta = BlockMeta & 0x03;
			int GrowState = BlockMeta >> 2;

			if (GrowState < 2)
			{
				GrowState++;
				FastSetBlock(a_BlockX, a_BlockY, a_BlockZ, BlockType, (NIBBLETYPE) (GrowState << 2 | TypeMeta));
				BroadcastSoundParticleEffect(2005, a_BlockX, a_BlockY, a_BlockZ, 0);
			}
			return true;
		}
		case E_BLOCK_CROPS:
		{
			if (a_IsByBonemeal && !m_IsCropsBonemealable)
			{
				return false;
			}
			if (BlockMeta < 7)
			{
				FastSetBlock(a_BlockX, a_BlockY, a_BlockZ, BlockType, 7);
				BroadcastSoundParticleEffect(2005, a_BlockX, a_BlockY, a_BlockZ, 0);
			}
			return true;
		}
		case E_BLOCK_MELON_STEM:
		{
			if (BlockMeta < 7)
			{
				// Ripen the stem itself:
				if (a_IsByBonemeal && !m_IsMelonStemBonemealable)
				{
					return false;
				}
				FastSetBlock(a_BlockX, a_BlockY, a_BlockZ, BlockType, 7);
				BroadcastSoundParticleEffect(2005, a_BlockX, a_BlockY, a_BlockZ, 0);
			}
			else
			{
				// Stem is ripe; try to produce the melon fruit next to it:
				if (a_IsByBonemeal && !m_IsMelonBonemealable)
				{
					return false;
				}
				GrowMelonPumpkin(a_BlockX, a_BlockY, a_BlockZ, BlockType);
			}
			return true;
		}
		case E_BLOCK_POTATOES:
		{
			if (a_IsByBonemeal && !m_IsPotatoesBonemealable)
			{
				return false;
			}
			if (BlockMeta < 7)
			{
				FastSetBlock(a_BlockX, a_BlockY, a_BlockZ, BlockType, 7);
				BroadcastSoundParticleEffect(2005, a_BlockX, a_BlockY, a_BlockZ, 0);
			}
			return true;
		}
		case E_BLOCK_PUMPKIN_STEM:
		{
			if (BlockMeta < 7)
			{
				// Ripen the stem itself:
				if (a_IsByBonemeal && !m_IsPumpkinStemBonemealable)
				{
					return false;
				}
				FastSetBlock(a_BlockX, a_BlockY, a_BlockZ, BlockType, 7);
				BroadcastSoundParticleEffect(2005, a_BlockX, a_BlockY, a_BlockZ, 0);
			}
			else
			{
				// Stem is ripe; try to produce the pumpkin fruit next to it:
				if (a_IsByBonemeal && !m_IsPumpkinBonemealable)
				{
					return false;
				}
				GrowMelonPumpkin(a_BlockX, a_BlockY, a_BlockZ, BlockType);
			}
			return true;
		}
		case E_BLOCK_SAPLING:
		{
			if (a_IsByBonemeal && !m_IsSaplingBonemealable)
			{
				return false;
			}
			GrowTreeFromSapling(a_BlockX, a_BlockY, a_BlockZ, BlockMeta);
			return true;
		}
		case E_BLOCK_GRASS:
		{
			if (a_IsByBonemeal && !m_IsGrassBonemealable)
			{
				return false;
			}
			// Scatter random flowers / tall grass on nearby grass blocks:
			MTRand r1;
			for (int i = 0; i < 60; i++)
			{
				// Roughly bell-shaped random offsets centered on the bonemealed block:
				int OfsX = static_cast<int>(r1.randInt(3) + r1.randInt(3) + r1.randInt(3) + r1.randInt(3)) / 2 - 3;
				int OfsY = static_cast<int>(r1.randInt(3) + r1.randInt(3)) - 3;
				int OfsZ = static_cast<int>(r1.randInt(3) + r1.randInt(3) + r1.randInt(3) + r1.randInt(3)) / 2 - 3;
				BLOCKTYPE Ground = GetBlock(a_BlockX + OfsX, a_BlockY + OfsY, a_BlockZ + OfsZ);
				if (Ground != E_BLOCK_GRASS)
				{
					continue;
				}
				BLOCKTYPE Above = GetBlock(a_BlockX + OfsX, a_BlockY + OfsY + 1, a_BlockZ + OfsZ);
				if (Above != E_BLOCK_AIR)
				{
					continue;
				}
				BLOCKTYPE SpawnType;
				NIBBLETYPE SpawnMeta = 0;
				switch (r1.randInt(10))
				{
					case 0:  SpawnType = E_BLOCK_YELLOW_FLOWER; break;
					case 1:  SpawnType = E_BLOCK_RED_ROSE;      break;
					default:
					{
						SpawnType = E_BLOCK_TALL_GRASS;
						SpawnMeta = E_META_TALL_GRASS_GRASS;
						break;
					}
				}  // switch (random spawn block type)
				FastSetBlock(a_BlockX + OfsX, a_BlockY + OfsY + 1, a_BlockZ + OfsZ, SpawnType, SpawnMeta);
				BroadcastSoundParticleEffect(2005, a_BlockX + OfsX, a_BlockY + OfsY, a_BlockZ + OfsZ, 0);
			}  // for i - 60 attempts (original comment said 50, the loop runs 60)
			return true;
		}
		case E_BLOCK_SUGARCANE:
		{
			if (a_IsByBonemeal && !m_IsSugarcaneBonemealable)
			{
				return false;
			}
			m_ChunkMap->GrowSugarcane(a_BlockX, a_BlockY, a_BlockZ, m_MaxSugarcaneHeight);
			return true;
		}
		case E_BLOCK_CACTUS:
		{
			if (a_IsByBonemeal && !m_IsCactusBonemealable)
			{
				return false;
			}
			m_ChunkMap->GrowCactus(a_BlockX, a_BlockY, a_BlockZ, m_MaxCactusHeight);
			return true;
		}
	}  // switch (BlockType)

	// Not a growable plant:
	return false;
}
void cWorld::GrowCactus(int a_BlockX, int a_BlockY, int a_BlockZ, int a_NumBlocksToGrow)
{
	// Grows the cactus at the specified coords by up to a_NumBlocksToGrow blocks; delegates to the chunkmap:
	m_ChunkMap->GrowCactus(a_BlockX, a_BlockY, a_BlockZ, a_NumBlocksToGrow);
}

void cWorld::GrowMelonPumpkin(int a_BlockX, int a_BlockY, int a_BlockZ, BLOCKTYPE a_BlockType)
{
	// Grows a melon / pumpkin fruit next to the stem at the specified coords:
	MTRand Rand;
	m_ChunkMap->GrowMelonPumpkin(a_BlockX, a_BlockY, a_BlockZ, a_BlockType, Rand);
}

void cWorld::GrowSugarcane(int a_BlockX, int a_BlockY, int a_BlockZ, int a_NumBlocksToGrow)
{
	// Grows the sugarcane at the specified coords by up to a_NumBlocksToGrow blocks; delegates to the chunkmap:
	m_ChunkMap->GrowSugarcane(a_BlockX, a_BlockY, a_BlockZ, a_NumBlocksToGrow);
}

EMCSBiome cWorld::GetBiomeAt (int a_BlockX, int a_BlockZ)
{
	// Returns the biome at the specified block column:
	return m_ChunkMap->GetBiomeAt(a_BlockX, a_BlockZ);
}

bool cWorld::SetBiomeAt(int a_BlockX, int a_BlockZ, EMCSBiome a_Biome)
{
	// Sets the biome of the specified block column; returns the chunkmap's success flag:
	return m_ChunkMap->SetBiomeAt(a_BlockX, a_BlockZ, a_Biome);
}

bool cWorld::SetAreaBiome(int a_MinX, int a_MaxX, int a_MinZ, int a_MaxZ, EMCSBiome a_Biome)
{
	// Sets the biome for the specified rectangle of block columns (inclusive bounds):
	return m_ChunkMap->SetAreaBiome(a_MinX, a_MaxX, a_MinZ, a_MaxZ, a_Biome);
}

bool cWorld::SetAreaBiome(const cCuboid & a_Area, EMCSBiome a_Biome)
{
	// Convenience overload: normalizes the cuboid's corners (either corner may be
	// the minimum) and forwards to the min / max variant above. Y is ignored - biomes
	// are per-column.
	return SetAreaBiome(
		std::min(a_Area.p1.x, a_Area.p2.x), std::max(a_Area.p1.x, a_Area.p2.x),
		std::min(a_Area.p1.z, a_Area.p2.z), std::max(a_Area.p1.z, a_Area.p2.z),
		a_Biome
	);
}
void cWorld::SetBlock(int a_BlockX, int a_BlockY, int a_BlockZ, BLOCKTYPE a_BlockType, NIBBLETYPE a_BlockMeta, bool a_SendToClients)
{
m_ChunkMap->SetBlock(a_BlockX, a_BlockY, a_BlockZ, a_BlockType, a_BlockMeta, a_SendToClients);
}
void cWorld::SetBlockMeta(int a_X, int a_Y, int a_Z, NIBBLETYPE a_MetaData)
{
m_ChunkMap->SetBlockMeta(a_X, a_Y, a_Z, a_MetaData);
}
NIBBLETYPE cWorld::GetBlockSkyLight(int a_X, int a_Y, int a_Z)
{
return m_ChunkMap->GetBlockSkyLight(a_X, a_Y, a_Z);
}
NIBBLETYPE cWorld::GetBlockBlockLight(int a_BlockX, int a_BlockY, int a_BlockZ)
{
return m_ChunkMap->GetBlockBlockLight(a_BlockX, a_BlockY, a_BlockZ);
}
bool cWorld::GetBlockTypeMeta(int a_BlockX, int a_BlockY, int a_BlockZ, BLOCKTYPE & a_BlockType, NIBBLETYPE & a_BlockMeta)
{
return m_ChunkMap->GetBlockTypeMeta(a_BlockX, a_BlockY, a_BlockZ, (BLOCKTYPE &)a_BlockType, (NIBBLETYPE &)a_BlockMeta);
}
bool cWorld::GetBlockInfo(int a_BlockX, int a_BlockY, int a_BlockZ, BLOCKTYPE & a_BlockType, NIBBLETYPE & a_Meta, NIBBLETYPE & a_SkyLight, NIBBLETYPE & a_BlockLight)
{
return m_ChunkMap->GetBlockInfo(a_BlockX, a_BlockY, a_BlockZ, a_BlockType, a_Meta, a_SkyLight, a_BlockLight);
}
bool cWorld::WriteBlockArea(cBlockArea & a_Area, int a_MinBlockX, int a_MinBlockY, int a_MinBlockZ, int a_DataTypes)
{
return m_ChunkMap->WriteBlockArea(a_Area, a_MinBlockX, a_MinBlockY, a_MinBlockZ, a_DataTypes);
}
void cWorld::SpawnItemPickups(const cItems & a_Pickups, double a_BlockX, double a_BlockY, double a_BlockZ, double a_FlyAwaySpeed, bool IsPlayerCreated)
{
a_FlyAwaySpeed /= 100; // Pre-divide, so that we don't have to divide each time inside the loop
for (cItems::const_iterator itr = a_Pickups.begin(); itr != a_Pickups.end(); ++itr)
{
if (!IsValidItem(itr->m_ItemType) || (itr->m_ItemType == E_BLOCK_AIR))
{
// Don't spawn pickup if item isn't even valid; should prevent client crashing too
continue;
}
float SpeedX = (float)(a_FlyAwaySpeed * (GetTickRandomNumber(10) - 5));
float SpeedY = (float)(a_FlyAwaySpeed * GetTickRandomNumber(50));
float SpeedZ = (float)(a_FlyAwaySpeed * (GetTickRandomNumber(10) - 5));
cPickup * Pickup = new cPickup(
a_BlockX, a_BlockY, a_BlockZ,
*itr, IsPlayerCreated, SpeedX, SpeedY, SpeedZ
);
Pickup->Initialize(*this);
}
}
void cWorld::SpawnItemPickups(const cItems & a_Pickups, double a_BlockX, double a_BlockY, double a_BlockZ, double a_SpeedX, double a_SpeedY, double a_SpeedZ, bool IsPlayerCreated)
{
for (cItems::const_iterator itr = a_Pickups.begin(); itr != a_Pickups.end(); ++itr)
{
if (!IsValidItem(itr->m_ItemType) || (itr->m_ItemType == E_BLOCK_AIR))
{
continue;
}
cPickup * Pickup = new cPickup(
a_BlockX, a_BlockY, a_BlockZ,
*itr, IsPlayerCreated, (float)a_SpeedX, (float)a_SpeedY, (float)a_SpeedZ
);
Pickup->Initialize(*this);
}
}
// Spawns a falling-block entity for the given block type / meta; returns its unique entity ID.
UInt32 cWorld::SpawnFallingBlock(int a_X, int a_Y, int a_Z, BLOCKTYPE BlockType, NIBBLETYPE BlockMeta)
{
	cFallingBlock * FallingBlock = new cFallingBlock(Vector3i(a_X, a_Y, a_Z), BlockType, BlockMeta);
	FallingBlock->Initialize(*this);
	return FallingBlock->GetUniqueID();
}
// Spawns an experience orb worth a_Reward XP; rejects non-positive rewards with a warning.
// Returns the orb's unique ID, or cEntity::INVALID_ID on failure.
UInt32 cWorld::SpawnExperienceOrb(double a_X, double a_Y, double a_Z, int a_Reward)
{
	if (a_Reward < 1)
	{
		LOGWARNING("%s: Attempting to create an experience orb with non-positive reward!", __FUNCTION__);
		return cEntity::INVALID_ID;
	}
	cExpOrb * ExpOrb = new cExpOrb(a_X, a_Y, a_Z, a_Reward);
	ExpOrb->Initialize(*this);
	return ExpOrb->GetUniqueID();
}
// Spawns a minecart of the specified item type (content / block height apply to rideable carts only).
// Returns the cart's unique ID, or cEntity::INVALID_ID for an unrecognized type.
UInt32 cWorld::SpawnMinecart(double a_X, double a_Y, double a_Z, int a_MinecartType, const cItem & a_Content, int a_BlockHeight)
{
	cMinecart * Minecart;
	switch (a_MinecartType)
	{
		case E_ITEM_MINECART: Minecart = new cRideableMinecart (a_X, a_Y, a_Z, a_Content, a_BlockHeight); break;
		case E_ITEM_CHEST_MINECART: Minecart = new cMinecartWithChest (a_X, a_Y, a_Z); break;
		case E_ITEM_FURNACE_MINECART: Minecart = new cMinecartWithFurnace (a_X, a_Y, a_Z); break;
		case E_ITEM_MINECART_WITH_TNT: Minecart = new cMinecartWithTNT (a_X, a_Y, a_Z); break;
		case E_ITEM_MINECART_WITH_HOPPER: Minecart = new cMinecartWithHopper (a_X, a_Y, a_Z); break;
		default:
		{
			return cEntity::INVALID_ID;
		}
	} // switch (a_MinecartType)
	Minecart->Initialize(*this);
	return Minecart->GetUniqueID();
}
// Spawns primed TNT with the given fuse, giving it a small random sideways kick and upward speed
// proportional to a_InitialVelocityCoeff. Returns the TNT entity's unique ID.
UInt32 cWorld::SpawnPrimedTNT(double a_X, double a_Y, double a_Z, int a_FuseTicks, double a_InitialVelocityCoeff)
{
	cTNTEntity * TNT = new cTNTEntity(a_X, a_Y, a_Z, a_FuseTicks);
	TNT->Initialize(*this);
	TNT->SetSpeed(
		a_InitialVelocityCoeff * (GetTickRandomNumber(2) - 1), /** -1, 0, 1 */
		a_InitialVelocityCoeff * 2,
		a_InitialVelocityCoeff * (GetTickRandomNumber(2) - 1)
	);
	return TNT->GetUniqueID();
}
// Bulk-sets the blocks listed in a_Blocks.
void cWorld::SetBlocks(const sSetBlockVector & a_Blocks)
{
	m_ChunkMap->SetBlocks(a_Blocks);
}
// Bulk-sets the listed blocks, but only where the current block matches a_FilterBlockType.
void cWorld::ReplaceBlocks(const sSetBlockVector & a_Blocks, BLOCKTYPE a_FilterBlockType)
{
	m_ChunkMap->ReplaceBlocks(a_Blocks, a_FilterBlockType);
}
// Reads back the blocks at the coords in a_Blocks; a_ContinueOnFailure controls unloaded-chunk handling.
bool cWorld::GetBlocks(sSetBlockVector & a_Blocks, bool a_ContinueOnFailure)
{
	return m_ChunkMap->GetBlocks(a_Blocks, a_ContinueOnFailure);
}
// Digs the specified block: fires the block handler's OnDestroyed callback first, then removes the block.
bool cWorld::DigBlock(int a_X, int a_Y, int a_Z)
{
	cBlockHandler * Handler = cBlockInfo::GetHandler(GetBlock(a_X, a_Y, a_Z));
	cChunkInterface ChunkInterface(GetChunkMap());
	Handler->OnDestroyed(ChunkInterface, *this, a_X, a_Y, a_Z);
	return m_ChunkMap->DigBlock(a_X, a_Y, a_Z);
}
// Re-sends a single block's current state to the specified player (e.g. to revert a client-side change).
void cWorld::SendBlockTo(int a_X, int a_Y, int a_Z, cPlayer * a_Player)
{
	m_ChunkMap->SendBlockTo(a_X, a_Y, a_Z, a_Player);
}
// Returns the terrain height (heightmap value) at the specified column.
int cWorld::GetHeight(int a_X, int a_Z)
{
	return m_ChunkMap->GetHeight(a_X, a_Z);
}
// Non-blocking height query; returns false if the chunk isn't loaded.
bool cWorld::TryGetHeight(int a_BlockX, int a_BlockZ, int & a_Height)
{
	return m_ChunkMap->TryGetHeight(a_BlockX, a_BlockZ, a_Height);
}
// Broadcasts an entity-attach (riding) packet to clients that can see the entity.
void cWorld::BroadcastAttachEntity(const cEntity & a_Entity, const cEntity * a_Vehicle)
{
	// NOTE(review): the "return" is superfluous here (both functions are void)
	return m_ChunkMap->BroadcastAttachEntity(a_Entity, a_Vehicle);
}
// Broadcasts a block-action packet (e.g. note block play, chest open) for the given block.
void cWorld::BroadcastBlockAction(int a_BlockX, int a_BlockY, int a_BlockZ, Byte a_Byte1, Byte a_Byte2, BLOCKTYPE a_BlockType, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastBlockAction(a_BlockX, a_BlockY, a_BlockZ, a_Byte1, a_Byte2, a_BlockType, a_Exclude);
}
// Broadcasts the block-break progress animation (a_Stage) for the given digger entity.
void cWorld::BroadcastBlockBreakAnimation(UInt32 a_EntityID, int a_BlockX, int a_BlockY, int a_BlockZ, char a_Stage, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastBlockBreakAnimation(a_EntityID, a_BlockX, a_BlockY, a_BlockZ, a_Stage, a_Exclude);
}
// Broadcasts the block entity data (sign text, command block, etc.) at the given coords.
void cWorld::BroadcastBlockEntity(int a_BlockX, int a_BlockY, int a_BlockZ, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastBlockEntity(a_BlockX, a_BlockY, a_BlockZ, a_Exclude);
}
void cWorld::BroadcastChat(const AString & a_Message, const cClientHandle * a_Exclude, eMessageType a_ChatPrefix)
{
cCSLock Lock(m_CSPlayers);
for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
{
cClientHandle * ch = (*itr)->GetClientHandle();
if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
{
continue;
}
ch->SendChat(a_Message, a_ChatPrefix);
}
}
void cWorld::BroadcastChat(const cCompositeChat & a_Message, const cClientHandle * a_Exclude)
{
cCSLock Lock(m_CSPlayers);
for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
{
cClientHandle * ch = (*itr)->GetClientHandle();
if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
{
continue;
}
ch->SendChat(a_Message);
}
}
// The Broadcast* functions below are thin forwards to the chunkmap, which sends the corresponding
// packet to every client that has the relevant chunk loaded (optionally excluding a_Exclude).
void cWorld::BroadcastChunkData(int a_ChunkX, int a_ChunkZ, cChunkDataSerializer & a_Serializer, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastChunkData(a_ChunkX, a_ChunkZ, a_Serializer, a_Exclude);
}
void cWorld::BroadcastCollectEntity(const cEntity & a_Entity, const cPlayer & a_Player, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastCollectEntity(a_Entity, a_Player, a_Exclude);
}
void cWorld::BroadcastDestroyEntity(const cEntity & a_Entity, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastDestroyEntity(a_Entity, a_Exclude);
}
void cWorld::BroadcastEntityEffect(const cEntity & a_Entity, int a_EffectID, int a_Amplifier, short a_Duration, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityEffect(a_Entity, a_EffectID, a_Amplifier, a_Duration, a_Exclude);
}
void cWorld::BroadcastEntityEquipment(const cEntity & a_Entity, short a_SlotNum, const cItem & a_Item, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityEquipment(a_Entity, a_SlotNum, a_Item, a_Exclude);
}
void cWorld::BroadcastEntityHeadLook(const cEntity & a_Entity, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityHeadLook(a_Entity, a_Exclude);
}
void cWorld::BroadcastEntityLook(const cEntity & a_Entity, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityLook(a_Entity, a_Exclude);
}
void cWorld::BroadcastEntityMetadata(const cEntity & a_Entity, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityMetadata(a_Entity, a_Exclude);
}
// Relative-move packets: a_RelX / Y / Z are the (small, char-sized) deltas since the last position.
void cWorld::BroadcastEntityRelMove(const cEntity & a_Entity, char a_RelX, char a_RelY, char a_RelZ, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityRelMove(a_Entity, a_RelX, a_RelY, a_RelZ, a_Exclude);
}
void cWorld::BroadcastEntityRelMoveLook(const cEntity & a_Entity, char a_RelX, char a_RelY, char a_RelZ, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityRelMoveLook(a_Entity, a_RelX, a_RelY, a_RelZ, a_Exclude);
}
void cWorld::BroadcastEntityStatus(const cEntity & a_Entity, char a_Status, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityStatus(a_Entity, a_Status, a_Exclude);
}
void cWorld::BroadcastEntityVelocity(const cEntity & a_Entity, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityVelocity(a_Entity, a_Exclude);
}
void cWorld::BroadcastEntityAnimation(const cEntity & a_Entity, char a_Animation, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastEntityAnimation(a_Entity, a_Animation, a_Exclude);
}
void cWorld::BroadcastPlayerListAddPlayer(const cPlayer & a_Player, const cClientHandle * a_Exclude)
{
cCSLock Lock(m_CSPlayers);
for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
{
cClientHandle * ch = (*itr)->GetClientHandle();
if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
{
continue;
}
ch->SendPlayerListAddPlayer(a_Player);
}
}
// The player-list broadcasts below iterate the world's player list directly (under m_CSPlayers),
// skipping disconnected / not-yet-logged-in clients and the optional a_Exclude client.
void cWorld::BroadcastPlayerListRemovePlayer(const cPlayer & a_Player, const cClientHandle * a_Exclude)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendPlayerListRemovePlayer(a_Player);
	}
}
// Updates a_Player's gamemode in everyone's tab-list.
void cWorld::BroadcastPlayerListUpdateGameMode(const cPlayer & a_Player, const cClientHandle * a_Exclude)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendPlayerListUpdateGameMode(a_Player);
	}
}
// Updates a_Player's ping in everyone's tab-list.
void cWorld::BroadcastPlayerListUpdatePing(const cPlayer & a_Player, const cClientHandle * a_Exclude)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendPlayerListUpdatePing(a_Player);
	}
}
// Updates a_Player's display name (a_CustomName) in everyone's tab-list.
void cWorld::BroadcastPlayerListUpdateDisplayName(const cPlayer & a_Player, const AString & a_CustomName, const cClientHandle * a_Exclude)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendPlayerListUpdateDisplayName(a_Player, a_CustomName);
	}
}
// Broadcasts removal of an entity effect to clients that can see the entity.
void cWorld::BroadcastRemoveEntityEffect(const cEntity & a_Entity, int a_EffectID, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastRemoveEntityEffect(a_Entity, a_EffectID, a_Exclude);
}
// The scoreboard broadcasts below go to ALL logged-in players in the world (no exclude parameter).
void cWorld::BroadcastScoreboardObjective(const AString & a_Name, const AString & a_DisplayName, Byte a_Mode)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendScoreboardObjective(a_Name, a_DisplayName, a_Mode);
	}
}
void cWorld::BroadcastScoreUpdate(const AString & a_Objective, const AString & a_Player, cObjective::Score a_Score, Byte a_Mode)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendScoreUpdate(a_Objective, a_Player, a_Score, a_Mode);
	}
}
void cWorld::BroadcastDisplayObjective(const AString & a_Objective, cScoreboard::eDisplaySlot a_Display)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendDisplayObjective(a_Objective, a_Display);
	}
}
// Sound / particle / spawn broadcasts are proximity-based, so they forward to the chunkmap:
void cWorld::BroadcastSoundEffect(const AString & a_SoundName, double a_X, double a_Y, double a_Z, float a_Volume, float a_Pitch, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastSoundEffect(a_SoundName, a_X, a_Y, a_Z, a_Volume, a_Pitch, a_Exclude);
}
void cWorld::BroadcastSoundParticleEffect(int a_EffectID, int a_SrcX, int a_SrcY, int a_SrcZ, int a_Data, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastSoundParticleEffect(a_EffectID, a_SrcX, a_SrcY, a_SrcZ, a_Data, a_Exclude);
}
void cWorld::BroadcastSpawnEntity(cEntity & a_Entity, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastSpawnEntity(a_Entity, a_Exclude);
}
void cWorld::BroadcastTeleportEntity(const cEntity & a_Entity, const cClientHandle * a_Exclude)
{
cCSLock Lock(m_CSPlayers);
for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
{
cClientHandle * ch = (*itr)->GetClientHandle();
if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
{
continue;
}
ch->SendTeleportEntity(a_Entity);
}
}
// Broadcasts a lightning-strike effect at the given coords (proximity-based via chunkmap).
void cWorld::BroadcastThunderbolt(int a_BlockX, int a_BlockY, int a_BlockZ, const cClientHandle * a_Exclude)
{
	m_ChunkMap->BroadcastThunderbolt(a_BlockX, a_BlockY, a_BlockZ, a_Exclude);
}
// Sends the current world age / time-of-day (and daylight-cycle flag) to every logged-in client.
void cWorld::BroadcastTimeUpdate(const cClientHandle * a_Exclude)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		// Durations are converted to whole ticks for the protocol:
		ch->SendTimeUpdate(std::chrono::duration_cast<cTickTimeLong>(m_WorldAge).count(), std::chrono::duration_cast<cTickTimeLong>(m_TimeOfDay).count(), m_IsDaylightCycleEnabled);
	}
}
// Broadcasts the "entity uses a bed" packet for clients near the bed.
void cWorld::BroadcastUseBed(const cEntity & a_Entity, int a_BlockX, int a_BlockY, int a_BlockZ)
{
	m_ChunkMap->BroadcastUseBed(a_Entity, a_BlockX, a_BlockY, a_BlockZ);
}
// Sends the new weather state to every logged-in client in the world.
void cWorld::BroadcastWeather(eWeather a_Weather, const cClientHandle * a_Exclude)
{
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch == a_Exclude) || (ch == nullptr) || !ch->IsLoggedIn() || ch->IsDestroyed())
		{
			continue;
		}
		ch->SendWeather(a_Weather);
	}
}
// Sends the block entity at the given coords to a single client.
void cWorld::SendBlockEntity(int a_BlockX, int a_BlockY, int a_BlockZ, cClientHandle & a_Client)
{
	m_ChunkMap->SendBlockEntity(a_BlockX, a_BlockY, a_BlockZ, a_Client);
}
// Flags the chunk's redstone simulator data as needing recalculation.
void cWorld::MarkRedstoneDirty(int a_ChunkX, int a_ChunkZ)
{
	m_ChunkMap->MarkRedstoneDirty(a_ChunkX, a_ChunkZ);
}
// Marks the chunk as modified (needing a save), optionally also dirtying its redstone data.
void cWorld::MarkChunkDirty(int a_ChunkX, int a_ChunkZ, bool a_MarkRedstoneDirty)
{
	m_ChunkMap->MarkChunkDirty(a_ChunkX, a_ChunkZ, a_MarkRedstoneDirty);
}
// Marks the chunk as currently being saved / as saved (bookkeeping for the storage thread).
void cWorld::MarkChunkSaving(int a_ChunkX, int a_ChunkZ)
{
	m_ChunkMap->MarkChunkSaving(a_ChunkX, a_ChunkZ);
}
void cWorld::MarkChunkSaved (int a_ChunkX, int a_ChunkZ)
{
	m_ChunkMap->MarkChunkSaved (a_ChunkX, a_ChunkZ);
}
// Queues freshly loaded / generated chunk data for injection into the world on the tick thread.
// Fills in any missing biomes (from the generator) and heightmap before queuing.
void cWorld::QueueSetChunkData(const cSetChunkDataPtr & a_SetChunkData)
{
	ASSERT(IsChunkQueued(a_SetChunkData->GetChunkX(), a_SetChunkData->GetChunkZ()));
	// Validate biomes, if needed:
	if (!a_SetChunkData->AreBiomesValid())
	{
		// The biomes are not assigned, get them from the generator:
		m_Generator.GenerateBiomes(a_SetChunkData->GetChunkX(), a_SetChunkData->GetChunkZ(), a_SetChunkData->GetBiomes());
		a_SetChunkData->MarkBiomesValid();
	}
	// Validate heightmap, if needed:
	if (!a_SetChunkData->IsHeightMapValid())
	{
		a_SetChunkData->CalculateHeightMap();
	}
	// Store a copy of the data in the queue:
	// TODO: If the queue is too large, wait for it to get processed. Not likely, though.
	cCSLock Lock(m_CSSetChunkDataQueue);
	m_SetChunkDataQueue.push_back(a_SetChunkData);
}
// Installs prepared chunk data into the chunkmap, then initializes the chunk's entities,
// notifies waiting clients and (optionally) queues the chunk for saving.
// Expects biomes and heightmap to be valid already (see QueueSetChunkData).
void cWorld::SetChunkData(cSetChunkData & a_SetChunkData)
{
	ASSERT(a_SetChunkData.AreBiomesValid());
	ASSERT(a_SetChunkData.IsHeightMapValid());
	m_ChunkMap->SetChunkData(a_SetChunkData);
	// Initialize the entities (outside the m_ChunkMap's CS, to fix FS #347):
	cEntityList Entities;
	std::swap(a_SetChunkData.GetEntities(), Entities);
	for (cEntityList::iterator itr = Entities.begin(), end = Entities.end(); itr != end; ++itr)
	{
		(*itr)->Initialize(*this);
	}
	// If a client is requesting this chunk, send it to them:
	int ChunkX = a_SetChunkData.GetChunkX();
	int ChunkZ = a_SetChunkData.GetChunkZ();
	if (m_ChunkMap->HasChunkAnyClients(ChunkX, ChunkZ))
	{
		m_ChunkSender.ChunkReady(ChunkX, ChunkZ);
	}
	// Save the chunk right after generating, so that we don't have to generate it again on next run
	if (a_SetChunkData.ShouldMarkDirty())
	{
		m_Storage.QueueSaveChunk(ChunkX, ChunkZ);
	}
}
// Installs computed block-light and sky-light data for the specified chunk.
void cWorld::ChunkLighted(
	int a_ChunkX, int a_ChunkZ,
	const cChunkDef::BlockNibbles & a_BlockLight,
	const cChunkDef::BlockNibbles & a_SkyLight
)
{
	m_ChunkMap->ChunkLighted(a_ChunkX, a_ChunkZ, a_BlockLight, a_SkyLight);
}
// Calls a_Callback with the chunk's data; returns false if the chunk isn't valid.
bool cWorld::GetChunkData(int a_ChunkX, int a_ChunkZ, cChunkDataCallback & a_Callback)
{
	return m_ChunkMap->GetChunkData(a_ChunkX, a_ChunkZ, a_Callback);
}
// Copies the chunk's block types into the caller-supplied array.
bool cWorld::GetChunkBlockTypes(int a_ChunkX, int a_ChunkZ, BLOCKTYPE * a_BlockTypes)
{
	return m_ChunkMap->GetChunkBlockTypes(a_ChunkX, a_ChunkZ, a_BlockTypes);
}
// Chunk state queries, forwarded to the chunkmap:
bool cWorld::IsChunkQueued(int a_ChunkX, int a_ChunkZ) const
{
	return m_ChunkMap->IsChunkQueued(a_ChunkX, a_ChunkZ);
}
bool cWorld::IsChunkValid(int a_ChunkX, int a_ChunkZ) const
{
	return m_ChunkMap->IsChunkValid(a_ChunkX, a_ChunkZ);
}
bool cWorld::HasChunkAnyClients(int a_ChunkX, int a_ChunkZ) const
{
	return m_ChunkMap->HasChunkAnyClients(a_ChunkX, a_ChunkZ);
}
// Unloads chunks with no clients / forced-stay; records the time of the unload pass.
void cWorld::UnloadUnusedChunks(void)
{
	m_LastUnload = std::chrono::duration_cast<cTickTimeLong>(m_WorldAge);
	m_ChunkMap->UnloadUnusedChunks();
}
// Schedules an unload pass to run on the world's tick thread.
void cWorld::QueueUnloadUnusedChunks(void)
{
	QueueTask(cpp14::make_unique<cWorld::cTaskUnloadUnusedChunks>());
}
// Lets the player collect any pickups within their reach.
void cWorld::CollectPickupsByPlayer(cPlayer & a_Player)
{
	m_ChunkMap->CollectPickupsByPlayer(a_Player);
}
// Queues a player to be added to the world on the next tick (thread-safe entry point).
void cWorld::AddPlayer(cPlayer * a_Player)
{
	cCSLock Lock(m_CSPlayersToAdd);
	m_PlayersToAdd.push_back(a_Player);
}
// Removes a player from this world: from the owning chunk (optionally), from the pending-add
// queue, from the player list, and finally schedules their client handle for removal.
// The locks are taken one at a time, in separate scopes, to keep lock ordering simple.
void cWorld::RemovePlayer(cPlayer * a_Player, bool a_RemoveFromChunk)
{
	if (a_RemoveFromChunk)
	{
		// To prevent iterator invalidations when an entity goes through a portal and calls this function whilst being ticked by cChunk
		// we should not change cChunk's entity list if asked not to
		m_ChunkMap->RemoveEntity(a_Player);
	}
	{
		// The player may still be queued for addition; cancel that too:
		cCSLock Lock(m_CSPlayersToAdd);
		m_PlayersToAdd.remove(a_Player);
	}
	{
		cCSLock Lock(m_CSPlayers);
		LOGD("Removing player %s from world \"%s\"", a_Player->GetName().c_str(), m_WorldName.c_str());
		m_Players.remove(a_Player);
	}
	// Remove the player's client from the list of clients to be ticked:
	cClientHandle * Client = a_Player->GetClientHandle();
	if (Client != nullptr)
	{
		Client->RemoveFromWorld();
		m_ChunkMap->RemoveClientFromChunks(Client);
		cCSLock Lock(m_CSClients);
		m_ClientsToRemove.push_back(Client);
	}
}
// Calls a_Callback for each player in the world; a callback returning true aborts the
// iteration (and makes this function return false).
bool cWorld::ForEachPlayer(cPlayerListCallback & a_Callback)
{
	// Calls the callback for each player in the list
	cCSLock Lock(m_CSPlayers);
	// itr2 is pre-advanced so the loop survives the callback removing the current element
	// NOTE(review): assumes m_Players has list-like iterator stability for unaffected elements
	for (cPlayerList::iterator itr = m_Players.begin(), itr2 = itr; itr != m_Players.end(); itr = itr2)
	{
		++itr2;
		if (a_Callback.Item(*itr))
		{
			return false;
		}
	} // for itr - m_Players[]
	return true;
}
bool cWorld::DoWithPlayer(const AString & a_PlayerName, cPlayerListCallback & a_Callback)
{
// Calls the callback for the specified player in the list
cCSLock Lock(m_CSPlayers);
for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
{
if (NoCaseCompare((*itr)->GetName(), a_PlayerName) == 0)
{
a_Callback.Item(*itr);
return true;
}
} // for itr - m_Players[]
return false;
}
// Finds the player whose name best matches a_PlayerNameHint (via RateCompareString) and calls
// the callback on them. Stops early on a perfect match (rating equals the hint's length).
// Returns the callback's result, or false when no player matched at all.
bool cWorld::FindAndDoWithPlayer(const AString & a_PlayerNameHint, cPlayerListCallback & a_Callback)
{
	cPlayer * BestMatch = nullptr;
	size_t BestRating = 0;
	size_t NameLength = a_PlayerNameHint.length();
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		size_t Rating = RateCompareString (a_PlayerNameHint, (*itr)->GetName());
		// ">=" means a later equally-rated player wins over an earlier one
		if (Rating >= BestRating)
		{
			BestMatch = *itr;
			BestRating = Rating;
		}
		if (Rating == NameLength) // Perfect match
		{
			break;
		}
	} // for itr - m_Players[]
	if (BestMatch != nullptr)
	{
		return a_Callback.Item (BestMatch);
	}
	return false;
}
bool cWorld::DoWithPlayerByUUID(const AString & a_PlayerUUID, cPlayerListCallback & a_Callback)
{
cCSLock Lock(m_CSPlayers);
for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
{
if ((*itr)->GetUUID() == a_PlayerUUID)
{
return a_Callback.Item(*itr);
}
}
return false;
}
// TODO: This interface is dangerous!
// Returns the player closest to a_Pos within a_SightLimit, or nullptr if none qualifies.
// With a_CheckLineOfSight, a player only qualifies when the trace to them is unobstructed.
// NOTE(review): the returned pointer is not protected by m_CSPlayers once this returns —
// the player may be removed concurrently; hence the TODO above.
cPlayer * cWorld::FindClosestPlayer(const Vector3d & a_Pos, float a_SightLimit, bool a_CheckLineOfSight)
{
	cTracer LineOfSight(this);
	double ClosestDistance = a_SightLimit;
	cPlayer * ClosestPlayer = nullptr;
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::const_iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		Vector3f Pos = (*itr)->GetPosition();
		double Distance = (Pos - a_Pos).Length();
		if (Distance < ClosestDistance)
		{
			if (a_CheckLineOfSight)
			{
				// Trace() returning false is treated as "no obstruction" here
				if (!LineOfSight.Trace(a_Pos, (Pos - a_Pos), (int)(Pos - a_Pos).Length()))
				{
					ClosestDistance = Distance;
					ClosestPlayer = *itr;
				}
			}
			else
			{
				ClosestDistance = Distance;
				ClosestPlayer = *itr;
			}
		}
	}
	return ClosestPlayer;
}
// Sends the full tab-list (all players with live clients) to a_DestPlayer.
void cWorld::SendPlayerList(cPlayer * a_DestPlayer)
{
	// Sends the playerlist to a_DestPlayer
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(); itr != m_Players.end(); ++itr)
	{
		cClientHandle * ch = (*itr)->GetClientHandle();
		if ((ch != nullptr) && !ch->IsDestroyed())
		{
			// NOTE(review): a_DestPlayer's own client handle is not null-checked here — verify callers
			a_DestPlayer->GetClientHandle()->SendPlayerListAddPlayer(*(*itr));
		}
	}
}
// Entity iteration helpers — all forward to the chunkmap; a callback returning true aborts.
bool cWorld::ForEachEntity(cEntityCallback & a_Callback)
{
	return m_ChunkMap->ForEachEntity(a_Callback);
}
bool cWorld::ForEachEntityInChunk(int a_ChunkX, int a_ChunkZ, cEntityCallback & a_Callback)
{
	return m_ChunkMap->ForEachEntityInChunk(a_ChunkX, a_ChunkZ, a_Callback);
}
bool cWorld::ForEachEntityInBox(const cBoundingBox & a_Box, cEntityCallback & a_Callback)
{
	return m_ChunkMap->ForEachEntityInBox(a_Box, a_Callback);
}
// Calls a_Callback on the entity with the given unique ID, checking the not-yet-added queue
// first, then the chunkmap. Returns true if the entity was found.
bool cWorld::DoWithEntityByID(UInt32 a_UniqueID, cEntityCallback & a_Callback)
{
	// First check the entities-to-add:
	{
		cCSLock Lock(m_CSEntitiesToAdd);
		for (auto & ent: m_EntitiesToAdd)
		{
			if (ent->GetUniqueID() == a_UniqueID)
			{
				a_Callback.Item(ent);
				return true;
			}
		} // for ent - m_EntitiesToAdd[]
	}
	// Then check the chunkmap:
	return m_ChunkMap->DoWithEntityByID(a_UniqueID, a_Callback);
}
// Chunk-client bookkeeping — all forward to the chunkmap / chunk sender:
void cWorld::CompareChunkClients(int a_ChunkX1, int a_ChunkZ1, int a_ChunkX2, int a_ChunkZ2, cClientDiffCallback & a_Callback)
{
	m_ChunkMap->CompareChunkClients(a_ChunkX1, a_ChunkZ1, a_ChunkX2, a_ChunkZ2, a_Callback);
}
bool cWorld::AddChunkClient(int a_ChunkX, int a_ChunkZ, cClientHandle * a_Client)
{
	return m_ChunkMap->AddChunkClient(a_ChunkX, a_ChunkZ, a_Client);
}
void cWorld::RemoveChunkClient(int a_ChunkX, int a_ChunkZ, cClientHandle * a_Client)
{
	m_ChunkMap->RemoveChunkClient(a_ChunkX, a_ChunkZ, a_Client);
}
void cWorld::RemoveClientFromChunks(cClientHandle * a_Client)
{
	m_ChunkMap->RemoveClientFromChunks(a_Client);
}
// Queues the chunk to be sent to the client with the given priority.
void cWorld::SendChunkTo(int a_ChunkX, int a_ChunkZ, cChunkSender::eChunkPriority a_Priority, cClientHandle * a_Client)
{
	m_ChunkSender.QueueSendChunkTo(a_ChunkX, a_ChunkZ, a_Priority, a_Client);
}
// Like SendChunkTo, but also registers the chunk as wanted by the client first.
void cWorld::ForceSendChunkTo(int a_ChunkX, int a_ChunkZ, cChunkSender::eChunkPriority a_Priority, cClientHandle * a_Client)
{
	a_Client->AddWantedChunk(a_ChunkX, a_ChunkZ);
	m_ChunkSender.QueueSendChunkTo(a_ChunkX, a_ChunkZ, a_Priority, a_Client);
}
void cWorld::RemoveClientFromChunkSender(cClientHandle * a_Client)
{
	m_ChunkSender.RemoveClient(a_Client);
}
// Touches / prepares a chunk (loading or generating it as needed):
void cWorld::TouchChunk(int a_ChunkX, int a_ChunkZ)
{
	m_ChunkMap->TouchChunk(a_ChunkX, a_ChunkZ);
}
void cWorld::PrepareChunk(int a_ChunkX, int a_ChunkZ, cChunkCoordCallback * a_CallAfter)
{
	m_ChunkMap->PrepareChunk(a_ChunkX, a_ChunkZ, a_CallAfter);
}
// Notifies the chunkmap that loading the chunk from storage failed (it will be generated instead).
void cWorld::ChunkLoadFailed(int a_ChunkX, int a_ChunkZ)
{
	m_ChunkMap->ChunkLoadFailed(a_ChunkX, a_ChunkZ);
}
bool cWorld::SetSignLines(int a_BlockX, int a_BlockY, int a_BlockZ, const AString & a_Line1, const AString & a_Line2, const AString & a_Line3, const AString & a_Line4, cPlayer * a_Player)
{
	// Working copies that the UpdatingSign plugin hook is allowed to modify:
	AString Line1 = a_Line1;
	AString Line2 = a_Line2;
	AString Line3 = a_Line3;
	AString Line4 = a_Line4;
	auto PlgMgr = cRoot::Get()->GetPluginManager();
	// A plugin may veto the whole update:
	if (PlgMgr->CallHookUpdatingSign(*this, a_BlockX, a_BlockY, a_BlockZ, Line1, Line2, Line3, Line4, a_Player))
	{
		return false;
	}
	// Apply the (possibly plugin-modified) lines; fail if the sign isn't there / chunk not loaded:
	if (!m_ChunkMap->SetSignLines(a_BlockX, a_BlockY, a_BlockZ, Line1, Line2, Line3, Line4))
	{
		return false;
	}
	// Notify plugins of the successful update:
	PlgMgr->CallHookUpdatedSign(*this, a_BlockX, a_BlockY, a_BlockZ, Line1, Line2, Line3, Line4, a_Player);
	return true;
}
// Sets the command stored in the command block at the given coords.
// Returns false if there is no command block there.
bool cWorld::SetCommandBlockCommand(int a_BlockX, int a_BlockY, int a_BlockZ, const AString & a_Command)
{
	// Local callback that writes the command into the found command block entity:
	class cUpdateCommandBlock : public cCommandBlockCallback
	{
		AString m_Command;
	public:
		cUpdateCommandBlock(const AString & a_CallbackCommand) : m_Command(a_CallbackCommand) {}
		virtual bool Item(cCommandBlockEntity * a_CommandBlock) override
		{
			a_CommandBlock->SetCommand(m_Command);
			// Returning false lets DoWithCommandBlockAt report success
			return false;
		}
	} CmdBlockCB (a_Command);
	return DoWithCommandBlockAt(a_BlockX, a_BlockY, a_BlockZ, CmdBlockCB);
}
// Returns true if the block at the given coords is a (wooden or iron) trapdoor and it is open.
// Returns false for any other block, or when the chunk isn't loaded.
bool cWorld::IsTrapdoorOpen(int a_BlockX, int a_BlockY, int a_BlockZ)
{
	BLOCKTYPE Block;
	NIBBLETYPE Meta;
	// Bail out if the block can't be queried (chunk not loaded); previously the
	// uninitialized Block / Meta values were read regardless of this failure:
	if (!GetBlockTypeMeta(a_BlockX, a_BlockY, a_BlockZ, Block, Meta))
	{
		return false;
	}
	if ((Block != E_BLOCK_TRAPDOOR) && (Block != E_BLOCK_IRON_TRAPDOOR))
	{
		return false;
	}
	// Bit 0x4 of the meta is the "open" flag:
	return (Meta & 0x4) > 0;
}
// Opens or closes the trapdoor at the given coords. Returns true only when the state was
// actually toggled; false if the block isn't a trapdoor, the chunk isn't loaded, or the
// trapdoor was already in the requested state.
bool cWorld::SetTrapdoorOpen(int a_BlockX, int a_BlockY, int a_BlockZ, bool a_Open)
{
	BLOCKTYPE Block;
	NIBBLETYPE Meta;
	// Bail out if the block can't be queried (chunk not loaded); previously the
	// uninitialized Block / Meta values were read regardless of this failure:
	if (!GetBlockTypeMeta(a_BlockX, a_BlockY, a_BlockZ, Block, Meta))
	{
		return false;
	}
	if ((Block != E_BLOCK_TRAPDOOR) && (Block != E_BLOCK_IRON_TRAPDOOR))
	{
		return false;
	}
	bool IsOpen = (Meta & 0x4) > 0;
	if (a_Open != IsOpen)
	{
		// Toggle the "open" bit and play the trapdoor sound effect:
		SetBlockMeta(a_BlockX, a_BlockY, a_BlockZ, Meta ^ 0x4);
		BroadcastSoundParticleEffect(1003, a_BlockX, a_BlockY, a_BlockZ, 0);
		return true;
	}
	return false;
}
// Marks the chunk for regeneration and queues it on the generator (force-regenerate).
void cWorld::RegenerateChunk(int a_ChunkX, int a_ChunkZ)
{
	m_ChunkMap->MarkChunkRegenerating(a_ChunkX, a_ChunkZ);
	m_Generator.QueueGenerateChunk(a_ChunkX, a_ChunkZ, true);
}
// Generates the chunk if it doesn't exist yet.
void cWorld::GenerateChunk(int a_ChunkX, int a_ChunkZ)
{
	m_ChunkMap->GenerateChunk(a_ChunkX, a_ChunkZ);
}
// Queues the chunk for (re-)lighting; a_Callback is invoked when done.
void cWorld::QueueLightChunk(int a_ChunkX, int a_ChunkZ, cChunkCoordCallback * a_Callback)
{
	m_Lighting.QueueChunk(a_ChunkX, a_ChunkZ, a_Callback);
}
bool cWorld::IsChunkLighted(int a_ChunkX, int a_ChunkZ)
{
	return m_ChunkMap->IsChunkLighted(a_ChunkX, a_ChunkZ);
}
// Calls a_Callback for each loaded chunk in the given rectangle (inclusive bounds).
bool cWorld::ForEachChunkInRect(int a_MinChunkX, int a_MaxChunkX, int a_MinChunkZ, int a_MaxChunkZ, cChunkDataCallback & a_Callback)
{
	return m_ChunkMap->ForEachChunkInRect(a_MinChunkX, a_MaxChunkX, a_MinChunkZ, a_MaxChunkZ, a_Callback);
}
// Saves all dirty chunks; records the save-pass time.
void cWorld::SaveAllChunks(void)
{
	m_LastSave = std::chrono::duration_cast<cTickTimeLong>(m_WorldAge);
	m_ChunkMap->SaveAllChunks();
}
// Schedules a full save to run on the world's tick thread.
void cWorld::QueueSaveAllChunks(void)
{
	QueueTask(std::make_shared<cWorld::cTaskSaveAllChunks>());
}
// Enqueues a task to run on the next world tick.
void cWorld::QueueTask(cTaskPtr a_Task)
{
	cCSLock Lock(m_CSTasks);
	m_Tasks.push_back(std::move(a_Task));
}
// Schedules a task to run a_DelayTicks world ticks from now. The scheduled-task list is kept
// sorted by target tick, so the new task is inserted before the first later-or-equal entry.
void cWorld::ScheduleTask(int a_DelayTicks, cTaskPtr a_Task)
{
	Int64 TargetTick = a_DelayTicks + std::chrono::duration_cast<cTickTimeLong>(m_WorldAge).count();
	// Insert the task into the list of scheduled tasks, ordered by its target tick
	cCSLock Lock(m_CSScheduledTasks);
	for (cScheduledTasks::iterator itr = m_ScheduledTasks.begin(), end = m_ScheduledTasks.end(); itr != end; ++itr)
	{
		if ((*itr)->m_TargetTick >= TargetTick)
		{
			m_ScheduledTasks.insert(itr, cScheduledTaskPtr(new cScheduledTask(TargetTick, a_Task)));
			return;
		}
	}
	// No later task found - append at the end:
	m_ScheduledTasks.push_back(cScheduledTaskPtr(new cScheduledTask(TargetTick, a_Task)));
}
// Queues an entity to be added to the world on the next tick (thread-safe entry point).
void cWorld::AddEntity(cEntity * a_Entity)
{
	cCSLock Lock(m_CSEntitiesToAdd);
	m_EntitiesToAdd.push_back(a_Entity);
}
// Returns true if an entity with the given unique ID exists — either already in the world's
// chunkmap or still waiting in the to-be-added queue.
bool cWorld::HasEntity(UInt32 a_UniqueID)
{
	// Check if the entity is in the queue to be added to the world:
	{
		cCSLock Lock(m_CSEntitiesToAdd);
		for (cEntityList::const_iterator itr = m_EntitiesToAdd.begin(), end = m_EntitiesToAdd.end(); itr != end; ++itr)
		{
			if ((*itr)->GetUniqueID() == a_UniqueID)
			{
				return true;
			}
		} // for itr - m_EntitiesToAdd[]
	}
	// Check if the entity is in the chunkmap:
	if (m_ChunkMap.get() == nullptr)
	{
		// Chunkmap has already been destroyed, there are no entities anymore.
		return false;
	}
	return m_ChunkMap->HasEntity(a_UniqueID);
}
/*
unsigned int cWorld::GetNumPlayers(void)
{
	cCSLock Lock(m_CSPlayers);
	return m_Players.size();
}
*/
// Returns the number of currently loaded chunks.
int cWorld::GetNumChunks(void) const
{
	return m_ChunkMap->GetNumChunks();
}
// Fills in chunk statistics: valid / dirty counts from the chunkmap, lighting-queue length separately.
void cWorld::GetChunkStats(int & a_NumValid, int & a_NumDirty, int & a_NumInLightingQueue)
{
	m_ChunkMap->GetChunkStats(a_NumValid, a_NumDirty);
	a_NumInLightingQueue = (int) m_Lighting.GetQueueLength();
}
// Processes the delayed-block-tick queue: decrements each entry's countdown, ticks and frees
// entries that reached zero, and re-queues the rest. The queue is swapped into a scratch copy
// first so that TickBlock handlers may safely queue new blocks while we iterate.
void cWorld::TickQueuedBlocks(void)
{
	if (m_BlockTickQueue.empty())
	{
		return;
	}
	m_BlockTickQueueCopy.clear();
	m_BlockTickQueue.swap(m_BlockTickQueueCopy);
	for (std::vector<BlockTickQueueItem *>::iterator itr = m_BlockTickQueueCopy.begin(); itr != m_BlockTickQueueCopy.end(); ++itr)
	{
		BlockTickQueueItem * Block = (*itr);
		Block->TicksToWait -= 1;
		if (Block->TicksToWait <= 0)
		{
			// TODO: Handle the case when the chunk is already unloaded
			m_ChunkMap->TickBlock(Block->X, Block->Y, Block->Z);
			delete Block;  // We don't have to remove it from the vector, this will happen automatically on the next tick
		}
		else
		{
			m_BlockTickQueue.push_back(Block);  // Keep the block in the queue
		}
	} // for itr - m_BlockTickQueueCopy[]
}
void cWorld::QueueBlockForTick(int a_BlockX, int a_BlockY, int a_BlockZ, int a_TicksToWait)
{
	// Schedules a single block tick at the given coords after a_TicksToWait world ticks.
	// The item is owned by the queue and freed by TickQueuedBlocks once it fires.
	auto QueueItem = new BlockTickQueueItem;
	QueueItem->X = a_BlockX;
	QueueItem->Y = a_BlockY;
	QueueItem->Z = a_BlockZ;
	QueueItem->TicksToWait = a_TicksToWait;
	m_BlockTickQueue.push_back(QueueItem);
}
/** Returns true if any of the four horizontal neighbors of the specified block is a water block. */
bool cWorld::IsBlockDirectlyWatered(int a_BlockX, int a_BlockY, int a_BlockZ)
{
	// Check the neighbors one by one, in the same order as the original short-circuit expression:
	if (IsBlockWater(GetBlock(a_BlockX - 1, a_BlockY, a_BlockZ)))
	{
		return true;
	}
	if (IsBlockWater(GetBlock(a_BlockX + 1, a_BlockY, a_BlockZ)))
	{
		return true;
	}
	if (IsBlockWater(GetBlock(a_BlockX, a_BlockY, a_BlockZ - 1)))
	{
		return true;
	}
	return IsBlockWater(GetBlock(a_BlockX, a_BlockY, a_BlockZ + 1));
}
/** Spawns a mob of the specified type at the specified coords.
Returns the mob's unique ID, or cEntity::INVALID_ID if the mob couldn't be created or the spawn was vetoed. */
UInt32 cWorld::SpawnMob(double a_PosX, double a_PosY, double a_PosZ, eMonsterType a_MonsterType)
{
	// Declare-and-initialize in one step (the original initialized to nullptr and immediately overwrote it):
	cMonster * Monster = cMonster::NewMonsterFromType(a_MonsterType);
	if (Monster == nullptr)
	{
		return cEntity::INVALID_ID;
	}
	Monster->SetPosition(a_PosX, a_PosY, a_PosZ);

	// SpawnMobFinalize takes over ownership of the Monster object:
	return SpawnMobFinalize(Monster);
}
/** Finishes the spawning of a mob: sets full health, runs the spawn hooks and initializes the mob into this world.
Takes ownership of a_Monster: the object is deleted if a plugin vetoes the spawn or if initialization fails.
Returns the mob's unique ID, or cEntity::INVALID_ID on failure. */
UInt32 cWorld::SpawnMobFinalize(cMonster * a_Monster)
{
	ASSERT(a_Monster != nullptr);

	// Give the mob full health.
	a_Monster->SetHealth(a_Monster->GetMaxHealth());

	// A plugin doesn't agree with the spawn. bail out.
	if (cPluginManager::Get()->CallHookSpawningMonster(*this, *a_Monster))
	{
		delete a_Monster;
		a_Monster = nullptr;
		return cEntity::INVALID_ID;
	}

	// Initialize the monster into the current world.
	if (!a_Monster->Initialize(*this))
	{
		delete a_Monster;
		a_Monster = nullptr;
		return cEntity::INVALID_ID;
	}

	// Notify the plugins that the spawn has succeeded:
	cPluginManager::Get()->CallHookSpawnedMonster(*this, *a_Monster);
	return a_Monster->GetUniqueID();
}
/** Creates a projectile entity of the specified kind and initializes it into this world.
Returns the projectile's unique ID, or cEntity::INVALID_ID on failure. */
UInt32 cWorld::CreateProjectile(double a_PosX, double a_PosY, double a_PosZ, cProjectileEntity::eKind a_Kind, cEntity * a_Creator, const cItem * a_Item, const Vector3d * a_Speed)
{
	// Create() may fail for unknown kinds or invalid parameters:
	cProjectileEntity * NewProjectile = cProjectileEntity::Create(a_Kind, a_Creator, a_PosX, a_PosY, a_PosZ, a_Item, a_Speed);
	if (NewProjectile == nullptr)
	{
		return cEntity::INVALID_ID;
	}

	// If the projectile cannot be initialized into the world, dispose of it:
	if (!NewProjectile->Initialize(*this))
	{
		delete NewProjectile;
		NewProjectile = nullptr;
		return cEntity::INVALID_ID;
	}

	return NewProjectile->GetUniqueID();
}
/** Appends to a_Results the names of all players whose (custom) name contains the last word of a_Text.
Matches closer to the start of the name are returned first (#1274). */
void cWorld::TabCompleteUserName(const AString & a_Text, AStringVector & a_Results)
{
	typedef std::pair<AString::size_type, AString> pair_t;
	// Only the last space-separated word of the input is matched against player names:
	size_t LastSpace = a_Text.find_last_of(" ");  // Find the position of the last space
	AString LastWord = a_Text.substr(LastSpace + 1, a_Text.length());  // Find the last word
	if (LastWord.empty())
	{
		return;
	}
	std::vector<pair_t> UsernamesByWeight;
	cCSLock Lock(m_CSPlayers);
	for (cPlayerList::iterator itr = m_Players.begin(), end = m_Players.end(); itr != end; ++itr)
	{
		// Match against the custom name, if the player has one set:
		AString PlayerName ((*itr)->GetName());
		if ((*itr)->HasCustomName())
		{
			PlayerName = (*itr)->GetCustomName();
		}
		AString::size_type Found = PlayerName.find(LastWord);  // Try to find last word in playername
		if (Found == AString::npos)
		{
			continue;  // No match
		}
		UsernamesByWeight.push_back(std::make_pair(Found, PlayerName));  // Match! Store it with the position of the match as a weight
	}
	Lock.Unlock();
	std::sort(UsernamesByWeight.begin(), UsernamesByWeight.end());  // Sort lexicographically (by the first value, then second), so higher weights (usernames with match closer to start) come first (#1274)
	/* TODO: Uncomment once migrated to C++11
	NOTE(review): the lambda originally returned p.first (the match weight); it must return
	p.second (the player name) to match the loop below - corrected here:
	std::transform(
		UsernamesByWeight.begin(),
		UsernamesByWeight.end(),
		std::back_inserter(a_Results),
		[](const pair_t & p) { return p.second; }
	);
	*/
	a_Results.reserve(UsernamesByWeight.size());
	for (std::vector<pair_t>::const_iterator itr = UsernamesByWeight.begin(); itr != UsernamesByWeight.end(); ++itr)
	{
		a_Results.push_back(itr->second);
	}
}
/** Sets or clears the always-ticked flag for the specified chunk; simple forward to the chunkmap. */
void cWorld::SetChunkAlwaysTicked(int a_ChunkX, int a_ChunkZ, bool a_AlwaysTicked)
{
	m_ChunkMap->SetChunkAlwaysTicked(a_ChunkX, a_ChunkZ, a_AlwaysTicked);
}
/** Creates and registers the redstone simulator chosen by the INI settings.
Unknown simulator names fall back to "Incremental" with a warning, matching the behavior of
InitializeFluidSimulator(); the original would register and return nullptr in that case. */
cRedstoneSimulator * cWorld::InitializeRedstoneSimulator(cIniFile & a_IniFile)
{
	AString SimulatorName = a_IniFile.GetValueSet("Physics", "RedstoneSimulator", "Incremental");
	if (SimulatorName.empty())
	{
		LOGWARNING("[Physics] RedstoneSimulator not present or empty in %s, using the default of \"Incremental\".", GetIniFileName().c_str());
		SimulatorName = "Incremental";
	}
	cRedstoneSimulator * res = nullptr;
	if (NoCaseCompare(SimulatorName, "noop") == 0)
	{
		res = new cRedstoneNoopSimulator(*this);
	}
	else
	{
		if (NoCaseCompare(SimulatorName, "Incremental") != 0)
		{
			// The simulator name doesn't match anything we have, issue a warning and use the default:
			LOGWARNING("[Physics] Unknown RedstoneSimulator \"%s\" in %s, using the default of \"Incremental\".", SimulatorName.c_str(), GetIniFileName().c_str());
		}
		res = new cIncrementalRedstoneSimulator(*this);
	}
	m_SimulatorManager->RegisterSimulator(res, 1);
	return res;
}
/** Creates and registers the fluid simulator for the specified fluid, based on the INI settings.
a_FluidName ("Water" / "Lava") selects the INI keys and the defaults; a_SimulateBlock and
a_StationaryBlock are the flowing and stationary blocktypes handled by the simulator.
Returns the created simulator; every code path below assigns a non-null simulator. */
cFluidSimulator * cWorld::InitializeFluidSimulator(cIniFile & a_IniFile, const char * a_FluidName, BLOCKTYPE a_SimulateBlock, BLOCKTYPE a_StationaryBlock)
{
	AString SimulatorNameKey;
	Printf(SimulatorNameKey, "%sSimulator", a_FluidName);
	AString SimulatorSectionName;
	Printf(SimulatorSectionName, "%sSimulator", a_FluidName);
	AString SimulatorName = a_IniFile.GetValueSet("Physics", SimulatorNameKey, "Vanilla");
	if (SimulatorName.empty())
	{
		LOGWARNING("[Physics] %s not present or empty in %s, using the default of \"Vanilla\".", SimulatorNameKey.c_str(), GetIniFileName().c_str());
		SimulatorName = "Vanilla";
	}
	cFluidSimulator * res = nullptr;
	bool IsWater = (strcmp(a_FluidName, "Water") == 0);  // Used for defaults
	int Rate = 1;
	if (
		(NoCaseCompare(SimulatorName, "vaporize") == 0) ||
		(NoCaseCompare(SimulatorName, "vaporise") == 0)
	)
	{
		// "Vaporize": the fluid disappears instead of spreading:
		res = new cVaporizeFluidSimulator(*this, a_SimulateBlock, a_StationaryBlock);
	}
	else if (
		(NoCaseCompare(SimulatorName, "noop") == 0) ||
		(NoCaseCompare(SimulatorName, "nop") == 0) ||
		(NoCaseCompare(SimulatorName, "null") == 0) ||
		(NoCaseCompare(SimulatorName, "nil") == 0)
	)
	{
		// "Noop": the fluid doesn't simulate at all:
		res = new cNoopFluidSimulator(*this, a_SimulateBlock, a_StationaryBlock);
	}
	else
	{
		// The remaining simulators share the Falloff / TickDelay / NumNeighborsForSource settings:
		int Falloff = a_IniFile.GetValueSetI(SimulatorSectionName, "Falloff", IsWater ? 1 : 2);
		int TickDelay = a_IniFile.GetValueSetI(SimulatorSectionName, "TickDelay", IsWater ? 5 : 30);
		int NumNeighborsForSource = a_IniFile.GetValueSetI(SimulatorSectionName, "NumNeighborsForSource", IsWater ? 2 : -1);
		// Falloff is stored in a nibble, so clamp the configured value to the valid 0 .. 15 range:
		if ((Falloff > 15) || (Falloff < 0))
		{
			LOGWARNING("Falloff for %s simulator is out of range, assuming default of %d", a_FluidName, IsWater ? 1 : 2);
			Falloff = IsWater ? 1 : 2;
		}
		if (NoCaseCompare(SimulatorName, "floody") == 0)
		{
			res = new cFloodyFluidSimulator(*this, a_SimulateBlock, a_StationaryBlock, static_cast<NIBBLETYPE>(Falloff), TickDelay, NumNeighborsForSource);
		}
		else if (NoCaseCompare(SimulatorName, "vanilla") == 0)
		{
			res = new cVanillaFluidSimulator(*this, a_SimulateBlock, a_StationaryBlock, static_cast<NIBBLETYPE>(Falloff), TickDelay, NumNeighborsForSource);
		}
		else
		{
			// The simulator name doesn't match anything we have, issue a warning:
			LOGWARNING("%s [Physics]:%s specifies an unknown simulator, using the default \"Vanilla\".", GetIniFileName().c_str(), SimulatorNameKey.c_str());
			res = new cVanillaFluidSimulator(*this, a_SimulateBlock, a_StationaryBlock, static_cast<NIBBLETYPE>(Falloff), TickDelay, NumNeighborsForSource);
		}
	}
	m_SimulatorManager->RegisterSimulator(res, Rate);
	return res;
}
/** Moves the players queued in m_PlayersToAdd into the world's player list, chunkmap and client list.
May only be called from the world's tick thread (asserted below).
Note that the three critical sections are taken one after another, never nested, to avoid deadlocks. */
void cWorld::AddQueuedPlayers(void)
{
	ASSERT(m_TickThread.IsCurrentThread());
	// Grab the list of players to add, it has to be locked to access it:
	cPlayerList PlayersToAdd;
	{
		cCSLock Lock(m_CSPlayersToAdd);
		std::swap(PlayersToAdd, m_PlayersToAdd);
	}
	// Add all the players in the grabbed list:
	{
		cCSLock Lock(m_CSPlayers);
		for (cPlayerList::iterator itr = PlayersToAdd.begin(), end = PlayersToAdd.end(); itr != end; ++itr)
		{
			ASSERT(std::find(m_Players.begin(), m_Players.end(), *itr) == m_Players.end());  // Is it already in the list? HOW?
			LOGD("Adding player %s to world \"%s\".", (*itr)->GetName().c_str(), m_WorldName.c_str());
			m_Players.push_back(*itr);
			(*itr)->SetWorld(this);
			// Add to chunkmap, if not already there (Spawn vs MoveToWorld):
			m_ChunkMap->AddEntityIfNotPresent(*itr);
		}  // for itr - PlayersToAdd[]
	}  // Lock(m_CSPlayers)
	// Add all the players' clienthandles:
	{
		cCSLock Lock(m_CSClients);
		for (cPlayerList::iterator itr = PlayersToAdd.begin(), end = PlayersToAdd.end(); itr != end; ++itr)
		{
			cClientHandlePtr Client = (*itr)->GetClientHandlePtr();
			if (Client != nullptr)
			{
				m_Clients.push_back(Client);
			}
		}  // for itr - PlayersToAdd[]
	}  // Lock(m_CSClients)
	// Stream chunks to all eligible clients:
	for (cPlayerList::iterator itr = PlayersToAdd.begin(), end = PlayersToAdd.end(); itr != end; ++itr)
	{
		cClientHandle * Client = (*itr)->GetClientHandle();
		if (Client != nullptr)
		{
			// Re-sync the client's position, health and inventory after the world change:
			Client->SendPlayerMoveLook();
			Client->SendHealth();
			Client->SendWholeInventory(*(*itr)->GetWindow());
		}
	}  // for itr - PlayersToAdd[]
}
////////////////////////////////////////////////////////////////////////////////
// cWorld::cTaskSaveAllChunks:

/** Task body: simple forward to cWorld::SaveAllChunks(). */
void cWorld::cTaskSaveAllChunks::Run(cWorld & a_World)
{
	a_World.SaveAllChunks();
}
////////////////////////////////////////////////////////////////////////////////
// cWorld::cTaskUnloadUnusedChunks

/** Task body: simple forward to cWorld::UnloadUnusedChunks(). */
void cWorld::cTaskUnloadUnusedChunks::Run(cWorld & a_World)
{
	a_World.UnloadUnusedChunks();
}
////////////////////////////////////////////////////////////////////////////////
// cWorld::cTaskSendBlockToAllPlayers

/** Stores the block coords that are to be re-sent to all players when the task runs. */
cWorld::cTaskSendBlockToAllPlayers::cTaskSendBlockToAllPlayers(std::vector<Vector3i> & a_SendQueue) :
	m_SendQueue(a_SendQueue)
{
}
/** Task body: sends each queued block coord to every player in the world. */
void cWorld::cTaskSendBlockToAllPlayers::Run(cWorld & a_World)
{
	class cPlayerCallback :
		public cPlayerListCallback
	{
	public:
		cPlayerCallback(std::vector<Vector3i> & a_SendQueue, cWorld & a_CallbackWorld) :
			m_SendQueue(a_SendQueue),
			m_World(a_CallbackWorld)
		{
		}

		virtual bool Item(cPlayer * a_Player)
		{
			// Send every queued block to this player; return false so that iteration visits all players:
			for (std::vector<Vector3i>::const_iterator itr = m_SendQueue.begin(); itr != m_SendQueue.end(); ++itr)
			{
				m_World.SendBlockTo(itr->x, itr->y, itr->z, a_Player);
			}
			return false;
		}

	private:
		// Hold the queue by reference rather than by value: the original copied the entire
		// vector into the callback, although the callback never outlives the queue (both are
		// scoped to this Run() invocation) and only reads from it.
		std::vector<Vector3i> & m_SendQueue;

		cWorld & m_World;
	} PlayerCallback(m_SendQueue, a_World);

	a_World.ForEachPlayer(PlayerCallback);
}
////////////////////////////////////////////////////////////////////////////////
// cWorld::cChunkGeneratorCallbacks:

/** Binds the generator callbacks to the specified world. */
cWorld::cChunkGeneratorCallbacks::cChunkGeneratorCallbacks(cWorld & a_World) :
	m_World(&a_World)
{
}
/** Called by the generator when a chunk has finished generating.
Packs the generated data into a cSetChunkData object and queues it into the world. */
void cWorld::cChunkGeneratorCallbacks::OnChunkGenerated(cChunkDesc & a_ChunkDesc)
{
	// The generator keeps block metas uncompressed; compress them into nibbles for the chunk data:
	cChunkDef::BlockNibbles BlockMetas;
	a_ChunkDesc.CompressBlockMetas(BlockMetas);
	cSetChunkDataPtr SetChunkData(new cSetChunkData(
		a_ChunkDesc.GetChunkX(), a_ChunkDesc.GetChunkZ(),
		a_ChunkDesc.GetBlockTypes(), BlockMetas,
		nullptr, nullptr,  // We don't have lighting, chunk will be lighted when needed
		&a_ChunkDesc.GetHeightMap(), &a_ChunkDesc.GetBiomeMap(),
		std::move(a_ChunkDesc.GetEntities()), std::move(a_ChunkDesc.GetBlockEntities()),
		true
	));
	SetChunkData->RemoveInvalidBlockEntities();
	m_World->QueueSetChunkData(SetChunkData);
}
/** Simple forward to cWorld::IsChunkValid(). */
bool cWorld::cChunkGeneratorCallbacks::IsChunkValid(int a_ChunkX, int a_ChunkZ)
{
	return m_World->IsChunkValid(a_ChunkX, a_ChunkZ);
}
/** Simple forward to cWorld::IsChunkQueued(). */
bool cWorld::cChunkGeneratorCallbacks::IsChunkQueued(int a_ChunkX, int a_ChunkZ)
{
	return m_World->IsChunkQueued(a_ChunkX, a_ChunkZ);
}
/** Simple forward to cWorld::HasChunkAnyClients(). */
bool cWorld::cChunkGeneratorCallbacks::HasChunkAnyClients(int a_ChunkX, int a_ChunkZ)
{
	return m_World->HasChunkAnyClients(a_ChunkX, a_ChunkZ);
}
/** Forwards the HOOK_CHUNK_GENERATING plugin hook call for the specified chunk. */
void cWorld::cChunkGeneratorCallbacks::CallHookChunkGenerating(cChunkDesc & a_ChunkDesc)
{
	cPluginManager::Get()->CallHookChunkGenerating(
		*m_World, a_ChunkDesc.GetChunkX(), a_ChunkDesc.GetChunkZ(), &a_ChunkDesc
	);
}
/** Forwards the HOOK_CHUNK_GENERATED plugin hook call for the specified chunk. */
void cWorld::cChunkGeneratorCallbacks::CallHookChunkGenerated (cChunkDesc & a_ChunkDesc)
{
	cPluginManager::Get()->CallHookChunkGenerated(
		*m_World, a_ChunkDesc.GetChunkX(), a_ChunkDesc.GetChunkZ(), &a_ChunkDesc
	);
}
/** Returns a cBroadcaster bound to this world. */
cBroadcaster cWorld::GetBroadcaster()
{
	return cBroadcaster(this);
}
| linnemannr/MCServer | src/World.cpp | C++ | apache-2.0 | 95,622 |
package org.shaolin.bmdp.runtime.ddc.client.api;

/**
 * Callback contract for processing a {@code ZData} item and producing a raw byte result.
 *
 * Created by lizhiwe on 4/5/2016.
 */
public interface DataAction {

    /**
     * Executes this action on the given data item.
     *
     * @param data the data item to process
     * @return the raw byte payload produced by the action
     */
    byte[] execute(ZData data);
}
| shaolinwu/uimaster | modules/runtime/src/main/java/org/shaolin/bmdp/runtime/ddc/client/api/DataAction.java | Java | apache-2.0 | 157 |
<?php
namespace Swoft\Console;
use Swoft\App;
use Swoft\Console\Input\Input;
use Swoft\Console\Output\Output;
use Swoft\Console\Style\Style;
use Swoft\Helper\PhpHelper;
use Swoft\Web\ErrorHandler;
/**
 * Command-line application: parses the console input and dispatches it to the
 * matching command controller.
 *
 * @uses Application
 * @version 2017年10月06日
 * @author stelin <phpcrazy@126.com>
 * @copyright Copyright 2010-2016 swoft software
 * @license PHP Version 7.x {@link http://www.php.net/license/3_0.txt}
 */
class Console implements IConsole
{
    /**
     * Default command group
     */
    const DEFAULT_CMD_GROUP = 'server';

    /**
     * Default command within a group
     */
    const DEFAULT_CMD = "index";

    /**
     * Separator between command group and command
     */
    const DELIMITER = ':';

    /**
     * Suffix of command controller class names
     */
    const CONTROLLER_SUFFIX = 'Controller';

    /**
     * Commands usable without an explicit group (mapped onto the default group)
     */
    const DEFAULT_CMDS = [
        'start',
        'reload',
        'stop',
        'restart'
    ];

    /**
     * Console input
     *
     * @var Input
     */
    private $input;

    /**
     * Console output
     *
     * @var Output
     */
    private $output;

    /**
     * Directories scanned for command controllers, keyed by namespace
     *
     * @var array
     */
    private $scanCmds = [];

    /**
     * Error handler
     *
     * @var ErrorHandler
     */
    private $errorHandler;

    /**
     * Unique ID of this console run
     *
     * @var int
     */
    private static $id;

    /**
     * Initialization
     */
    public function __construct()
    {
        // Initialize the output styles
        Style::init();

        // Initialize the run ID and the command scan namespaces
        self::$id = time();
        $this->registerNamespace();

        $this->input = new Input();
        $this->output = new Output();
        $this->errorHandler = new ErrorHandler();
        $this->errorHandler->register();
    }

    /**
     * Runs the console application
     */
    public function run()
    {
        // Map the groupless default commands (start/stop/...) onto the default group
        $cmd = $this->input->getCommand();
        if (in_array($cmd, self::DEFAULT_CMDS, true)) {
            $cmd = sprintf("%s:%s", self::DEFAULT_CMD_GROUP, $cmd);
        }

        // No command given at all: show the base screen
        if (empty($cmd)) {
            $this->baseCommand();
            return;
        }

        // Dispatch the command, rendering any uncaught throwable to the output
        try {
            $this->dispatch($cmd);
        } catch (\Throwable $e) {
            $this->output->writeln(sprintf('<error>%s</error>', $e->getMessage()), true, false);
            $this->output->writeln(sprintf('<error>%s</error>', $e->getTraceAsString()), true, true);
        }
    }

    /**
     * Base screen when no command is given: version info or the command list
     */
    private function baseCommand()
    {
        // Version flags
        if ($this->input->hasOpt('v') || $this->input->hasOpt('version')) {
            $this->showVersion();
            return;
        }

        // Otherwise show the list of available commands
        $this->showCommandList();
    }

    /**
     * Shows the list of available commands
     */
    private function showCommandList()
    {
        // Scan all registered command directories for controllers
        $commands = [];
        foreach ($this->scanCmds as $namespace => $dir) {
            $iterator = new \RecursiveDirectoryIterator($dir);
            $files = new \RecursiveIteratorIterator($iterator);
            $scanCommands = $this->parserCmdAndDesc($namespace, $files);
            $commands = array_merge($commands, $scanCommands);
        }

        // Assemble the command list structure
        $commandList = [];
        $script = $this->input->getFullScript();
        $commandList['Usage:'] = ["php $script"];
        $commandList['Commands:'] = $commands;
        $commandList['Options:'] = [
            '-v,--version' => 'show version',
            '-h,--help' => 'show help'
        ];

        // Show the logo
        $this->output->writeLogo();

        // Show the grouped command list
        $this->output->writeList($commandList, 'comment', 'info');
    }

    /**
     * Parses command names and descriptions from the scanned controller files
     *
     * @param string $namespace namespace of the scanned controllers
     * @param \SplFileInfo[] $files file collection
     *
     * @return array map of command name => description
     */
    private function parserCmdAndDesc($namespace, $files)
    {
        $commands = [];
        foreach ($files as $file) {
            // Skip non-PHP files (strict comparison; pathinfo() returns a string here)
            $ext = pathinfo($file, PATHINFO_EXTENSION);
            if ($ext !== 'php') {
                continue;
            }

            // Controller class name
            $fileName = $file->getFilename();
            list($class) = explode('.', $fileName);
            $className = $namespace . '\\' . $class;

            // Reflect the class to read its description from the docblock
            $rc = new \ReflectionClass($className);
            $docComment = $rc->getDocComment();
            $docAry = DocumentParser::tagList($docComment);
            $desc = $docAry['description'];

            // Derive the command name from the class name
            $cmdName = str_replace(self::CONTROLLER_SUFFIX, '', $class);
            $cmd = strtolower($cmdName);
            $commands[$cmd] = $desc;
        }
        return $commands;
    }

    /**
     * Shows version information
     */
    private function showVersion()
    {
        // Current version info (fixed typo'd local: $phpVersio -> $phpVersion)
        $swoftVersion = App::version();
        $phpVersion = phpversion();
        $swooleVersion = swoole_version();

        // Render the panel
        $this->output->writeLogo();
        $this->output->writeln("swoft: <info>$swoftVersion</info>, php: <info>$phpVersion</info>, swoole: <info>$swooleVersion</info>", true);
        $this->output->writeln("");
    }

    /**
     * Dispatches a command to the first matching controller
     * (renamed from the typo'd private "dispather"; only called from run())
     *
     * @param string $cmd
     */
    private function dispatch(string $cmd)
    {
        // Default command handling: "group" becomes "group:index"
        if (strpos($cmd, self::DELIMITER) === false) {
            $cmd = $cmd . self::DELIMITER . self::DEFAULT_CMD;
        }

        // Split into controller name and command
        list($controllerName, $command) = explode(self::DELIMITER, $cmd);

        // Find the first matching controller class among the registered namespaces
        $isMatch = false;
        $namespaces = array_keys($this->scanCmds);
        foreach ($namespaces as $namespace) {
            $controllerClass = $namespace . "\\" . ucfirst($controllerName) . self::CONTROLLER_SUFFIX;

            // Class does not exist in this namespace
            if (!class_exists($controllerClass)) {
                continue;
            }

            // Use the first matching class
            $isMatch = true;
            $cmdController = new $controllerClass($this->input, $this->output);
            PhpHelper::call([$cmdController, 'run'], [$command]);
            break;
        }

        // No matching command found
        if (!$isMatch) {
            $this->output->writeln('<error>命令不存在</error>', true, true);
        }
    }

    /**
     * Registers a directory to be scanned for command controllers
     *
     * @param string $namespace namespace of the controllers in the directory
     * @param string $path directory path
     */
    public function addScanNs(string $namespace, string $path)
    {
        if (!is_dir($path)) {
            throw new \InvalidArgumentException("$path 路径不存在");
        }
        $this->scanCmds[$namespace] = $path;
    }

    /**
     * Returns the unique run ID
     *
     * @return int
     */
    public static function id()
    {
        return self::$id;
    }

    /**
     * Registers the built-in and application command namespaces for scanning
     */
    private function registerNamespace()
    {
        $this->scanCmds['Swoft\Console\Command'] = dirname(__FILE__) . "/Command";
        $this->scanCmds[COMMAND_NS] = App::getAlias("@commands");
    }
}
| uhonliu/swoft | src/Console/Console.php | PHP | apache-2.0 | 7,314 |