code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Copyright (C) 2014 Johannes Donath <johannesd@evil-co.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.evilco.emulator.extension.chip8;
import org.evilco.emulator.ui_old.extension.AbstractEmulatorExtension;
import org.evilco.emulator.ui_old.extension.InterfaceExtensionManager;
/**
* @author Johannes Donath <johannesd@evil-co.com>
* @copyright Copyright (C) 2014 Evil-Co <http://www.evil-co.com>
*/
public class Chip8Extension extends AbstractEmulatorExtension {
/**
 * {@inheritDoc}
 *
 * <p>The identifier mirrors this class's package name.</p>
 */
@Override
public String getIdentifier () {
	return "org.evilco.emulator.extension.chip8";
}
/**
 * {@inheritDoc}
 *
 * <p>Registers the CHIP-8 emulator implementation for the {@code c8} ROM
 * file extension after the base class has completed its own enable logic.</p>
 */
@Override
public void onEnable (InterfaceExtensionManager extensionManager) {
	super.onEnable (extensionManager);
	// Map ".c8" ROM files to the CHIP-8 emulator.
	extensionManager.registerExtension (this, "c8", Chip8Emulator.class);
}
} | Evil-Co-Legacy/CyborgEmulator | extension/chip8/src/main/java/org/evilco/emulator/extension/chip8/Chip8Extension.java | Java | apache-2.0 | 1,398 |
# Standard RESTful CRUD controller for Pago (payment) records.
class PagosController < ApplicationController
  before_action :set_pago, only: %i[show edit update destroy]

  # GET /pagos
  # GET /pagos.json
  def index
    @pagos = Pago.all
  end

  # GET /pagos/1
  # GET /pagos/1.json
  def show
  end

  # GET /pagos/new
  def new
    @pago = Pago.new
  end

  # GET /pagos/1/edit
  def edit
  end

  # POST /pagos
  # POST /pagos.json
  def create
    @pago = Pago.new(pago_params)

    respond_to do |format|
      if @pago.save
        format.html { redirect_to @pago, notice: "Pago añadido" }
        format.json { render :show, status: :created, location: @pago }
      else
        format.html { render :new }
        format.json { render json: @pago.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /pagos/1
  # PATCH/PUT /pagos/1.json
  def update
    respond_to do |format|
      if @pago.update(pago_params)
        format.html { redirect_to @pago, notice: "Pago actualizado" }
        format.json { render :show, status: :ok, location: @pago }
      else
        format.html { render :edit }
        format.json { render json: @pago.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /pagos/1
  # DELETE /pagos/1.json
  def destroy
    @pago.destroy

    respond_to do |format|
      format.html { redirect_to pagos_url, notice: "Pago eliminado" }
      format.json { head :no_content }
    end
  end

  private

  # Shared lookup for member actions; Pago.find raises RecordNotFound on a bad id.
  def set_pago
    @pago = Pago.find(params[:id])
  end

  # Strong parameters: only these attributes are accepted from the request.
  def pago_params
    params.require(:pago).permit(:cliente_id, :dominio_fecha_expiracion, :pagado,
                                 :fecha_pago, :ano, :dominio_nombre, :dominio_anual,
                                 :hosting_mes, :hosting_anual, :total, :comentarios)
  end
end
| felixparra/Gestzam | gestzam/app/controllers/pagos_controller.rb | Ruby | apache-2.0 | 1,867 |
/*
* Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
*/
#include "inter_vn_stats.h"
#include <oper/interface.h>
#include <oper/mirror_table.h>
using namespace std;
// Look up the stats set tracked for the given VN; NULL when the VN has no
// entry yet.
InterVnStatsCollector::VnStatsSet *InterVnStatsCollector::Find(string vn) {
    VnStatsMap::iterator match = inter_vn_stats_.find(vn);
    return (match == inter_vn_stats_.end()) ? NULL : match->second;
}
// Debug helper: dump the stats of every VN currently tracked.
void InterVnStatsCollector::PrintAll() {
    for (VnStatsMap::iterator it = inter_vn_stats_.begin();
         it != inter_vn_stats_.end(); ++it) {
        PrintVn(it->first);
    }
}
/* Debug helper: log the per-peer-VN traffic counters recorded for one VN. */
void InterVnStatsCollector::PrintVn(string vn) {
    VnStatsSet *stats_set;
    VnStats *stats;
    LOG(DEBUG, "...........Stats for Vn " << vn);
    VnStatsMap::iterator it = inter_vn_stats_.find(vn);
    if (it != inter_vn_stats_.end()) {
        stats_set = it->second;
        /* Walk every peer-VN entry in this VN's set and log its counters
         * (the original comment here was copied from Remove() by mistake;
         * nothing is removed in this loop). */
        VnStatsSet::iterator stats_it = stats_set->begin();
        while(stats_it != stats_set->end()) {
            stats = *stats_it;
            stats_it++;
            LOG(DEBUG, " Other-VN " << stats->dst_vn);
            LOG(DEBUG, " in_pkts " << stats->in_pkts << " in_bytes " << stats->in_bytes);
            LOG(DEBUG, " out_pkts " << stats->out_pkts << " out_bytes " << stats->out_bytes);
        }
    }
}
/* Removes all stats state for the given VN: the map entry itself plus every
 * heap-allocated VnStats element inside its set. */
void InterVnStatsCollector::Remove(string vn) {
    VnStatsSet *stats_set;
    VnStats *stats;
    VnStatsMap::iterator it = inter_vn_stats_.find(vn);
    if (it != inter_vn_stats_.end()) {
        stats_set = it->second;
        /* Remove the entry from the inter_vn_stats_ map */
        inter_vn_stats_.erase(it);
        /* Remove all the elements of map entry value which is a set */
        VnStatsSet::iterator stats_it = stats_set->begin();
        VnStatsSet::iterator del_it;
        while(stats_it != stats_set->end()) {
            stats = *stats_it;
            delete stats;
            /* Save the current position and advance before erasing so the
             * loop iterator is never invalidated by the erase. */
            del_it = stats_it;
            stats_it++;
            stats_set->erase(del_it);
        }
        delete stats_set;
    }
}
/* Accounts a flow sample (bytes/packets) into the inter-VN stats table.
 * Flows whose source/destination VN could not be resolved are charged to the
 * "unknown VN" bucket. */
void InterVnStatsCollector::UpdateVnStats(FlowEntry *fe, uint64_t bytes,
                                          uint64_t pkts) {
    string src_vn = fe->data.source_vn, dst_vn = fe->data.dest_vn;
    if (!fe->data.source_vn.length())
        src_vn = *FlowHandler::UnknownVn();
    if (!fe->data.dest_vn.length())
        dst_vn = *FlowHandler::UnknownVn();
    /* A local flow is accounted in both directions (out of src_vn, into
     * dst_vn); a non-local flow only in the direction implied by its
     * ingress flag. */
    if (fe->local_flow) {
        VnStatsUpdateInternal(src_vn, dst_vn, bytes, pkts, true);
        VnStatsUpdateInternal(dst_vn, src_vn, bytes, pkts, false);
    } else {
        if (fe->data.ingress) {
            VnStatsUpdateInternal(src_vn, dst_vn, bytes, pkts, true);
        } else {
            VnStatsUpdateInternal(dst_vn, src_vn, bytes, pkts, false);
        }
    }
    //PrintAll();
}
/* Adds a sample to the (src_vn -> dst_vn) counter pair, creating the per-VN
 * set and the per-peer entry on first use.  `outgoing` selects whether the
 * out_* or in_* counters are bumped. */
void InterVnStatsCollector::VnStatsUpdateInternal(string src_vn, string dst_vn,
                                                  uint64_t bytes, uint64_t pkts,
                                                  bool outgoing) {
    VnStatsSet *stats_set;
    VnStats *stats;
    VnStatsMap::iterator it = inter_vn_stats_.find(src_vn);
    if (it == inter_vn_stats_.end()) {
        /* First sample for this source VN: allocate the set and the entry.
         * NOTE(review): assumes the VnStats constructor seeds the in/out
         * counters based on the `outgoing` flag -- confirm against its
         * definition. */
        stats = new VnStats(dst_vn, bytes, pkts, outgoing);
        stats_set = new VnStatsSet;
        stats_set->insert(stats);
        inter_vn_stats_.insert(make_pair(src_vn, stats_set));
    } else {
        stats_set = it->second;
        /* Probe with a dummy key -- set lookup is keyed on dst_vn. */
        VnStats key(dst_vn, 0, 0, false);
        VnStatsSet::iterator stats_it = stats_set->find(&key);
        if (stats_it == stats_set->end()) {
            stats = new VnStats(dst_vn, bytes, pkts, outgoing);
            stats_set->insert(stats);
        } else {
            stats = *stats_it;
            if (outgoing) {
                stats->out_bytes += bytes;
                stats->out_pkts += pkts;
            } else {
                stats->in_bytes += bytes;
                stats->in_pkts += pkts;
            }
        }
    }
}
| sysbot/contrail-controller | src/vnsw/agent/uve/inter_vn_stats.cc | C++ | apache-2.0 | 4,028 |
package com.fordprog.matrix.interpreter.type;
/**
 * Value categories used by the interpreter's type system.
 */
public enum Type {
	// Scalar rational value.
	RATIONAL,
	// Matrix value.
	MATRIX,
	// Callable function value.
	FUNCTION,
	// Absence of a value (e.g. a function with no result).
	VOID
}
| daergoth/MatrixC | src/main/java/com/fordprog/matrix/interpreter/type/Type.java | Java | apache-2.0 | 115 |
/*
* Copyright 2016 peter.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package onl.area51.filesystem.io;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Map;
import org.kohsuke.MetaInfServices;
/**
* A flat FileSystem which locally matches it's structure
*/
@MetaInfServices(FileSystemIO.class)
public class Flat
        extends LocalFileSystemIO
{

    /**
     * Creates the flat filesystem over the given local base directory.
     *
     * @param basePath local directory backing this filesystem
     * @param env      filesystem environment options
     */
    public Flat( Path basePath,
                 Map<String, ?> env )
    {
        super( basePath, env );
    }

    /**
     * Maps a filesystem path onto the local store verbatim: the path is used
     * unchanged as the local path.
     */
    @Override
    protected String getPath( char[] path )
            throws IOException
    {
        return new String( path );
    }
}
| peter-mount/filesystem | filesystem-core/src/main/java/onl/area51/filesystem/io/Flat.java | Java | apache-2.0 | 1,156 |
/*
* @author Flavio Keller
*
* Copyright 2014 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.sna.constants;
/**
* Enumeration for the different classes that can occur
* when running a SNA method algorithm
*/
public enum SNAClassNames {
    DEGREE("Degree"),
    PAGERANK("PageRank"),
    CLOSENESS("Closeness"),
    BETWEENNESS("Betweenness"),
    PATH("Path"),
    LOCALCLUSTERCOEFFICIENT("LocalClusterCoefficient"),
    TRIADCENSUS("Triad Census"),
    LABELPROPAGATION("Label Propagation");

    /** Human-readable name of the SNA algorithm class. */
    private final String className;

    SNAClassNames(String name) {
        this.className = name;
    }

    /**
     * Returns the human-readable algorithm class name. (The field was
     * previously write-only: assigned in the constructor but never exposed.)
     *
     * @return display name, e.g. {@code "Triad Census"}
     */
    public String getClassName() {
        return className;
    }
}
| fkzrh/signal-collect-sna | src/main/java/com/signalcollect/sna/constants/SNAClassNames.java | Java | apache-2.0 | 1,155 |
import zerorpc
import gevent.queue
import logging
import sys
logging.basicConfig()
# root logger
logger = logging.getLogger()
# set the mimimum level for root logger so it will be possible for a client
# to subscribe and receive logs for any log level
logger.setLevel(0)
class QueueingLogHandler(logging.Handler):
    """Logging handler that forwards every formatted record into a queue.

    A consumer drains the queue (exposed via :attr:`emitted`) to stream log
    lines elsewhere.  ``close()`` pushes a ``None`` sentinel so the consumer
    knows the stream has ended.
    """

    def __init__(self, queue, level, formatter):
        super(QueueingLogHandler, self).__init__()
        self._queue = queue
        self.setLevel(level)
        self.setFormatter(formatter)

    def emit(self, record):
        # Format and hand off in one step; put_nowait never blocks the
        # emitting thread/greenlet.
        self._queue.put_nowait(self.format(record))

    def close(self):
        super(QueueingLogHandler, self).close()
        # Sentinel value signalling end-of-stream to consumers.
        self._queue.put_nowait(None)

    @property
    def emitted(self):
        """The queue of formatted messages (terminated by ``None``)."""
        return self._queue
class TestService(object):
    """zerorpc service for exercising log streaming.

    ``log_stream`` attaches a :class:`QueueingLogHandler` to a named logger
    and yields each emitted record back to the caller as a zerorpc stream.
    """

    _HANDLER_CLASS = QueueingLogHandler
    _DEFAULT_FORMAT = '%(name)s - %(levelname)s - %(asctime)s - %(message)s'

    logger = logging.getLogger("service")

    def __init__(self):
        # Handlers currently attached on behalf of log_stream subscribers.
        self._logging_handlers = set()

    def test(self, logger_name, logger_level, message):
        """Emit *message* on *logger_name* at *logger_level* (e.g. "info")."""
        logger = logging.getLogger(logger_name)
        getattr(logger, logger_level.lower())(message)

    def available_loggers(self):
        """List of initialized logger names."""
        # list() so the result is a serializable list on Python 3 as well
        # (dict views are not a list there).
        return list(logging.getLogger().manager.loggerDict.keys())

    def close_log_streams(self):
        """Closes all log_stream streams."""
        while self._logging_handlers:
            self._logging_handlers.pop().close()

    @zerorpc.stream
    def log_stream(self, logger_name, level_name, format_str):
        """Attaches a log handler to the specified logger and sends emitted
        logs back as a stream.

        Raises ValueError for an unknown logger and AttributeError for an
        unknown level name.
        """
        if logger_name != "" and logger_name not in self.available_loggers():
            raise ValueError("logger {0} is not available".format(logger_name))
        level_name_upper = level_name.upper() if level_name else "NOTSET"
        try:
            level = getattr(logging, level_name_upper)
        except AttributeError:
            # "except X, e" was Python-2-only syntax; this form works on 2.6+
            # and 3.x alike.
            raise AttributeError("log level {0} is not available".format(level_name_upper))
        q = gevent.queue.Queue()
        fmt = format_str if format_str.strip() else self._DEFAULT_FORMAT
        logger = logging.getLogger(logger_name)
        formatter = logging.Formatter(fmt)
        handler = self._HANDLER_CLASS(q, level, formatter)
        logger.addHandler(handler)
        self._logging_handlers.add(handler)
        self.logger.debug("new subscriber for {0}/{1}".format(logger_name or "root", level_name_upper))
        try:
            for msg in handler.emitted:
                if msg is None:
                    return
                yield msg
        finally:
            self._logging_handlers.discard(handler)
            # Detach from the logger as well -- previously the closed handler
            # stayed attached and kept receiving records forever.
            logger.removeHandler(handler)
            handler.close()
            self.logger.debug("subscription finished for {0}/{1}".format(logger_name or "root", level_name_upper))
if __name__ == "__main__":
    # Usage: python logstream_test.py <bind-endpoint>, e.g. tcp://0.0.0.0:4242
    service = TestService()
    server = zerorpc.Server(service)
    server.bind(sys.argv[1])
    logger.warning("starting service")
    try:
        server.run()
    except BaseException as e:
        # "except X, e" was Python-2-only syntax; "as" works on 2.6+ and 3.x.
        logger.error(str(e))
    finally:
        logger.warning("shutting down")
| benctamas/zerorpc-logging | logstream_test.py | Python | apache-2.0 | 3,399 |
<?php
declare(strict_types=1);
namespace DaPigGuy\PiggyCustomEnchants\enchants\armor\chestplate;
use DaPigGuy\PiggyCustomEnchants\enchants\CustomEnchant;
use DaPigGuy\PiggyCustomEnchants\enchants\ToggleableEnchantment;
use DaPigGuy\PiggyCustomEnchants\enchants\traits\TickingTrait;
use pocketmine\entity\effect\EffectInstance;
use pocketmine\entity\effect\VanillaEffects;
use pocketmine\inventory\Inventory;
use pocketmine\item\Item;
use pocketmine\player\Player;
/**
 * "Prowl" chestplate enchant: while sneaking the wearer is hidden from all
 * online players (at the cost of slowness); un-sneaking reveals them again.
 */
class ProwlEnchant extends ToggleableEnchantment
{
    use TickingTrait;

    public string $name = "Prowl";
    public int $maxLevel = 1;
    public int $usageType = CustomEnchant::TYPE_CHESTPLATE;
    public int $itemType = CustomEnchant::ITEM_TYPE_CHESTPLATE;

    /** @var bool[] */
    // Keyed by player name; set while the player is actively prowling.
    public array $prowled;
public function toggle(Player $player, Item $item, Inventory $inventory, int $slot, int $level, bool $toggle): void
{
if (!$toggle && isset($this->prowled[$player->getName()])) {
foreach ($player->getServer()->getOnlinePlayers() as $p) {
$p->showPlayer($player);
}
$player->getEffects()->remove(VanillaEffects::SLOWNESS());
if (!$player->getEffects()->has(VanillaEffects::INVISIBILITY())) {
$player->setInvisible(false);
}
unset($this->prowled[$player->getName()]);
}
}
public function tick(Player $player, Item $item, Inventory $inventory, int $slot, int $level): void
{
if ($player->isSneaking()) {
foreach ($player->getServer()->getOnlinePlayers() as $p) {
$p->hidePlayer($player);
}
$effect = new EffectInstance(VanillaEffects::SLOWNESS(), 2147483647, 0, false);
$player->setInvisible();
$player->getEffects()->add($effect);
$this->prowled[$player->getName()] = true;
} else {
if (isset($this->prowled[$player->getName()])) {
foreach ($player->getServer()->getOnlinePlayers() as $p) {
$p->showPlayer($player);
}
$player->getEffects()->remove(VanillaEffects::SLOWNESS());
if (!$player->getEffects()->has(VanillaEffects::INVISIBILITY())) {
$player->setInvisible(false);
}
unset($this->prowled[$player->getName()]);
}
}
}
} | DaPigGuy/PiggyCustomEnchants | src/DaPigGuy/PiggyCustomEnchants/enchants/armor/chestplate/ProwlEnchant.php | PHP | apache-2.0 | 2,425 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
# proto-plus module manifest: registers every generated message below under
# the google.cloud.gaming.v1 proto package.  Auto-generated; do not hand-edit.
__protobuf__ = proto.module(
    package="google.cloud.gaming.v1",
    manifest={
        "OperationMetadata",
        "OperationStatus",
        "LabelSelector",
        "RealmSelector",
        "Schedule",
        "SpecSource",
        "TargetDetails",
        "TargetState",
        "DeployedFleetDetails",
    },
)
class OperationMetadata(proto.Message):
    r"""Represents the metadata of the long-running operation.

    Attributes:
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time the operation was
            created.
        end_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. The time the operation finished
            running.
        target (str):
            Output only. Server-defined resource path for
            the target of the operation.
        verb (str):
            Output only. Name of the verb executed by the
            operation.
        status_message (str):
            Output only. Human-readable status of the
            operation, if any.
        requested_cancellation (bool):
            Output only. Identifies whether the user has requested
            cancellation of the operation. Operations that have
            successfully been cancelled have [Operation.error][] value
            with a [google.rpc.Status.code][google.rpc.Status.code] of
            1, corresponding to ``Code.CANCELLED``.
        api_version (str):
            Output only. API version used to start the
            operation.
        unreachable (Sequence[str]):
            Output only. List of Locations that could not
            be reached.
        operation_status (Sequence[google.cloud.gaming_v1.types.OperationMetadata.OperationStatusEntry]):
            Output only. Operation status for Game
            Services API operations. Operation status is in
            the form of key-value pairs where keys are
            resource IDs and the values show the status of
            the operation. In case of failures, the value
            includes an error code and error message.
    """

    # Auto-generated field descriptors; numbers mirror the .proto definition
    # and must not be changed by hand.
    create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
    end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
    target = proto.Field(proto.STRING, number=3,)
    verb = proto.Field(proto.STRING, number=4,)
    status_message = proto.Field(proto.STRING, number=5,)
    requested_cancellation = proto.Field(proto.BOOL, number=6,)
    api_version = proto.Field(proto.STRING, number=7,)
    unreachable = proto.RepeatedField(proto.STRING, number=8,)
    operation_status = proto.MapField(
        proto.STRING, proto.MESSAGE, number=9, message="OperationStatus",
    )
class OperationStatus(proto.Message):
    r"""Per-resource status reported in OperationMetadata.operation_status.

    Attributes:
        done (bool):
            Output only. Whether the operation is done or
            still in progress.
        error_code (google.cloud.gaming_v1.types.OperationStatus.ErrorCode):
            The error code in case of failures.
        error_message (str):
            The human-readable error message.
    """

    class ErrorCode(proto.Enum):
        r"""Error codes reported in ``error_code`` when an operation fails."""
        ERROR_CODE_UNSPECIFIED = 0
        INTERNAL_ERROR = 1
        PERMISSION_DENIED = 2
        CLUSTER_CONNECTION = 3

    done = proto.Field(proto.BOOL, number=1,)
    error_code = proto.Field(proto.ENUM, number=2, enum=ErrorCode,)
    error_message = proto.Field(proto.STRING, number=3,)
class LabelSelector(proto.Message):
    r"""The label selector, used to group labels on the resources.

    Attributes:
        labels (Sequence[google.cloud.gaming_v1.types.LabelSelector.LabelsEntry]):
            Resource labels for this selector.
    """

    labels = proto.MapField(proto.STRING, proto.STRING, number=1,)
class RealmSelector(proto.Message):
    r"""The realm selector, used to match realm resources.

    Attributes:
        realms (Sequence[str]):
            List of realms to match.
    """

    realms = proto.RepeatedField(proto.STRING, number=1,)
class Schedule(proto.Message):
    r"""The schedule of a recurring or one time event. The event's time span
    is specified by start_time and end_time. If the scheduled event's
    timespan is larger than the cron_spec + cron_job_duration, the event
    will be recurring. If only cron_spec + cron_job_duration are
    specified, the event is effective starting at the local time
    specified by cron_spec, and is recurring.

    ::

        start_time|-------[cron job]-------[cron job]-------[cron job]---|end_time
        cron job: cron spec start time + duration

    Attributes:
        start_time (google.protobuf.timestamp_pb2.Timestamp):
            The start time of the event.
        end_time (google.protobuf.timestamp_pb2.Timestamp):
            The end time of the event.
        cron_job_duration (google.protobuf.duration_pb2.Duration):
            The duration for the cron job event. The
            duration of the event is effective after the
            cron job's start time.
        cron_spec (str):
            The cron definition of the scheduled event.
            See https://en.wikipedia.org/wiki/Cron. Cron
            spec specifies the local time as defined by the
            realm.
    """

    start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
    end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
    cron_job_duration = proto.Field(
        proto.MESSAGE, number=3, message=duration_pb2.Duration,
    )
    cron_spec = proto.Field(proto.STRING, number=4,)
class SpecSource(proto.Message):
    r"""Encapsulates Agones fleet spec and Agones autoscaler spec
    sources.

    Attributes:
        game_server_config_name (str):
            The game server config resource. Uses the form:
            ``projects/{project}/locations/{location}/gameServerDeployments/{deployment_id}/configs/{config_id}``.
        name (str):
            The name of the Agones fleet config or Agones
            scaling config used to derive the Agones fleet
            or Agones autoscaler spec.
    """

    game_server_config_name = proto.Field(proto.STRING, number=1,)
    name = proto.Field(proto.STRING, number=2,)
class TargetDetails(proto.Message):
    r"""Details about the Agones resources.

    Attributes:
        game_server_cluster_name (str):
            The game server cluster name. Uses the form:
            ``projects/{project}/locations/{location}/realms/{realm}/gameServerClusters/{cluster}``.
        game_server_deployment_name (str):
            The game server deployment name. Uses the form:
            ``projects/{project}/locations/{location}/gameServerDeployments/{deployment_id}``.
        fleet_details (Sequence[google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails]):
            Agones fleet details for game server clusters
            and game server deployments.
    """

    class TargetFleetDetails(proto.Message):
        r"""Details of the target Agones fleet.

        Attributes:
            fleet (google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails.TargetFleet):
                Reference to target Agones fleet.
            autoscaler (google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails.TargetFleetAutoscaler):
                Reference to target Agones fleet autoscaling
                policy.
        """

        class TargetFleet(proto.Message):
            r"""Target Agones fleet specification.

            Attributes:
                name (str):
                    The name of the Agones fleet.
                spec_source (google.cloud.gaming_v1.types.SpecSource):
                    Encapsulates the source of the Agones fleet
                    spec. The Agones fleet spec source.
            """

            name = proto.Field(proto.STRING, number=1,)
            spec_source = proto.Field(proto.MESSAGE, number=2, message="SpecSource",)

        class TargetFleetAutoscaler(proto.Message):
            r"""Target Agones autoscaler policy reference.

            Attributes:
                name (str):
                    The name of the Agones autoscaler.
                spec_source (google.cloud.gaming_v1.types.SpecSource):
                    Encapsulates the source of the Agones fleet
                    spec. Details about the Agones autoscaler spec.
            """

            name = proto.Field(proto.STRING, number=1,)
            spec_source = proto.Field(proto.MESSAGE, number=2, message="SpecSource",)

        fleet = proto.Field(
            proto.MESSAGE,
            number=1,
            message="TargetDetails.TargetFleetDetails.TargetFleet",
        )
        autoscaler = proto.Field(
            proto.MESSAGE,
            number=2,
            message="TargetDetails.TargetFleetDetails.TargetFleetAutoscaler",
        )

    game_server_cluster_name = proto.Field(proto.STRING, number=1,)
    game_server_deployment_name = proto.Field(proto.STRING, number=2,)
    fleet_details = proto.RepeatedField(
        proto.MESSAGE, number=3, message=TargetFleetDetails,
    )
class TargetState(proto.Message):
    r"""Encapsulates the Target state.

    Attributes:
        details (Sequence[google.cloud.gaming_v1.types.TargetDetails]):
            Details about Agones fleets.
    """

    details = proto.RepeatedField(proto.MESSAGE, number=1, message="TargetDetails",)
class DeployedFleetDetails(proto.Message):
    r"""Details of the deployed Agones fleet.

    Attributes:
        deployed_fleet (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleet):
            Information about the Agones fleet.
        deployed_autoscaler (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleetAutoscaler):
            Information about the Agones autoscaler for
            that fleet.
    """

    class DeployedFleet(proto.Message):
        r"""Agones fleet specification and details.

        Attributes:
            fleet (str):
                The name of the Agones fleet.
            fleet_spec (str):
                The fleet spec retrieved from the Agones
                fleet.
            spec_source (google.cloud.gaming_v1.types.SpecSource):
                The source spec that is used to create the
                Agones fleet. The GameServerConfig resource may
                no longer exist in the system.
            status (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleet.DeployedFleetStatus):
                The current status of the Agones fleet.
                Includes count of game servers in various
                states.
        """

        class DeployedFleetStatus(proto.Message):
            r"""DeployedFleetStatus has details about the Agones fleets such
            as how many are running, how many allocated, and so on.

            Attributes:
                ready_replicas (int):
                    The number of GameServer replicas in the
                    READY state in this fleet.
                allocated_replicas (int):
                    The number of GameServer replicas in the
                    ALLOCATED state in this fleet.
                reserved_replicas (int):
                    The number of GameServer replicas in the
                    RESERVED state in this fleet. Reserved instances
                    won't be deleted on scale down, but won't cause
                    an autoscaler to scale up.
                replicas (int):
                    The total number of current GameServer
                    replicas in this fleet.
            """

            ready_replicas = proto.Field(proto.INT64, number=1,)
            allocated_replicas = proto.Field(proto.INT64, number=2,)
            reserved_replicas = proto.Field(proto.INT64, number=3,)
            replicas = proto.Field(proto.INT64, number=4,)

        fleet = proto.Field(proto.STRING, number=1,)
        fleet_spec = proto.Field(proto.STRING, number=2,)
        spec_source = proto.Field(proto.MESSAGE, number=3, message="SpecSource",)
        # Field number 4 is intentionally absent here; numbers come from the
        # .proto definition and must stay in sync with it.
        status = proto.Field(
            proto.MESSAGE,
            number=5,
            message="DeployedFleetDetails.DeployedFleet.DeployedFleetStatus",
        )

    class DeployedFleetAutoscaler(proto.Message):
        r"""Details about the Agones autoscaler.

        Attributes:
            autoscaler (str):
                The name of the Agones autoscaler.
            spec_source (google.cloud.gaming_v1.types.SpecSource):
                The source spec that is used to create the
                autoscaler. The GameServerConfig resource may no
                longer exist in the system.
            fleet_autoscaler_spec (str):
                The autoscaler spec retrieved from Agones.
        """

        autoscaler = proto.Field(proto.STRING, number=1,)
        spec_source = proto.Field(proto.MESSAGE, number=4, message="SpecSource",)
        fleet_autoscaler_spec = proto.Field(proto.STRING, number=3,)

    deployed_fleet = proto.Field(proto.MESSAGE, number=1, message=DeployedFleet,)
    deployed_autoscaler = proto.Field(
        proto.MESSAGE, number=2, message=DeployedFleetAutoscaler,
    )
__all__ = tuple(sorted(__protobuf__.manifest))
| googleapis/python-game-servers | google/cloud/gaming_v1/types/common.py | Python | apache-2.0 | 13,962 |
/**
* Copyright (c) 2013-2020 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.iterator;
import java.util.AbstractMap;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Base iterator over Redis map entries. Each entry handed to callers is a
 * live view: invoking {@code setValue} on it routes the write through
 * {@link #put(Entry, Object)} so the mutation reaches the backing map.
 *
 * @author Nikita Koksharov
 *
 * @param <V> value type
 */
public abstract class RedissonBaseMapIterator<V> extends BaseIterator<V, Entry<Object, Object>> {

    @SuppressWarnings("unchecked")
    protected V getValue(Map.Entry<Object, Object> entry) {
        // Parameterized (previously raw) SimpleEntry to avoid the raw-type
        // warning; behavior is unchanged.
        return (V) new AbstractMap.SimpleEntry<Object, Object>(entry.getKey(), entry.getValue()) {

            @Override
            public Object setValue(Object value) {
                // Delegate the write to the subclass so it lands in the
                // backing map, not just in this local entry copy.
                return put(entry, value);
            }

        };
    }

    /** Writes {@code value} for {@code entry}'s key into the backing map. */
    protected abstract Object put(Entry<Object, Object> entry, Object value);

}
| mrniko/redisson | redisson/src/main/java/org/redisson/iterator/RedissonBaseMapIterator.java | Java | apache-2.0 | 1,297 |
package org.artifactory.ui.rest.resource.home;
import org.artifactory.api.security.AuthorizationService;
import org.artifactory.ui.rest.resource.BaseResource;
import org.artifactory.ui.rest.service.general.GeneralServiceFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import javax.annotation.security.RolesAllowed;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
 * REST resource backing the UI home page; restricted to admin and user roles.
 *
 * @author Chen keinan
 */
@Path("home")
@RolesAllowed({AuthorizationService.ROLE_ADMIN, AuthorizationService.ROLE_USER})
@Component
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
public class HomeResource extends BaseResource {

    // Factory providing the home-page service; injected by Spring.
    @Autowired
    GeneralServiceFactory generalFactory;

    /**
     * Fetches the data shown on the home page.
     *
     * @return JSON response produced by the home-page service
     * @throws Exception propagated from service execution
     */
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public Response getHomeData()
            throws Exception {
        return runService(generalFactory.getHomePage());
    }
}
| alancnet/artifactory | web/rest-ui/src/main/java/org/artifactory/ui/rest/resource/home/HomeResource.java | Java | apache-2.0 | 1,122 |
using Terraria;
using Terraria.ID;
using Terraria.ModLoader;
namespace Castleroid.NPCs
{
public class ai8 : ModNPC
{
public override void SetDefaults()
{
npc.name = "ai8";
npc.displayName = "ai8";
npc.width = 28;
npc.height = 20;
npc.damage = 2;
npc.defense = 0;
npc.lifeMax = 2;
npc.soundHit = 1;
npc.soundKilled = 16;
npc.value = 60f;
npc.knockBackResist = 0.0f;
npc.aiStyle = 8;
Main.npcFrameCount[npc.type] = Main.npcFrameCount[NPCID.CyanBeetle];
aiType = NPCID.CyanBeetle;
animationType = NPCID.CyanBeetle;
npc.noGravity = false;
npc.noTileCollide = true;
}
public override void HitEffect(int hitDirection, double damage)
{
for (int i = 0; i < 10; i++)
{
int dustType = Main.rand.Next(139, 143);
int dustIndex = Dust.NewDust(npc.position, npc.width, npc.height, dustType);
Dust dust = Main.dust[dustIndex];
dust.velocity.X = dust.velocity.X + Main.rand.Next(-50, 51) * 0.01f;
dust.velocity.Y = dust.velocity.Y + Main.rand.Next(-50, 51) * 0.01f;
dust.scale *= 1f + Main.rand.Next(-30, 31) * 0.01f;
}
}
}
}
| lukanpeixe/projetocastleroid | Castleroid - tmodloader - 1.0.7/NPCs/ai8.cs | C# | apache-2.0 | 1,113 |
/*
* Licensed to Cloudkick, Inc ('Cloudkick') under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Cloudkick licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudkick;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.Toast;
public class LoginActivity extends Activity {
	// Request code used when launching the settings screen.
	private static final int SETTINGS_ACTIVITY_ID = 0;
	RelativeLayout loginView = null;
	// Credentials captured from the form by AccountLister.onPreExecute().
	private String user = null;
	private String pass = null;
	// Modal "Logging In..." dialog shown while accounts are being fetched.
	private ProgressDialog progress = null;
@Override
public void onCreate(Bundle savedInstanceState) {
	super.onCreate(savedInstanceState);
	setContentView(R.layout.login);
	setTitle("Cloudkick for Android");
	// Login starts the async account lookup; signup opens the pricing page.
	findViewById(R.id.button_login).setOnClickListener(new LoginClickListener());
	findViewById(R.id.button_signup).setOnClickListener(new SignupClickListener());
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
	// Only the settings screen round-trip is of interest here.
	if (requestCode != SETTINGS_ACTIVITY_ID) {
		return;
	}
	SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
	boolean haveCredentials = !(prefs.getString("editKey", "").equals("")
			&& prefs.getString("editSecret", "").equals(""));
	if (haveCredentials) {
		// Credentials were stored: report a successful login to the caller.
		Intent result = new Intent();
		result.putExtra("login", true);
		setResult(Activity.RESULT_OK, result);
	}
	// Either way this activity is done.
	finish();
}
// Starts the asynchronous account lookup using the entered credentials.
private class LoginClickListener implements View.OnClickListener {
	public void onClick(View v) {
		new AccountLister().execute();
	}
}
private class SignupClickListener implements View.OnClickListener {
public void onClick(View v) {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.cloudkick.com/pricing/")));
}
}
private class AccountLister extends AsyncTask<Void, Void, ArrayList<String>>{
private Integer statusCode = null;
@Override
protected void onPreExecute() {
user = ((EditText) findViewById(R.id.input_email)).getText().toString();
pass = ((EditText) findViewById(R.id.input_password)).getText().toString();
progress = ProgressDialog.show(LoginActivity.this, "", "Logging In...", true);
}
@Override
protected ArrayList<String> doInBackground(Void...voids) {
ArrayList<String> accounts = new ArrayList<String>();
try {
HttpClient client = new DefaultHttpClient();
HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/list_accounts/");
ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
values.add(new BasicNameValuePair("user", user));
values.add(new BasicNameValuePair("password", pass));
post.setEntity(new UrlEncodedFormEntity(values));
HttpResponse response = client.execute(post);
statusCode = response.getStatusLine().getStatusCode();
InputStream is = response.getEntity().getContent();
BufferedReader rd = new BufferedReader(new InputStreamReader(is));
String line;
while ((line = rd.readLine()) != null) {
accounts.add(line);
Log.i("LoginActivity", line);
}
}
catch (Exception e) {
e.printStackTrace();
statusCode = 0;
}
return accounts;
}
@Override
protected void onPostExecute(ArrayList<String> accounts) {
switch (statusCode) {
case 200:
if (accounts.size() == 1) {
new KeyRetriever().execute(accounts.get(0));
}
else {
String[] tmpAccountArray = new String[accounts.size()];
final String[] accountArray = accounts.toArray(tmpAccountArray);
AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
builder.setTitle("Select an Account");
builder.setItems(accountArray, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int item) {
new KeyRetriever().execute(accountArray[item]);
}
});
AlertDialog selectAccount = builder.create();
selectAccount.show();
}
break;
case 400:
progress.dismiss();
if (accounts.get(0).equals("You have enabled multi factor authentication for this account. To access the API key list, please visit the website.")) {
AlertDialog.Builder builder = new AlertDialog.Builder(LoginActivity.this);
builder.setTitle("MFA is Enabled");
String mfaMessage = ("You appear to have multi-factor authentication enabled on your account. "
+ "You will need to manually create an API key with read permissions in the "
+ "web interface, then enter it directly in the settings panel.");
builder.setMessage(mfaMessage);
builder.setPositiveButton("Settings", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
Intent settingsActivity = new Intent(getBaseContext(), Preferences.class);
startActivityForResult(settingsActivity, SETTINGS_ACTIVITY_ID);
}
});
AlertDialog mfaDialog = builder.create();
mfaDialog.show();
}
else {
Toast.makeText(LoginActivity.this, "Invalid Username or Password", Toast.LENGTH_LONG).show();
}
break;
default:
progress.dismiss();
Toast.makeText(LoginActivity.this, "An Error Occurred Retrieving Your Accounts", Toast.LENGTH_LONG).show();
};
}
}
private class KeyRetriever extends AsyncTask<String, Void, String[]>{
private Integer statusCode = null;
@Override
protected String[] doInBackground(String...accts) {
Log.i("LoginActivity", "Selected Account: " + accts[0]);
String[] creds = new String[2];
try {
HttpClient client = new DefaultHttpClient();
HttpPost post = new HttpPost("https://www.cloudkick.com/oauth/create_consumer/");
ArrayList<NameValuePair> values = new ArrayList<NameValuePair>(2);
values.add(new BasicNameValuePair("user", user));
values.add(new BasicNameValuePair("password", pass));
values.add(new BasicNameValuePair("account", accts[0]));
values.add(new BasicNameValuePair("system", "Cloudkick for Android"));
values.add(new BasicNameValuePair("perm_read", "True"));
values.add(new BasicNameValuePair("perm_write", "False"));
values.add(new BasicNameValuePair("perm_execute", "False"));
post.setEntity(new UrlEncodedFormEntity(values));
HttpResponse response = client.execute(post);
statusCode = response.getStatusLine().getStatusCode();
Log.i("LoginActivity", "Return Code: " + statusCode);
InputStream is = response.getEntity().getContent();
BufferedReader rd = new BufferedReader(new InputStreamReader(is));
String line;
for (int i = 0; i < 2; i++) {
line = rd.readLine();
if (line == null) {
return creds;
}
creds[i] = line;
}
}
catch (Exception e) {
statusCode = 0;
}
return creds;
}
@Override
protected void onPostExecute(String[] creds) {
progress.dismiss();
if (statusCode != 200) {
// Show short error messages - this is a dirty hack
if (creds[0] != null && creds[0].startsWith("User with role")) {
Toast.makeText(LoginActivity.this, creds[0], Toast.LENGTH_LONG).show();
}
else {
Toast.makeText(LoginActivity.this, "An Error Occurred on Login", Toast.LENGTH_LONG).show();
return;
}
}
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this);
SharedPreferences.Editor editor = prefs.edit();
editor.putString("editKey", creds[0]);
editor.putString("editSecret", creds[1]);
editor.commit();
Intent result = new Intent();
result.putExtra("login", true);
setResult(Activity.RESULT_OK, result);
LoginActivity.this.finish();
}
}
}
| cloudkick/cloudkick-android | src/com/cloudkick/LoginActivity.java | Java | apache-2.0 | 9,030 |
package ai.api.test;
/***********************************************************************************************************************
*
* API.AI Java SDK - client-side libraries for API.AI
* =================================================
*
* Copyright (C) 2014 by Speaktoit, Inc. (https://www.speaktoit.com)
* https://www.api.ai
*
***********************************************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
***********************************************************************************************************************/
/**
 * Runs the protocol test suite against the production API.AI endpoint
 * using a fixed set of testing access tokens.
 */
public class ProtocolProdTest extends ProtocolTestBase {

    // Testing keys
    protected static final String ACCESS_TOKEN = "3485a96fb27744db83e78b8c4bc9e7b7";
    private static final String SECOND_ACCESS_TOKEN = "968235e8e4954cf0bb0dc07736725ecd";
    private static final String RU_ACCESS_TOKEN = "07806228a357411d83064309a279c7fd";
    private static final String PT_BR_ACCESS_TOKEN = "42db6ad6a51c47088318a8104833b66c";

    protected String getAccessToken() {
        return ACCESS_TOKEN;
    }

    @Override
    protected String getSecondAccessToken() {
        return SECOND_ACCESS_TOKEN;
    }

    protected String getRuAccessToken() {
        return RU_ACCESS_TOKEN;
    }

    protected String getBrAccessToken() {
        // TODO
        return "";
    }

    protected String getPtBrAccessToken() {
        return PT_BR_ACCESS_TOKEN;
    }

    @Override
    protected String getJaAccessToken() {
        // TODO
        return "";
    }
}
| deternan/Weather-line-bot | libai/src/test/java/ai/api/test/ProtocolProdTest.java | Java | apache-2.0 | 1,887 |
# Adds a polymorphic reference (resource_type/resource_id) to the versions
# table so each version row can point at the record it belongs to.
class AddResourceToVersions < ActiveRecord::Migration[6.0]
  def change
    # The two columns that together form the polymorphic association target.
    add_column :versions, :resource_id, :integer
    add_column :versions, :resource_type, :string
    # Composite index so lookups by (type, id) are fast.
    add_index :versions, [:resource_type, :resource_id]
  end
end
| psu-stewardship/scholarsphere | db/migrate/20210126195635_add_resource_to_versions.rb | Ruby | apache-2.0 | 237 |
package org.drools.persistence;

import javax.transaction.xa.XAResource;

/**
 * Contract for a persistence backend that can take part in XA transactions:
 * it exposes its XA resource and current transaction, and can save or
 * reload its state on demand.
 */
public interface PersistenceManager {

    /** @return the XA resource used to enlist this manager in a distributed transaction */
    XAResource getXAResource();

    /** @return the transaction this manager is currently associated with */
    Transaction getTransaction();

    /** Persists the current state. */
    void save();

    /** Restores previously persisted state. */
    void load();
}
package com.coolweather.android;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.view.ScrollingView;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.coolweather.android.gson.Forecast;
import com.coolweather.android.gson.Weather;
import com.coolweather.android.service.AutoUpdateService;
import com.coolweather.android.util.HttpUtil;
import com.coolweather.android.util.Utility;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
/**
 * Main weather screen: shows cached weather immediately when available,
 * otherwise fetches it by weather id, supports pull-to-refresh, loads a
 * daily Bing background image, and starts the auto-update service.
 */
public class WeatherActivity extends AppCompatActivity {
    private ScrollView weatherLayout;
    private TextView titleCity;
    private TextView titleUpdateTime;
    private TextView degreeText;
    private TextView weatherInfoText;
    private LinearLayout forecastLayout;
    private TextView aqiText;
    private TextView pm25Text;
    private TextView comfortText;
    private TextView carWashText;
    private TextView sportText;
    private ImageView bingPicImg;
    public SwipeRefreshLayout swipeRefreshLayout;
    // Id of the city currently shown; reused by pull-to-refresh.
    private String mWeatherId;
    public DrawerLayout drawerLayout;
    private Button navButton;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Draw behind a transparent status bar on Lollipop and newer.
        if (Build.VERSION.SDK_INT >= 21) {
            View decorView = getWindow().getDecorView();
            decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
            getWindow().setStatusBarColor(Color.TRANSPARENT);
        }
        setContentView(R.layout.activity_weather);
        // Bind views.
        weatherLayout = (ScrollView) findViewById(R.id.weather_layout);
        titleCity = (TextView) findViewById(R.id.title_city);
        titleUpdateTime = (TextView) findViewById(R.id.title_update_time);
        degreeText = (TextView) findViewById(R.id.degree_text);
        weatherInfoText = (TextView) findViewById(R.id.weather_info_text);
        forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);
        aqiText = (TextView) findViewById(R.id.aqi_text);
        pm25Text = (TextView) findViewById(R.id.pm25_text);
        comfortText = (TextView) findViewById(R.id.comfort_text);
        carWashText = (TextView) findViewById(R.id.car_wash_text);
        sportText = (TextView) findViewById(R.id.sport_text);
        bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);
        drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
        navButton = (Button) findViewById(R.id.nav_button);
        swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipe_refresh);
        swipeRefreshLayout.setColorSchemeResources(R.color.colorTopic);
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
        String weatherString = prefs.getString("weather", null);
        if (weatherString != null) {
            // Cache hit: parse and show the stored weather directly.
            Weather weather = Utility.handleWeatherResponse(weatherString);
            mWeatherId = weather.basic.weatherId;
            showWeatherInfo(weather);
        } else {
            // No cache: fetch the weather from the server by id.
            mWeatherId = getIntent().getStringExtra("weather_id");
            weatherLayout.setVisibility(View.INVISIBLE);
            requestWeather(mWeatherId);
        }
        navButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                drawerLayout.openDrawer(GravityCompat.START);
            }
        });
        swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                requestWeather(mWeatherId);
            }
        });
        String bingPic = prefs.getString("bing_pic", null);
        if (bingPic != null) {
            Glide.with(this).load(bingPic).into(bingPicImg);
        } else {
            loadBingPic();
        }
    }

    /**
     * Fetches the weather for the given weather id from the server,
     * caches the raw response on success, and updates the UI.
     */
    public void requestWeather(final String weatherId) {
        String weatherUrl = "http://guolin.tech/api/weather?cityid=" + weatherId + "&key=04ae9fa43fb341b596f719aa6d6babda";
        HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
            }
            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String responseText = response.body().string();
                final Weather weather = Utility.handleWeatherResponse(responseText);
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        if (weather != null && "ok".equals(weather.status)) {
                            // Cache the raw JSON so the next launch is instant.
                            SharedPreferences.Editor editor = PreferenceManager
                                    .getDefaultSharedPreferences(WeatherActivity.this).edit();
                            editor.putString("weather", responseText);
                            editor.apply();
                            Toast.makeText(WeatherActivity.this, "成功更新最新天气", Toast.LENGTH_SHORT).show();
                            showWeatherInfo(weather);
                        } else {
                            Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
                        }
                        swipeRefreshLayout.setRefreshing(false);
                    }
                });
            }
        });
        loadBingPic();
    }

    /**
     * Loads today's Bing background image URL, caches it, and displays it.
     */
    private void loadBingPic() {
        String requestBingPic = "http://guolin.tech/api/bing_pic";
        HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                e.printStackTrace();
            }
            @Override
            public void onResponse(Call call, Response response) throws IOException {
                final String bingPic = response.body().string();
                SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();
                editor.putString("bing_pic", bingPic);
                editor.apply();
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);
                    }
                });
            }
        });
    }

    /**
     * Renders the parsed Weather entity into the UI and, when the data is
     * valid, starts the background auto-update service.
     */
    private void showWeatherInfo(Weather weather) {
        String cityName = weather.basic.cityName;
        String updateTime = "更新时间: " + weather.basic.update.updateTime.split(" ")[1];
        String degree = weather.now.temperature + "ºC";
        String weatherInfo = weather.now.more.info;
        titleCity.setText(cityName);
        titleUpdateTime.setText(updateTime);
        degreeText.setText(degree);
        weatherInfoText.setText(weatherInfo);
        forecastLayout.removeAllViews();
        for (Forecast forecast : weather.forecastList) {
            View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);
            TextView dateText = (TextView) view.findViewById(R.id.date_text);
            TextView infoText = (TextView) view.findViewById(R.id.info_text);
            TextView maxText = (TextView) view.findViewById(R.id.max_text);
            TextView minText = (TextView) view.findViewById(R.id.min_text);
            dateText.setText(forecast.date);
            infoText.setText(forecast.more.info);
            maxText.setText(forecast.temperature.max);
            minText.setText(forecast.temperature.min);
            forecastLayout.addView(view);
        }
        if (weather.aqi != null) {
            aqiText.setText(weather.aqi.city.aqi);
            pm25Text.setText(weather.aqi.city.pm25);
        }
        String comfort = "舒适度:" + weather.suggestion.comfort.info;
        String catWash = "洗车指数:" + weather.suggestion.carWash.info;
        String sport = "运动指数:" + weather.suggestion.sport.info;
        comfortText.setText(comfort);
        carWashText.setText(catWash);
        sportText.setText(sport);
        weatherLayout.setVisibility(View.VISIBLE);
        // Note: weather was already dereferenced above, so the original
        // null check here was dead; only the status needs to be verified.
        if ("ok".equals(weather.status)) {
            Intent intent = new Intent(this, AutoUpdateService.class);
            startService(intent);
        } else {
            Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
        }
    }
}
| MarkManYUN/coolweather | app/src/main/java/com/coolweather/android/WeatherActivity.java | Java | apache-2.0 | 9,815 |
package com.example.cdm.huntfun.activity;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.widget.TextView;
import com.example.cdm.huntfun.R;
import com.example.cdm.huntfun.photoView.ImageDetailFragment;
import com.example.cdm.huntfun.widget.HackyViewPager;
import java.util.List;
/**
* 图片查看器
*/
public class ImagePagerActivity extends FragmentActivity {
    /** Saved-state key for the currently visible page index. */
    private static final String STATE_POSITION = "STATE_POSITION";
    /** Intent extra: index of the image to show first. */
    public static final String EXTRA_IMAGE_INDEX = "image_index";
    /** Intent extra: list of image URLs to page through. */
    public static final String EXTRA_IMAGE_URLS = "image_urls";
    private HackyViewPager mPager;
    private int pagerPosition;
    private TextView indicator;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.umessage_image_detail_pager);
        pagerPosition = getIntent().getIntExtra(EXTRA_IMAGE_INDEX, 0);
        List<String> urls = getIntent().getStringArrayListExtra(EXTRA_IMAGE_URLS);
        mPager = (HackyViewPager) findViewById(R.id.pager);
        ImagePagerAdapter mAdapter = new ImagePagerAdapter(getSupportFragmentManager(), urls);
        mPager.setAdapter(mAdapter);
        indicator = (TextView) findViewById(R.id.indicator);
        CharSequence text = getString(R.string.xq_viewpager_indicator, 1, mPager.getAdapter().getCount());
        indicator.setText(text);
        // Keep the "x / n" indicator in sync with the current page.
        mPager.addOnPageChangeListener(new OnPageChangeListener() {
            @Override
            public void onPageScrollStateChanged(int arg0) {
            }
            @Override
            public void onPageScrolled(int arg0, float arg1, int arg2) {
            }
            @Override
            public void onPageSelected(int arg0) {
                CharSequence text = getString(R.string.xq_viewpager_indicator, arg0 + 1, mPager.getAdapter().getCount());
                indicator.setText(text);
            }
        });
        // Restore the page the user was on before a configuration change.
        if (savedInstanceState != null) {
            pagerPosition = savedInstanceState.getInt(STATE_POSITION);
        }
        mPager.setCurrentItem(pagerPosition);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        // Bug fix: chain to super so the framework saves view/fragment state too.
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_POSITION, mPager.getCurrentItem());
    }

    /** Adapter that serves one ImageDetailFragment per URL. */
    private class ImagePagerAdapter extends FragmentStatePagerAdapter {
        public List<String> fileList;

        public ImagePagerAdapter(FragmentManager fm, List<String> fileList) {
            super(fm);
            this.fileList = fileList;
        }

        @Override
        public int getCount() {
            return fileList == null ? 0 : fileList.size();
        }

        @Override
        public Fragment getItem(int position) {
            String url = fileList.get(position);
            return ImageDetailFragment.newInstance(url);
        }
    }
}
| skycdm/HuntFun | app/src/main/java/com/example/cdm/huntfun/activity/ImagePagerActivity.java | Java | apache-2.0 | 2,872 |
/* Copyright (c) The m-m-m Team, Licensed under the Apache License, Version 2.0
* http://www.apache.org/licenses/LICENSE-2.0 */
package net.sf.mmm.util.version.impl;
import net.sf.mmm.util.version.api.DevelopmentPhase;
import net.sf.mmm.util.version.api.VersionIdentifier;
/**
* This is the implementation of {@link net.sf.mmm.util.lang.api.Formatter} for the {@link DevelopmentPhase#getValue()
* value} of the {@link VersionIdentifier#getPhase() phase}.
*
* @author Joerg Hohwiller (hohwille at users.sourceforge.net)
* @since 3.0.0
*/
public class VersionIdentifierFormatterPhaseValue extends AbstractVersionIdentifierFormatterString {

  /**
   * The constructor.
   *
   * @param prefix static prefix emitted before the {@link VersionIdentifier#getPhase() phase} value;
   *        omitted entirely when the phase is {@code null}.
   * @param maximumLength maximum number of letters of the phase value. Default is
   *        {@link Integer#MAX_VALUE}.
   */
  public VersionIdentifierFormatterPhaseValue(String prefix, int maximumLength) {
    super(prefix, maximumLength);
  }

  @Override
  protected String getString(VersionIdentifier value) {
    // Render the phase's value, or nothing when no phase is set.
    final DevelopmentPhase phase = value.getPhase();
    return (phase == null) ? null : phase.getValue();
  }
}
| m-m-m/util | version/src/main/java/net/sf/mmm/util/version/impl/VersionIdentifierFormatterPhaseValue.java | Java | apache-2.0 | 1,372 |
package Escape;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.EventQueue;
import java.awt.Insets;
import javax.swing.JFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import Escape.Controller.Controller;
import Escape.Model.Arena;
import Escape.Service.Service;
import Escape.View.Rank;
import Escape.View.View;
import java.awt.Color;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.io.IOException;
import java.io.InputStream;
import java.util.logging.LogManager;
/**
* The main class of the program.
*/
public class Escape extends JFrame {
    /**
     * The <code>serialVersionUID</code> of the class.
     */
    private static final long serialVersionUID = -3689415169655758824L;
    /**
     * The main JPanel of the <code>frame</code>.
     */
    private JPanel contentPane;
    /**
     * The main <code>Arena</code> object of the program.
     */
    private Arena arena;
    /**
     * Part of the Game tab, the main <code>View</code> object.
     */
    private View view;
    /**
     * The main <code>Controller</code> object of the program.
     */
    private Controller control;
    /**
     * Part of the Rank tab, the main <code>Rank</code> object.
     */
    private Rank rank;
    /**
     * The name of the player. Default is "Guest".
     */
    private String username = "Guest";
    /**
     * The password for the database.
     */
    private String DAOpassword = "pwd";

    /**
     * Main method of the program.
     * Creates the main JFrame object and asks the user to set <code>DAOpassword</code>
     * and <code>username</code> before start the game.
     *
     * @param args command-line parameters
     */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    Escape frame = new Escape();
                    frame.setVisible(true);
                    // Bug fix: JOptionPane.showInputDialog returns null when the
                    // dialog is cancelled, so the null check must come first
                    // (the originals dereferenced the result before checking null).
                    do {
                        frame.DAOpassword = JOptionPane.showInputDialog(frame, "Enter password for database!");
                    } while (frame.DAOpassword == null || frame.DAOpassword.equals("pwd"));
                    do {
                        frame.username = JOptionPane.showInputDialog(frame, "Enter your in-game name!");
                    } while (frame.username == null || frame.username.isEmpty());
                    frame.rank.setDAOpassword(frame.DAOpassword);
                    frame.rank.refreshRank();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Constructor for the main JFrame object.
     * Sets up the frame, initializes the <code>arena</code>, <code>view</code>,
     * <code>control</code> and <code>rank</code> objects, adds the tabs and the menu.
     */
    public Escape() {
        setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        setTitle("Escape");
        setBounds(300, 0, 0, 0);
        pack();
        // Size the window so the 600x630 content area excludes the decorations.
        Insets insets = getInsets();
        setSize(new Dimension(insets.left + insets.right + 600,
                insets.top + insets.bottom + 630));
        contentPane = new JPanel();
        contentPane.setBackground(Color.WHITE);
        arena = new Arena(6, 600);
        view = new View(arena);
        control = new Controller(arena, view);
        view.setControl(control);
        rank = new Rank();
        setContentPane(contentPane);
        contentPane.setLayout(new BorderLayout());
        createMenuBar();
        JTabbedPane tabbedPane = new JTabbedPane();
        tabbedPane.addTab("Game", view);
        tabbedPane.addTab("Rank", rank);
        tabbedPane.setFocusable(false);
        contentPane.add(tabbedPane);
        setLocationRelativeTo(view.getPlayer());
    }

    /**
     * Creates the menu bar with "New Game", "Save Game" and "Exit" items
     * and wires up their actions.
     */
    private void createMenuBar() {
        JMenuBar menubar = new JMenuBar();
        JMenu file = new JMenu("File");
        file.setMnemonic(KeyEvent.VK_F);
        JMenuItem newGameMenuItem = new JMenuItem("New Game");
        newGameMenuItem.setMnemonic(KeyEvent.VK_E);
        newGameMenuItem.setToolTipText("Start a new game");
        newGameMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                Service.newGame(arena, control, view);
            }
        });
        JMenuItem saveGameMenuItem = new JMenuItem("Save Game");
        saveGameMenuItem.setMnemonic(KeyEvent.VK_E);
        saveGameMenuItem.setToolTipText("Save the actual score and start a new game!");
        saveGameMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                Service.saveGame(control, username, DAOpassword);
                Service.newGame(arena, control, view);
            }
        });
        JMenuItem exitMenuItem = new JMenuItem("Exit");
        exitMenuItem.setMnemonic(KeyEvent.VK_E);
        exitMenuItem.setToolTipText("Exit application");
        exitMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent event) {
                System.exit(0);
            }
        });
        file.add(newGameMenuItem);
        file.add(saveGameMenuItem);
        file.add(exitMenuItem);
        menubar.add(file);
        setJMenuBar(menubar);
    }
}
| Zakemi/Escape | src/main/java/Escape/Escape.java | Java | apache-2.0 | 5,644 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.influxdb;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.CamelContextHelper;
import org.influxdb.InfluxDB;
/**
 * The influxdb component allows you to interact with <a href="https://influxdata.com/time-series-platform/influxdb/">InfluxDB</a>, a time series database.
 *
 * This endpoint is producer-only: it writes points or runs queries against an
 * InfluxDB connection that is looked up from the registry by bean name.
 */
@UriEndpoint(firstVersion = "2.18.0", scheme = "influxdb", title = "InfluxDB", syntax = "influxdb:connectionBean", label = "database", producerOnly = true)
public class InfluxDbEndpoint extends DefaultEndpoint {
    // Live connection; resolved from the registry in doStart().
    private InfluxDB influxDB;
    @UriPath
    @Metadata(required = "true")
    private String connectionBean;
    @UriParam
    private String databaseName;
    @UriParam(defaultValue = "default")
    private String retentionPolicy = "default";
    @UriParam(defaultValue = "false")
    private boolean batch;
    @UriParam(defaultValue = InfluxDbOperations.INSERT)
    private String operation = InfluxDbOperations.INSERT;
    @UriParam
    private String query;

    public InfluxDbEndpoint(String uri, InfluxDbComponent component) {
        super(uri, component);
    }

    /**
     * Creates the producer that performs the configured operation against InfluxDB.
     */
    @Override
    public Producer createProducer() throws Exception {
        return new InfluxDbProducer(this);
    }

    /**
     * Always fails: this endpoint is producer-only and cannot consume messages.
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new UnsupportedOperationException("You cannot receive messages from this endpoint");
    }

    /**
     * Resolves the mandatory InfluxDB connection bean from the registry
     * before delegating to the normal endpoint startup.
     */
    @Override
    protected void doStart() throws Exception {
        influxDB = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, InfluxDB.class);
        log.debug("Resolved the connection with the name {} as {}", connectionBean, influxDB);
        super.doStart();
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();
    }

    @Override
    public boolean isSingleton() {
        return true;
    }

    /** @return the resolved InfluxDB connection (available after start) */
    public InfluxDB getInfluxDB() {
        return influxDB;
    }

    /**
     * The Influx DB to use
     */
    public void setInfluxDB(InfluxDB influxDB) {
        this.influxDB = influxDB;
    }

    /** @return the name of the target database */
    public String getDatabaseName() {
        return databaseName;
    }

    /**
     * The name of the database where the time series will be stored
     */
    public void setDatabaseName(String databaseName) {
        this.databaseName = databaseName;
    }

    /** @return the retention policy applied to written data */
    public String getRetentionPolicy() {
        return retentionPolicy;
    }

    /**
     * The string that defines the retention policy to the data created by the endpoint
     */
    public void setRetentionPolicy(String retentionPolicy) {
        this.retentionPolicy = retentionPolicy;
    }

    /** @return the registry name of the connection bean */
    public String getConnectionBean() {
        return connectionBean;
    }

    /**
     * Connection to the influx database, of class InfluxDB.class
     */
    public void setConnectionBean(String connectionBean) {
        this.connectionBean = connectionBean;
    }

    /** @return whether writes are batched */
    public boolean isBatch() {
        return batch;
    }

    /**
     * Define if this operation is a batch operation or not
     */
    public void setBatch(boolean batch) {
        this.batch = batch;
    }

    /** @return the configured operation (insert or query) */
    public String getOperation() {
        return operation;
    }

    /**
     * Define if this operation is an insert or a query
     */
    public void setOperation(String operation) {
        this.operation = operation;
    }

    /** @return the query string used when the operation is a query */
    public String getQuery() {
        return query;
    }

    /**
     * Define the query in case of operation query
     */
    public void setQuery(String query) {
        this.query = query;
    }
}
| kevinearls/camel | components/camel-influxdb/src/main/java/org/apache/camel/component/influxdb/InfluxDbEndpoint.java | Java | apache-2.0 | 4,672 |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xiaomi.smarthome.common.ui.dialog;
import static android.view.ViewGroup.LayoutParams.MATCH_PARENT;
import java.lang.ref.WeakReference;
import android.content.Context;
import android.content.DialogInterface;
import android.database.Cursor;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.os.Message;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckedTextView;
import android.widget.CursorAdapter;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.ScrollView;
import android.widget.SimpleCursorAdapter;
import android.widget.TextView;
import com.xiaomi.common.R;
public class MLAlertController {
private static final int BIT_BUTTON_POSITIVE = 1;
private static final int BIT_BUTTON_NEGATIVE = 2;
private static final int BIT_BUTTON_NEUTRAL = 4;
private final Context mContext;
private final DialogInterface mDialogInterface;
private final Window mWindow;
private CharSequence mTitle;
private CharSequence mMessage;
private ListView mListView;
private View mView;
private int mViewSpacingLeft;
private int mViewSpacingTop;
private int mViewSpacingRight;
private int mViewSpacingBottom;
private boolean mViewSpacingSpecified = false;
private Button mButtonPositive;
private CharSequence mButtonPositiveText;
private Message mButtonPositiveMessage;
private Button mButtonNegative;
private CharSequence mButtonNegativeText;
private Message mButtonNegativeMessage;
private Button mButtonNeutral;
private CharSequence mButtonNeutralText;
private Message mButtonNeutralMessage;
private ScrollView mScrollView;
private int mIconId = -1;
private Drawable mIcon;
private ImageView mIconView;
private TextView mTitleView;
private TextView mMessageView;
private View mCustomTitleView;
private boolean mForceInverseBackground;
private ListAdapter mAdapter;
private int mCheckedItem = -1;
private int mAlertDialogLayout;
private int mListLayout;
private int mListLayoutWithTitle;
private int mMultiChoiceItemLayout;
private int mSingleChoiceItemLayout;
private int mListItemLayout;
// add by afei for progressDialog Top and normal is Bottom
private int mGravity;
private Handler mHandler;
private boolean mTransplantBg = false;
private boolean mAutoDismiss = true; // 对话框在点击按钮之后是否自动消失
private boolean mCustomBgTransplant = false;
/**
 * Shared click listener for all three dialog buttons. It resolves which
 * button was pressed, forwards the registered callback {@link Message}
 * (a copy, since a Message may only be sent once), and then, when
 * {@code mAutoDismiss} is set, queues a dismiss so it runs after the
 * callback has been delivered.
 */
View.OnClickListener mButtonHandler = new View.OnClickListener() {
    public void onClick(View v) {
        Message m = null;
        // Message.obtain(m) copies the template so it can be re-sent on later clicks.
        if (v == mButtonPositive && mButtonPositiveMessage != null) {
            m = Message.obtain(mButtonPositiveMessage);
        } else if (v == mButtonNegative && mButtonNegativeMessage != null) {
            m = Message.obtain(mButtonNegativeMessage);
        } else if (v == mButtonNeutral && mButtonNeutralMessage != null) {
            m = Message.obtain(mButtonNeutralMessage);
        }
        if (m != null) {
            m.sendToTarget();
        }
        if (mAutoDismiss) {
            // Post a message so we dismiss after the above handlers are
            // executed
            mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface)
                    .sendToTarget();
        }
    }
};
/**
 * Handler that delivers button callbacks and dismiss requests on the
 * dialog's thread. Holds the dialog via a {@link WeakReference} so a
 * pending message cannot leak the dialog/activity.
 */
private static final class ButtonHandler extends Handler {
    // Button clicks have Message.what as the BUTTON{1,2,3} constant
    private static final int MSG_DISMISS_DIALOG = 1;
    private WeakReference<DialogInterface> mDialog;
    public ButtonHandler(DialogInterface dialog) {
        mDialog = new WeakReference<DialogInterface>(dialog);
    }
    @Override
    public void handleMessage(Message msg) {
        switch (msg.what) {
            // BUTTON_* constants are negative, so they cannot collide with
            // MSG_DISMISS_DIALOG (== 1).
            case DialogInterface.BUTTON_POSITIVE:
            case DialogInterface.BUTTON_NEGATIVE:
            case DialogInterface.BUTTON_NEUTRAL:
                ((DialogInterface.OnClickListener) msg.obj).onClick(mDialog.get(), msg.what);
                break;
            case MSG_DISMISS_DIALOG:
                ((DialogInterface) msg.obj).dismiss();
        }
    }
}
/** Asynchronously dismisses the dialog via the handler queue. */
public void sendDismissMessage() {
    mHandler.obtainMessage(ButtonHandler.MSG_DISMISS_DIALOG, mDialogInterface).sendToTarget();
}
/** Creates a controller anchored at the bottom of the screen (the default). */
public MLAlertController(Context context, DialogInterface di, Window window) {
    this(context, di, window, Gravity.BOTTOM);
}
/**
 * Creates a controller with an explicit window gravity.
 *
 * @param context the context used to resolve resources
 * @param di      the dialog this controller drives (used for callbacks and dismissal)
 * @param window  the dialog window whose content this controller installs
 * @param gravity where the dialog is placed, e.g. {@link Gravity#BOTTOM} or CENTER
 */
public MLAlertController(Context context, DialogInterface di, Window window, int gravity) {
    mContext = context;
    mDialogInterface = di;
    mWindow = window;
    mHandler = new ButtonHandler(di);
    // Layout resources for the dialog frame and the various list item styles.
    mAlertDialogLayout = R.layout.ml_alert_dialog;
    mListLayout = R.layout.ml_select_dialog;
    mListLayoutWithTitle = R.layout.ml_select_dialog_center;
    mMultiChoiceItemLayout = R.layout.ml_select_dialog_multichoice;
    mSingleChoiceItemLayout = R.layout.ml_select_dialog_singlechoice;
    mListItemLayout = R.layout.ml_select_dialog_item;
    mGravity = gravity;
}
/**
 * Returns {@code true} if {@code v} or any view in its subtree reports
 * itself as a text editor (i.e. the soft keyboard may be needed).
 */
static boolean canTextInput(View v) {
    if (v.onCheckIsTextEditor()) {
        return true;
    }
    if (!(v instanceof ViewGroup)) {
        return false;
    }
    final ViewGroup group = (ViewGroup) v;
    // Walk the children back-to-front, recursing into each subtree.
    for (int index = group.getChildCount() - 1; index >= 0; index--) {
        if (canTextInput(group.getChildAt(index))) {
            return true;
        }
    }
    return false;
}
/**
 * Installs the dialog layout into the window and wires up all panels.
 * Must be called before the dialog is shown.
 */
public void installContent() {
    /* We use a custom title so never request a window title */
    mWindow.requestFeature(Window.FEATURE_NO_TITLE);
    mWindow.setGravity(mGravity);
    if (mView == null || !canTextInput(mView)) {
        // No text input anywhere in the dialog: keep the IME from gaining focus.
        mWindow.setFlags(WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM,
                WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM);
    }
    mWindow.setContentView(mAlertDialogLayout);
    setupView();
}
/**
 * Sets the dialog title. Safe to call both before and after
 * {@link #installContent()}: the view is updated if it already exists.
 */
public void setTitle(CharSequence title) {
    mTitle = title;
    if (mTitleView != null) {
        mTitleView.setText(title);
    }
}
/**
 * @see android.app.AlertDialog.Builder#setCustomTitle(View)
 */
public void setCustomTitle(View customTitleView) {
    mCustomTitleView = customTitleView;
}
/**
 * Controls whether the dialog dismisses itself after a button click.
 *
 * @deprecated misspelled historical name, kept for binary/source
 *             compatibility; use {@link #setAutoDismiss(boolean)}.
 */
@Deprecated
public void setAudoDismiss(boolean autoDismiss) {
    setAutoDismiss(autoDismiss);
}
/**
 * Controls whether the dialog dismisses itself after a button click.
 *
 * @param autoDismiss {@code true} (the default) to auto-dismiss
 */
public void setAutoDismiss(boolean autoDismiss) {
    mAutoDismiss = autoDismiss;
}
/**
 * Sets the message text. Updates the view in place if the content has
 * already been installed.
 */
public void setMessage(CharSequence message) {
    mMessage = message;
    if (mMessageView != null) {
        mMessageView.setText(message);
    }
}
/**
 * Set the view to display in the dialog.
 */
public void setView(View view) {
    mView = view;
    mViewSpacingSpecified = false;
}
/** Marks the custom view's background as transparent (applied in setupView). */
public void setCustomTransplant(boolean b) {
    mCustomBgTransplant = b;
}
/**
 * Set the view to display in the dialog along with the spacing around that
 * view
 */
public void setView(View view, int viewSpacingLeft, int viewSpacingTop, int viewSpacingRight,
        int viewSpacingBottom) {
    mView = view;
    mViewSpacingSpecified = true;
    mViewSpacingLeft = viewSpacingLeft;
    mViewSpacingTop = viewSpacingTop;
    mViewSpacingRight = viewSpacingRight;
    mViewSpacingBottom = viewSpacingBottom;
}
/**
 * Registers the text and callback for one of the three dialog buttons.
 * Pass either a {@code listener} or a pre-built {@code msg}; when only a
 * listener is given, a message targeting the internal handler is created.
 *
 * @param whichButton one of {@link DialogInterface#BUTTON_POSITIVE},
 *                    {@link DialogInterface#BUTTON_NEGATIVE} or
 *                    {@link DialogInterface#BUTTON_NEUTRAL}
 * @param text        the button label
 * @param listener    the click callback, may be {@code null} if {@code msg} is given
 * @param msg         the message to send on click, may be {@code null}
 * @throws IllegalArgumentException if {@code whichButton} is not a known button
 */
public void setButton(int whichButton, CharSequence text,
        DialogInterface.OnClickListener listener, Message msg) {
    Message callback = msg;
    if (callback == null && listener != null) {
        callback = mHandler.obtainMessage(whichButton, listener);
    }
    if (whichButton == DialogInterface.BUTTON_POSITIVE) {
        mButtonPositiveText = text;
        mButtonPositiveMessage = callback;
    } else if (whichButton == DialogInterface.BUTTON_NEGATIVE) {
        mButtonNegativeText = text;
        mButtonNegativeMessage = callback;
    } else if (whichButton == DialogInterface.BUTTON_NEUTRAL) {
        mButtonNeutralText = text;
        mButtonNeutralMessage = callback;
    } else {
        throw new IllegalArgumentException("Button does not exist");
    }
}
/**
 * Set resId to 0 if you don't want an icon.
 *
 * @param resId the resourceId of the drawable to use as the icon or 0 if
 *            you don't want an icon.
 */
public void setIcon(int resId) {
    mIconId = resId;
    if (mIconView != null) {
        // Note: a negative resId leaves the current icon view untouched.
        if (resId > 0) {
            mIconView.setImageResource(mIconId);
        } else if (resId == 0) {
            mIconView.setVisibility(View.GONE);
        }
    }
}
/** Sets the title icon from a drawable; ignored if {@code icon} is null. */
public void setIcon(Drawable icon) {
    mIcon = icon;
    if ((mIconView != null) && (mIcon != null)) {
        mIconView.setImageDrawable(icon);
    }
}
/** Forces the inverse (light) background for the custom panel. */
public void setInverseBackgroundForced(boolean forceInverseBackground) {
    mForceInverseBackground = forceInverseBackground;
}
/** @return the list view, or null if this dialog shows no list */
public ListView getListView() {
    return mListView;
}
/** @return the custom content view, or null if none was set */
public View getView() {
    return mView;
}
/**
 * Returns the {@link Button} widget for the given {@code DialogInterface}
 * button id, or {@code null} for an unknown id.
 */
public Button getButton(int whichButton) {
    if (whichButton == DialogInterface.BUTTON_POSITIVE) {
        return mButtonPositive;
    }
    if (whichButton == DialogInterface.BUTTON_NEGATIVE) {
        return mButtonNegative;
    }
    if (whichButton == DialogInterface.BUTTON_NEUTRAL) {
        return mButtonNeutral;
    }
    return null;
}
@SuppressWarnings({
        "UnusedDeclaration"
})
/**
 * Key handling hook: MENU dismisses a visible list dialog; everything else
 * is offered to the scroll view (when present) for scrolling.
 */
public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (keyCode == KeyEvent.KEYCODE_MENU && mListView != null
            && mListView.getVisibility() == View.VISIBLE) {
        this.mDialogInterface.dismiss();
    }
    return mScrollView != null && mScrollView.executeKeyEvent(event);
}
@SuppressWarnings({
        "UnusedDeclaration"
})
/** Forwards key-up events to the scroll view when one is present. */
public boolean onKeyUp(int keyCode, KeyEvent event) {
    return mScrollView != null && mScrollView.executeKeyEvent(event);
}
/**
 * Wires the inflated dialog layout: content/message panel, buttons, title,
 * custom view panel, divider lines and background. Called once from
 * {@link #installContent()} after {@code setContentView}.
 */
private void setupView() {
    LinearLayout contentPanel = (LinearLayout) mWindow.findViewById(R.id.contentPanel);
    setupContent(contentPanel);
    boolean hasButtons = setupButtons();
    LinearLayout topPanel = (LinearLayout) mWindow.findViewById(R.id.topPanel);
    boolean hasTitle = setupTitle(topPanel);
    View buttonPanel = mWindow.findViewById(R.id.buttonPanel);
    if (!hasButtons) {
        buttonPanel.setVisibility(View.GONE);
    }
    FrameLayout customPanel = (FrameLayout) mWindow.findViewById(R.id.customPanel);
    if (mView != null) {
        // Custom dialogs may use a transparent background.
        // mWindow.findViewById(R.id.parentPanel).setBackgroundColor(mContext.getResources().getColor(android.R.color.transparent));
        FrameLayout custom = (FrameLayout) mWindow.findViewById(R.id.custom);
        custom.addView(mView);
        if (mViewSpacingSpecified) {
            custom.setPadding(mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
                    mViewSpacingBottom);
            if (mCustomBgTransplant)
                mTransplantBg = true;
        }
        if (mListView != null) {
            // A list takes the remaining space, so the custom panel gets no weight.
            ((LinearLayout.LayoutParams) customPanel.getLayoutParams()).weight = 0;
        }
    } else {
        customPanel.setVisibility(View.GONE);
    }
    if (mTransplantBg) {
        mWindow.findViewById(R.id.parentPanel).setBackgroundColor(
                mContext.getResources().getColor(android.R.color.transparent));
    } else {
        // mWindow.findViewById(R.id.parentPanel).setBackgroundColor(0xffffffff);
    }
    if (mListView != null) {
        // The ListView draws its own dividers, so show the divider lines
        // above and below it as well.
        mWindow.findViewById(R.id.title_divider_line).setVisibility(View.VISIBLE);
        mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.VISIBLE);
    } else {
        mWindow.findViewById(R.id.title_divider_line).setVisibility(View.GONE);
        mWindow.findViewById(R.id.title_divider_line_bottom).setVisibility(View.GONE);
    }
    /**
     * Add margin top for the button panel if we have not any panel
     */
    if (topPanel.getVisibility() == View.GONE && contentPanel.getVisibility() == View.GONE
            && customPanel.getVisibility() == View.GONE && hasButtons) {
        // NOTE(review): top padding is set from getPaddingBottom() — presumably
        // to mirror the bottom spacing, but worth confirming against the layout.
        buttonPanel.setPadding(buttonPanel.getPaddingLeft(), buttonPanel.getPaddingBottom(),
                buttonPanel.getPaddingRight(), buttonPanel.getPaddingBottom());
    }
    /*
     * Only display the divider if we have a title and a custom view or a
     * message.
     */
    if (hasTitle) {
        // View divider = null;
        // if (mMessage != null || mView != null || mListView != null) {
        // divider = mWindow.findViewById(R.id.titleDivider);
        // } else {
        // divider = mWindow.findViewById(R.id.titleDividerTop);
        // }
        //
        // if (divider != null) {
        // divider.setVisibility(View.VISIBLE);
        // }
    }
    setBackground(topPanel, contentPanel, customPanel, hasButtons, hasTitle, buttonPanel);
    if (TextUtils.isEmpty(mTitle) && TextUtils.isEmpty(mMessage)) {
        mWindow.findViewById(R.id.empty_view).setVisibility(View.GONE);
    }
}
/**
 * Populates the title area: either installs the caller-supplied custom
 * title view, or binds the standard title text + icon template.
 *
 * @param topPanel the panel that hosts the title
 * @return {@code true} if any title (custom or text) is shown
 */
private boolean setupTitle(LinearLayout topPanel) {
    boolean hasTitle = true;
    if (mCustomTitleView != null) {
        // Add the custom title view directly to the topPanel layout
        LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(
                LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
        topPanel.addView(mCustomTitleView, 0, lp);
        // Hide the title template
        View titleTemplate = mWindow.findViewById(R.id.title_template);
        titleTemplate.setVisibility(View.GONE);
    } else {
        final boolean hasTextTitle = !TextUtils.isEmpty(mTitle);
        mIconView = (ImageView) mWindow.findViewById(R.id.icon);
        if (hasTextTitle) {
            /* Display the title if a title is supplied, else hide it */
            mTitleView = (TextView) mWindow.findViewById(R.id.alertTitle);
            mTitleView.setText(mTitle);
            /*
             * Do this last so that if the user has supplied any icons we
             * use them instead of the default ones. If the user has
             * specified 0 then make it disappear.
             */
            if (mIconId > 0) {
                mIconView.setImageResource(mIconId);
            } else if (mIcon != null) {
                mIconView.setImageDrawable(mIcon);
            } else if (mIconId == 0) {
                /*
                 * Apply the padding from the icon to ensure the title is
                 * aligned correctly.
                 */
                mTitleView.setPadding(mIconView.getPaddingLeft(),
                        mIconView.getPaddingTop(),
                        mIconView.getPaddingRight(),
                        mIconView.getPaddingBottom());
                mIconView.setVisibility(View.GONE);
            }
        } else {
            // Hide the title template
            View titleTemplate = mWindow.findViewById(R.id.title_template);
            titleTemplate.setVisibility(View.GONE);
            mIconView.setVisibility(View.GONE);
            topPanel.setVisibility(View.GONE);
            hasTitle = false;
        }
    }
    return hasTitle;
}
/**
 * Binds the message text into the scroll view, or — when there is no
 * message — replaces the scroll view with the list view (if any), or hides
 * the content panel entirely.
 */
private void setupContent(LinearLayout contentPanel) {
    mScrollView = (ScrollView) mWindow.findViewById(R.id.scrollView);
    mScrollView.setFocusable(false);
    // Special case for users that only want to display a String
    mMessageView = (TextView) mWindow.findViewById(R.id.message);
    if (mMessageView == null) {
        return;
    }
    if (mMessage != null) {
        mMessageView.setText(mMessage);
    } else {
        mMessageView.setVisibility(View.GONE);
        mScrollView.removeView(mMessageView);
        if (mListView != null) {
            // The list replaces the scroll view and takes all remaining space.
            contentPanel.removeView(mWindow.findViewById(R.id.scrollView));
            contentPanel.addView(mListView,
                    new LinearLayout.LayoutParams(MATCH_PARENT, MATCH_PARENT));
            contentPanel.setLayoutParams(new LinearLayout.LayoutParams(MATCH_PARENT, 0, 1.0f));
        } else {
            contentPanel.setVisibility(View.GONE);
        }
    }
}
/**
 * Binds text and the shared click listener onto the three button widgets,
 * hiding any with no text, and centers the button when exactly one is shown.
 *
 * @return {@code true} if at least one button is visible
 */
private boolean setupButtons() {
    // Bit set of visible buttons (BIT_BUTTON_* flags).
    int whichButtons = 0;
    mButtonPositive = (Button) mWindow.findViewById(R.id.button1);
    mButtonPositive.setOnClickListener(mButtonHandler);
    if (TextUtils.isEmpty(mButtonPositiveText)) {
        mButtonPositive.setVisibility(View.GONE);
    } else {
        mButtonPositive.setText(mButtonPositiveText);
        mButtonPositive.setVisibility(View.VISIBLE);
        whichButtons = whichButtons | BIT_BUTTON_POSITIVE;
    }
    mButtonNegative = (Button) mWindow.findViewById(R.id.button2);
    mButtonNegative.setOnClickListener(mButtonHandler);
    if (TextUtils.isEmpty(mButtonNegativeText)) {
        mButtonNegative.setVisibility(View.GONE);
    } else {
        mButtonNegative.setText(mButtonNegativeText);
        mButtonNegative.setVisibility(View.VISIBLE);
        whichButtons = whichButtons | BIT_BUTTON_NEGATIVE;
    }
    mButtonNeutral = (Button) mWindow.findViewById(R.id.button3);
    mButtonNeutral.setOnClickListener(mButtonHandler);
    if (TextUtils.isEmpty(mButtonNeutralText)) {
        mButtonNeutral.setVisibility(View.GONE);
    } else {
        mButtonNeutral.setText(mButtonNeutralText);
        mButtonNeutral.setVisibility(View.VISIBLE);
        whichButtons = whichButtons | BIT_BUTTON_NEUTRAL;
    }
    if (shouldCenterSingleButton(whichButtons)) {
        if (whichButtons == BIT_BUTTON_POSITIVE) {
            centerButton(mButtonPositive);
        } else if (whichButtons == BIT_BUTTON_NEGATIVE) {
            centerButton(mButtonNegative);
        } else if (whichButtons == BIT_BUTTON_NEUTRAL) {
            centerButton(mButtonNeutral);
        }
    }
    return whichButtons != 0;
}
/**
 * Returns {@code true} when the bit set describes exactly one visible
 * button, in which case that button should be centered.
 */
private static boolean shouldCenterSingleButton(int whichButton) {
    switch (whichButton) {
        case BIT_BUTTON_POSITIVE:
        case BIT_BUTTON_NEGATIVE:
        case BIT_BUTTON_NEUTRAL:
            return true;
        default:
            return false;
    }
}
/** Centers a lone button horizontally and gives it the common background. */
private void centerButton(TextView button) {
    LinearLayout.LayoutParams params = (LinearLayout.LayoutParams) button.getLayoutParams();
    params.gravity = Gravity.CENTER_HORIZONTAL;
    params.weight = 0.5f;
    button.setLayoutParams(params);
    button.setBackgroundResource(R.drawable.common_button);
}
/**
 * Applies panel backgrounds (all transparent when {@code mTransplantBg} is
 * set — the full/top/center/bottom bookkeeping is kept from the framework
 * AlertController it was derived from) and, finally, binds the adapter and
 * initial checked item onto the list view.
 */
private void setBackground(LinearLayout topPanel, LinearLayout contentPanel,
                           View customPanel, boolean hasButtons, boolean hasTitle,
                           View buttonPanel) {
    if (mTransplantBg) {
        /* Get all the different background required */
        // All nine variants resolve to transparent here; the structure below
        // is retained so position-dependent backgrounds could be reintroduced.
        int fullDark = mContext.getResources().getColor(android.R.color.transparent);
        int topDark = mContext.getResources().getColor(android.R.color.transparent);
        int centerDark = mContext.getResources().getColor(android.R.color.transparent);
        int bottomDark = mContext.getResources().getColor(android.R.color.transparent);
        int fullBright = mContext.getResources().getColor(android.R.color.transparent);
        int topBright = mContext.getResources().getColor(android.R.color.transparent);
        int centerBright = mContext.getResources().getColor(android.R.color.transparent);
        int bottomBright = mContext.getResources().getColor(android.R.color.transparent);
        int bottomMedium = mContext.getResources().getColor(android.R.color.transparent);
        /*
         * We now set the background of all of the sections of the alert.
         * First collect together each section that is being displayed along
         * with whether it is on a light or dark background, then run
         * through them setting their backgrounds. This is complicated
         * because we need to correctly use the full, top, middle, and
         * bottom graphics depending on how many views they are and where
         * they appear.
         */
        View[] views = new View[4];
        boolean[] light = new boolean[4];
        View lastView = null;
        boolean lastLight = false;
        int pos = 0;
        if (hasTitle) {
            views[pos] = topPanel;
            light[pos] = false;
            pos++;
        }
        /*
         * The contentPanel displays either a custom text message or a
         * ListView. If it's text we should use the dark background for
         * ListView we should use the light background. If neither are there
         * the contentPanel will be hidden so set it as null.
         */
        views[pos] = (contentPanel.getVisibility() == View.GONE)
                ? null : contentPanel;
        light[pos] = mListView != null;
        pos++;
        if (customPanel != null) {
            views[pos] = customPanel;
            light[pos] = mForceInverseBackground;
            pos++;
        }
        if (hasButtons) {
            views[pos] = buttonPanel;
            light[pos] = true;
        }
        boolean setView = false;
        for (pos = 0; pos < views.length; pos++) {
            View v = views[pos];
            if (v == null) {
                continue;
            }
            if (lastView != null) {
                // The previous non-null view is not the bottom-most, so it
                // gets a top or center background depending on position.
                if (!setView) {
                    lastView.setBackgroundResource(lastLight ? topBright : topDark);
                } else {
                    lastView.setBackgroundResource(lastLight ? centerBright : centerDark);
                }
                setView = true;
            }
            lastView = v;
            lastLight = light[pos];
        }
        if (lastView != null) {
            if (setView) {
                /*
                 * ListViews will use the Bright background but buttons use
                 * the Medium background.
                 */
                lastView.setBackgroundResource(
                        lastLight ? (hasButtons ? bottomMedium : bottomBright) : bottomDark);
            } else {
                lastView.setBackgroundResource(lastLight ? fullBright : fullDark);
            }
        }
    }
    if ((mListView != null) && (mAdapter != null)) {
        mListView.setAdapter(mAdapter);
        if (mCheckedItem > -1) {
            mListView.setItemChecked(mCheckedItem, true);
            mListView.setSelection(mCheckedItem);
        }
    }
}
/**
 * ListView subclass exposing a flag that callers use to decide whether
 * item views may be recycled during measurement.
 */
public static class RecycleListView extends ListView {
    boolean mRecycleOnMeasure = true;
    public RecycleListView(Context context) {
        super(context);
    }
    public RecycleListView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    public RecycleListView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }
    protected boolean recycleOnMeasure() {
        return mRecycleOnMeasure;
    }
}
public static class AlertParams {
public final Context mContext;
public final LayoutInflater mInflater;
public int mIconId = 0;
public Drawable mIcon;
public CharSequence mTitle;
public View mCustomTitleView;
public CharSequence mMessage;
public CharSequence mPositiveButtonText;
public DialogInterface.OnClickListener mPositiveButtonListener;
public CharSequence mNegativeButtonText;
public DialogInterface.OnClickListener mNegativeButtonListener;
public CharSequence mNeutralButtonText;
public DialogInterface.OnClickListener mNeutralButtonListener;
public boolean mCancelable;
public DialogInterface.OnCancelListener mOnCancelListener;
public DialogInterface.OnKeyListener mOnKeyListener;
public CharSequence[] mItems;
public ListAdapter mAdapter;
public DialogInterface.OnClickListener mOnClickListener;
public View mView;
public int mViewSpacingLeft;
public int mViewSpacingTop;
public int mViewSpacingRight;
public int mViewSpacingBottom;
public boolean mViewSpacingSpecified = false;
public boolean[] mCheckedItems;
public boolean mIsMultiChoice;
public boolean mIsSingleChoice;
public int mCheckedItem = -1;
public DialogInterface.OnMultiChoiceClickListener mOnCheckboxClickListener;
public Cursor mCursor;
public String mLabelColumn;
public String mIsCheckedColumn;
public boolean mForceInverseBackground;
public AdapterView.OnItemSelectedListener mOnItemSelectedListener;
public OnPrepareListViewListener mOnPrepareListViewListener;
public boolean mRecycleOnMeasure = true;
public boolean mAutoDismiss = true;
public MLAlertDialog.DismissCallBack mDismissCallBack;
public CharSequence mCustomTitle;
public boolean mCustomBgTransplant = false;
/**
* Interface definition for a callback to be invoked before the ListView
* will be bound to an adapter.
*/
public interface OnPrepareListViewListener {
/**
* Called before the ListView is bound to an adapter.
*
* @param listView The ListView that will be shown in the dialog.
*/
void onPrepareListView(ListView listView);
}
/** Creates a parameter holder; dialogs are cancelable by default. */
public AlertParams(Context context) {
    mContext = context;
    mCancelable = true;
    mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
}
/**
 * Transfers all collected builder parameters onto the controller:
 * title/icon, message, buttons, list content and custom view.
 */
public void apply(MLAlertController dialog) {
    if (mCustomTitleView != null) {
        dialog.setCustomTitle(mCustomTitleView);
    } else {
        if (mTitle != null) {
            dialog.setTitle(mTitle);
        }
        if (mIcon != null) {
            dialog.setIcon(mIcon);
        }
        // mIconId defaults to 0, so this always runs when no custom title is
        // set; setIcon(0) just records the id before the views exist.
        if (mIconId >= 0) {
            dialog.setIcon(mIconId);
        }
    }
    if (mMessage != null) {
        dialog.setMessage(mMessage);
    }
    if (mPositiveButtonText != null) {
        dialog.setButton(DialogInterface.BUTTON_POSITIVE, mPositiveButtonText,
                mPositiveButtonListener, null);
    }
    if (mNegativeButtonText != null) {
        dialog.setButton(DialogInterface.BUTTON_NEGATIVE, mNegativeButtonText,
                mNegativeButtonListener, null);
    }
    if (mNeutralButtonText != null) {
        dialog.setButton(DialogInterface.BUTTON_NEUTRAL, mNeutralButtonText,
                mNeutralButtonListener, null);
    }
    if (mForceInverseBackground) {
        dialog.setInverseBackgroundForced(true);
    }
    // For a list, the client can either supply an array of items or an
    // adapter or a cursor
    dialog.mTransplantBg = false;
    if ((mItems != null) || (mCursor != null) || (mAdapter != null)) {
        // Center-gravity dialogs use the titled list layout as a custom view.
        if (dialog.mGravity == Gravity.CENTER) {
            createCenterListView(dialog);
        } else {
            createListView(dialog);
        }
    }
    if (mView != null) {
        if (mViewSpacingSpecified) {
            dialog.setView(mView, mViewSpacingLeft, mViewSpacingTop, mViewSpacingRight,
                    mViewSpacingBottom);
        } else {
            dialog.setView(mView);
        }
    }
    dialog.setAudoDismiss(mAutoDismiss);
    dialog.setCustomTransplant(mCustomBgTransplant);
}
/**
 * Builds the list content for a center-gravity dialog: inflates the titled
 * list layout, wires the adapter (items, supplied adapter, or cursor),
 * installs click/selection listeners and hands the assembled view to the
 * controller as its custom view with a transparent background.
 *
 * Fix: the {@code mOnItemSelectedListener} registration was duplicated —
 * the second, identical block has been removed.
 */
private void createCenterListView(final MLAlertController dialog) {
    final LinearLayout customView = (LinearLayout)
            mInflater.inflate(dialog.mListLayoutWithTitle, null);
    final RecycleListView listView = (RecycleListView) customView
            .findViewById(R.id.select_dialog_listview);
    ListAdapter adapter;
    int layout = R.layout.ml_center_item;
    if (mCursor == null) {
        adapter = (mAdapter != null) ? mAdapter
                : new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems);
    } else {
        adapter = new SimpleCursorAdapter(mContext, layout,
                mCursor, new String[] {
                        mLabelColumn
                }, new int[] {
                        R.id.text1
                });
    }
    if (mCustomTitle != null) {
        ((TextView) (customView.findViewById(R.id.title))).setText(mCustomTitle);
    }
    if (mOnPrepareListViewListener != null) {
        mOnPrepareListViewListener.onPrepareListView(listView);
    }
    /*
     * Don't directly set the adapter on the ListView as we might want
     * to add a footer to the ListView later.
     */
    dialog.mAdapter = adapter;
    listView.setAdapter(adapter);
    dialog.mCheckedItem = mCheckedItem;
    if (mOnClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                mOnClickListener.onClick(dialog.mDialogInterface, position);
                if (!mIsSingleChoice) {
                    dialog.mDialogInterface.dismiss();
                }
            }
        });
    } else if (mOnCheckboxClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                if (mCheckedItems != null) {
                    mCheckedItems[position] = listView.isItemChecked(position);
                }
                mOnCheckboxClickListener.onClick(
                        dialog.mDialogInterface, position, listView.isItemChecked(position));
            }
        });
    }
    // Attach a given OnItemSelectedListener to the ListView
    if (mOnItemSelectedListener != null) {
        listView.setOnItemSelectedListener(mOnItemSelectedListener);
    }
    if (mIsSingleChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
    } else if (mIsMultiChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
    }
    listView.mRecycleOnMeasure = mRecycleOnMeasure;
    dialog.mView = customView;
    dialog.mTransplantBg = true;
    dialog.setCustomTransplant(mCustomBgTransplant);
}
/**
 * Builds the standard (non-centered) list content: inflates the plain list
 * layout, chooses the adapter for multi-choice, single-choice or plain
 * items (from an array, a supplied adapter, or a cursor), wires the click
 * and selection listeners and assigns the list to the controller.
 */
private void createListView(final MLAlertController dialog) {
    final RecycleListView listView = (RecycleListView)
            mInflater.inflate(dialog.mListLayout, null);
    ListAdapter adapter;
    if (mIsMultiChoice) {
        if (mCursor == null) {
            adapter = new ArrayAdapter<CharSequence>(
                    mContext, dialog.mMultiChoiceItemLayout, R.id.text1, mItems) {
                @Override
                public View getView(int position, View convertView, ViewGroup parent) {
                    View view = super.getView(position, convertView, parent);
                    // Seed the checked state from mCheckedItems as rows are built.
                    if (mCheckedItems != null) {
                        boolean isItemChecked = mCheckedItems[position];
                        if (isItemChecked) {
                            listView.setItemChecked(position, true);
                        }
                    }
                    return view;
                }
            };
        } else {
            adapter = new CursorAdapter(mContext, mCursor, false) {
                private final int mLabelIndex;
                private final int mIsCheckedIndex;
                {
                    final Cursor cursor = getCursor();
                    mLabelIndex = cursor.getColumnIndexOrThrow(mLabelColumn);
                    mIsCheckedIndex = cursor.getColumnIndexOrThrow(mIsCheckedColumn);
                }
                @Override
                public void bindView(View view, Context context, Cursor cursor) {
                    CheckedTextView text = (CheckedTextView) view.findViewById(R.id.text1);
                    text.setText(cursor.getString(mLabelIndex));
                    listView.setItemChecked(cursor.getPosition(),
                            cursor.getInt(mIsCheckedIndex) == 1);
                }
                @Override
                public View newView(Context context, Cursor cursor, ViewGroup parent) {
                    return mInflater.inflate(dialog.mMultiChoiceItemLayout,
                            parent, false);
                }
            };
        }
    } else {
        int layout = mIsSingleChoice
                ? dialog.mSingleChoiceItemLayout : dialog.mListItemLayout;
        if (mCursor == null) {
            adapter = (mAdapter != null) ? mAdapter
                    : new ArrayAdapter<CharSequence>(mContext, layout, R.id.text1, mItems);
        } else {
            adapter = new SimpleCursorAdapter(mContext, layout,
                    mCursor, new String[] {
                            mLabelColumn
                    }, new int[] {
                            R.id.text1
                    });
        }
    }
    if (mOnPrepareListViewListener != null) {
        mOnPrepareListViewListener.onPrepareListView(listView);
    }
    /*
     * Don't directly set the adapter on the ListView as we might want
     * to add a footer to the ListView later.
     */
    dialog.mAdapter = adapter;
    dialog.mCheckedItem = mCheckedItem;
    if (mOnClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                mOnClickListener.onClick(dialog.mDialogInterface, position);
                if (!mIsSingleChoice) {
                    dialog.mDialogInterface.dismiss();
                }
            }
        });
    } else if (mOnCheckboxClickListener != null) {
        listView.setOnItemClickListener(new OnItemClickListener() {
            public void onItemClick(AdapterView parent, View v, int position, long id) {
                if (mCheckedItems != null) {
                    mCheckedItems[position] = listView.isItemChecked(position);
                }
                mOnCheckboxClickListener.onClick(
                        dialog.mDialogInterface, position, listView.isItemChecked(position));
            }
        });
    }
    // Attach a given OnItemSelectedListener to the ListView
    if (mOnItemSelectedListener != null) {
        listView.setOnItemSelectedListener(mOnItemSelectedListener);
    }
    if (mIsSingleChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
    } else if (mIsMultiChoice) {
        listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
    }
    listView.mRecycleOnMeasure = mRecycleOnMeasure;
    dialog.mListView = listView;
    dialog.setCustomTransplant(mCustomBgTransplant);
}
}
}
| Liyueyang/NewXmPluginSDK | common_ui/src/main/java/com/xiaomi/smarthome/common/ui/dialog/MLAlertController.java | Java | apache-2.0 | 39,780 |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const options_1 = require("./options");
// Schematics task wrapper that schedules a `tslint --fix` run.
// Note: this is compiled TypeScript output (generated code).
class TslintFixTask {
    // Overloaded constructor:
    //   new TslintFixTask(options)                      — options only
    //   new TslintFixTask(configOrPath, options)        — tslint config object
    //                                                     or path + options
    constructor(configOrPath, options) {
        if (options) {
            this._configOrPath = configOrPath;
            this._options = options;
        }
        else {
            this._options = configOrPath;
            this._configOrPath = null;
        }
    }
    // Produces the task configuration consumed by the task runner: merges the
    // base options with either { tslintPath } (string form) or
    // { tslintConfig } (object form) of the stored config.
    toConfiguration() {
        const path = typeof this._configOrPath == 'string' ? { tslintPath: this._configOrPath } : {};
        const config = typeof this._configOrPath == 'object' && this._configOrPath !== null
            ? { tslintConfig: this._configOrPath }
            : {};
        const options = {
            ...this._options,
            ...path,
            ...config,
        };
        return { name: options_1.TslintFixName, options };
    }
}
exports.TslintFixTask = TslintFixTask;
| cloudfoundry-community/asp.net5-buildpack | fixtures/node_apps/angular_dotnet/ClientApp/node_modules/@angular-devkit/schematics/tasks/tslint-fix/task.js | JavaScript | apache-2.0 | 913 |
/*
* Copyright 2007 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.reteoo;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.InternalWorkingMemoryEntryPoint;
import org.drools.core.common.PropagationContextFactory;
import org.drools.core.common.RuleBasePartitionId;
import org.drools.core.util.Iterator;
import org.drools.core.util.ObjectHashSet.ObjectEntry;
import org.drools.core.reteoo.LeftInputAdapterNode.LiaNodeMemory;
import org.drools.core.reteoo.ObjectTypeNode.ObjectTypeNodeMemory;
import org.drools.core.reteoo.builder.BuildContext;
import org.drools.core.rule.EntryPointId;
import org.drools.core.spi.ObjectType;
import org.drools.core.spi.PropagationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* A node that is an entry point into the Rete network.
*
* As we move the design to support network partitions and concurrent processing
* of parts of the network, we also need to support multiple, independent entry
* points and this class represents that.
*
* It replaces the function of the Rete Node class in previous designs.
*
* @see ObjectTypeNode
*/
public class EntryPointNode extends ObjectSource
implements
Externalizable,
ObjectSink {
// ------------------------------------------------------------
// Instance members
// ------------------------------------------------------------
private static final long serialVersionUID = 510l;
protected static transient Logger log = LoggerFactory.getLogger(EntryPointNode.class);
/**
* The entry point ID for this node
*/
private EntryPointId entryPoint;
/**
* The object type nodes under this node
*/
private Map<ObjectType, ObjectTypeNode> objectTypeNodes;
private ObjectTypeNode queryNode;
private ObjectTypeNode activationNode;
// ------------------------------------------------------------
// Constructors
// ------------------------------------------------------------
/** No-arg constructor required for {@link Externalizable} deserialization. */
public EntryPointNode() {
}
public EntryPointNode(final int id,
                      final ObjectSource objectSource,
                      final BuildContext context) {
    this( id,
          context.getPartitionId(),
          context.getKnowledgeBase().getConfiguration().isMultithreadEvaluation(),
          objectSource,
          context.getCurrentEntryPoint() ); // irrelevant for this node, since it overrides sink management
}
/**
 * Full constructor.
 *
 * @param id            node id in the network
 * @param partitionId   partition this node belongs to
 * @param partitionsEnabled whether partitioned evaluation is on
 * @param objectSource  upstream source feeding this entry point
 * @param entryPoint    the logical entry point id this node represents
 */
public EntryPointNode(final int id,
                      final RuleBasePartitionId partitionId,
                      final boolean partitionsEnabled,
                      final ObjectSource objectSource,
                      final EntryPointId entryPoint) {
    super( id,
           partitionId,
           partitionsEnabled,
           objectSource,
           999 ); // irrelevant for this node, since it overrides sink management
    this.entryPoint = entryPoint;
    this.objectTypeNodes = new ConcurrentHashMap<ObjectType, ObjectTypeNode>();
}
// ------------------------------------------------------------
// Instance methods
// ------------------------------------------------------------
@SuppressWarnings("unchecked")
/** Restores entry point id and object-type-node map; counterpart of writeExternal. */
public void readExternal(ObjectInput in) throws IOException,
                                        ClassNotFoundException {
    super.readExternal( in );
    entryPoint = (EntryPointId) in.readObject();
    objectTypeNodes = (Map<ObjectType, ObjectTypeNode>) in.readObject();
}
/** Serializes entry point id and object-type-node map after the superclass state. */
public void writeExternal(ObjectOutput out) throws IOException {
    super.writeExternal( out );
    out.writeObject( entryPoint );
    out.writeObject( objectTypeNodes );
}
/** @return the node-type discriminator for this class */
public short getType() {
    return NodeTypeEnums.EntryPointNode;
}
/**
 * @return the entryPoint
 */
public EntryPointId getEntryPoint() {
    return entryPoint;
}
// Package-private: only the builder is expected to re-point the entry point id.
void setEntryPoint(EntryPointId entryPoint) {
    this.entryPoint = entryPoint;
}
/**
 * Propagates a DroolsQuery fact insertion to the query ObjectTypeNode,
 * lazily resolving that node on first use. A no-op when the network
 * contains no queries.
 */
public void assertQuery(final InternalFactHandle factHandle,
                        final PropagationContext context,
                        final InternalWorkingMemory workingMemory) {
    if ( queryNode == null ) {
        this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
    }
    if ( queryNode == null ) {
        // There may be no queries defined
        return;
    }
    this.queryNode.assertObject( factHandle, context, workingMemory );
}
/**
 * Propagates a DroolsQuery fact retraction to the query ObjectTypeNode,
 * lazily resolving that node on first use. A no-op when the network
 * contains no queries.
 */
public void retractQuery(final InternalFactHandle factHandle,
                         final PropagationContext context,
                         final InternalWorkingMemory workingMemory) {
    if ( queryNode == null ) {
        this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
    }
    if ( queryNode == null ) {
        // There may be no queries defined
        return;
    }
    this.queryNode.retractObject( factHandle, context, workingMemory );
}
    /**
     * Propagates a DroolsQuery fact update. The handle's previous left/right
     * tuples are captured and cleared first so stale matches can be retracted
     * after the modify has been pushed through the query node.
     */
    public void modifyQuery(final InternalFactHandle factHandle,
                            final PropagationContext context,
                            final InternalWorkingMemory workingMemory) {
        if ( queryNode == null ) {
            this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
        }
        if ( queryNode != null ) {
            // Capture previous tuples, then clear them on the handle before re-propagating.
            ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(factHandle.getFirstLeftTuple(), factHandle.getFirstRightTuple(), this );
            factHandle.clearLeftTuples();
            factHandle.clearRightTuples();
            // There may be no queries defined
            this.queryNode.modifyObject( factHandle, modifyPreviousTuples, context, workingMemory );
            // Retract whatever previous matches were not re-matched above.
            modifyPreviousTuples.retractTuples( context, workingMemory );
        }
    }
    /**
     * @return the lazily resolved ObjectTypeNode for DroolsQuery facts, or null
     *         when no queries are defined in the network
     */
    public ObjectTypeNode getQueryNode() {
        if ( queryNode == null ) {
            this.queryNode = objectTypeNodes.get( ClassObjectType.DroolsQuery_ObjectType );
        }
        return this.queryNode;
    }
    /**
     * Propagates a Match/Activation fact insertion to the activation
     * ObjectTypeNode; no-op when no rules react to activations.
     */
    public void assertActivation(final InternalFactHandle factHandle,
                                 final PropagationContext context,
                                 final InternalWorkingMemory workingMemory) {
        if ( activationNode == null ) {
            this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType );
        }
        if ( activationNode != null ) {
            // There may be no queries defined
            this.activationNode.assertObject( factHandle, context, workingMemory );
        }
    }
    /**
     * Propagates a Match/Activation fact retraction; no-op when no rules react
     * to activations.
     */
    public void retractActivation(final InternalFactHandle factHandle,
                                  final PropagationContext context,
                                  final InternalWorkingMemory workingMemory) {
        if ( activationNode == null ) {
            this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType );
        }
        if ( activationNode != null ) {
            // There may be no queries defined
            this.activationNode.retractObject( factHandle, context, workingMemory );
        }
    }
    /**
     * Propagates a Match/Activation fact update; mirrors {@link #modifyQuery}:
     * previous tuples are captured and cleared, the modify is propagated, then
     * stale matches are retracted.
     */
    public void modifyActivation(final InternalFactHandle factHandle,
                                 final PropagationContext context,
                                 final InternalWorkingMemory workingMemory) {
        if ( activationNode == null ) {
            this.activationNode = objectTypeNodes.get( ClassObjectType.Match_ObjectType );
        }
        if ( activationNode != null ) {
            // Capture previous tuples, then clear them on the handle before re-propagating.
            ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(factHandle.getFirstLeftTuple(), factHandle.getFirstRightTuple(), this );
            factHandle.clearLeftTuples();
            factHandle.clearRightTuples();
            // There may be no queries defined
            this.activationNode.modifyObject( factHandle, modifyPreviousTuples, context, workingMemory );
            // Retract whatever previous matches were not re-matched above.
            modifyPreviousTuples.retractTuples( context, workingMemory );
        }
    }
public void assertObject(final InternalFactHandle handle,
final PropagationContext context,
final ObjectTypeConf objectTypeConf,
final InternalWorkingMemory workingMemory) {
if ( log.isTraceEnabled() ) {
log.trace( "Insert {}", handle.toString() );
}
ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
for ( int i = 0, length = cachedNodes.length; i < length; i++ ) {
cachedNodes[i].assertObject( handle,
context,
workingMemory );
}
}
    /**
     * Entry point for fact updates. The handle's previous left/right tuples are
     * captured and cleared, then the modify is propagated through each cached
     * ObjectTypeNode in order. Between OTNs, tuples that belong to the OTN just
     * processed (and were not re-matched) are deleted eagerly so that the next
     * OTN only sees tuples relevant to it; anything left over at the end is
     * retracted in one pass.
     */
    public void modifyObject(final InternalFactHandle handle,
                             final PropagationContext pctx,
                             final ObjectTypeConf objectTypeConf,
                             final InternalWorkingMemory wm) {
        if ( log.isTraceEnabled() ) {
            log.trace( "Update {}", handle.toString() );
        }
        ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
        // capture the previous tuples, then clear them on the handle
        ModifyPreviousTuples modifyPreviousTuples = new ModifyPreviousTuples(handle.getFirstLeftTuple(), handle.getFirstRightTuple(), this );
        handle.clearLeftTuples();
        handle.clearRightTuples();
        for ( int i = 0, length = cachedNodes.length; i < length; i++ ) {
            cachedNodes[i].modifyObject( handle,
                                         modifyPreviousTuples,
                                         pctx, wm );
            // remove any right tuples that matches the current OTN before continue the modify on the next OTN cache entry
            if (i < cachedNodes.length - 1) {
                RightTuple rightTuple = modifyPreviousTuples.peekRightTuple();
                // Right tuples are drained while they still belong to the OTN just processed.
                while ( rightTuple != null &&
                        (( BetaNode ) rightTuple.getRightTupleSink()).getObjectTypeNode() == cachedNodes[i] ) {
                    modifyPreviousTuples.removeRightTuple();
                    doRightDelete(pctx, wm, rightTuple);
                    rightTuple = modifyPreviousTuples.peekRightTuple();
                }
                LeftTuple leftTuple;
                ObjectTypeNode otn;
                // Left tuples are drained until one belongs to the *next* OTN (or the
                // owning OTN cannot be determined), which ends this inter-OTN cleanup.
                while ( true ) {
                    leftTuple = modifyPreviousTuples.peekLeftTuple();
                    otn = null;
                    if (leftTuple != null) {
                        LeftTupleSink leftTupleSink = leftTuple.getLeftTupleSink();
                        if (leftTupleSink instanceof LeftTupleSource) {
                            otn = ((LeftTupleSource)leftTupleSink).getLeftTupleSource().getObjectTypeNode();
                        } else if (leftTupleSink instanceof RuleTerminalNode) {
                            otn = ((RuleTerminalNode)leftTupleSink).getObjectTypeNode();
                        }
                    }
                    if ( otn == null || otn == cachedNodes[i+1] ) break;
                    modifyPreviousTuples.removeLeftTuple();
                    doDeleteObject(pctx, wm, leftTuple);
                }
            }
        }
        // Retract any previous tuples that were never re-matched by the modify.
        modifyPreviousTuples.retractTuples( pctx, wm );
    }
    /**
     * Deletes a stale left tuple by driving the delete through its owning
     * LeftInputAdapterNode and that node's segment memory.
     */
    public void doDeleteObject(PropagationContext pctx, InternalWorkingMemory wm, LeftTuple leftTuple) {
        LeftInputAdapterNode liaNode = (LeftInputAdapterNode) leftTuple.getLeftTupleSink().getLeftTupleSource();
        LiaNodeMemory lm = ( LiaNodeMemory ) wm.getNodeMemory( liaNode );
        LeftInputAdapterNode.doDeleteObject( leftTuple, pctx, lm.getSegmentMemory(), wm, liaNode, true, lm );
    }
    /**
     * Deletes a stale right tuple by asking its beta-node sink to retract it,
     * stamping the propagation context on the tuple first.
     */
    public void doRightDelete(PropagationContext pctx, InternalWorkingMemory wm, RightTuple rightTuple) {
        rightTuple.setPropagationContext( pctx );
        rightTuple.getRightTupleSink().retractRightTuple( rightTuple, pctx, wm );
    }
    /**
     * Not supported on this node: updates must go through
     * {@link #modifyObject(InternalFactHandle, PropagationContext, ObjectTypeConf, InternalWorkingMemory)}.
     */
    public void modifyObject(InternalFactHandle factHandle,
                             ModifyPreviousTuples modifyPreviousTuples,
                             PropagationContext context,
                             InternalWorkingMemory workingMemory) {
        // this method was silently failing, so I am now throwing an exception to make
        // sure no one calls it by mistake
        throw new UnsupportedOperationException( "This method should NEVER EVER be called" );
    }
    /**
     * Not supported on this node: insertions must go through
     * {@link #assertObject(InternalFactHandle, PropagationContext, ObjectTypeConf, InternalWorkingMemory)},
     * which iterates the cache of matching <code>ObjectTypeNode</code>s.
     *
     * @param factHandle
     *            The FactHandle of the fact to assert
     * @param context
     *            The <code>PropagationContext</code> of the <code>WorkingMemory</code> action
     * @param workingMemory
     *            The working memory session.
     */
    public void assertObject(final InternalFactHandle factHandle,
                             final PropagationContext context,
                             final InternalWorkingMemory workingMemory) {
        // this method was silently failing, so I am now throwing an exception to make
        // sure no one calls it by mistake
        throw new UnsupportedOperationException( "This method should NEVER EVER be called" );
    }
/**
* Retract a fact object from this <code>RuleBase</code> and the specified
* <code>WorkingMemory</code>.
*
* @param handle
* The handle of the fact to retract.
* @param workingMemory
* The working memory session.
*/
public void retractObject(final InternalFactHandle handle,
final PropagationContext context,
final ObjectTypeConf objectTypeConf,
final InternalWorkingMemory workingMemory) {
if ( log.isTraceEnabled() ) {
log.trace( "Delete {}", handle.toString() );
}
ObjectTypeNode[] cachedNodes = objectTypeConf.getObjectTypeNodes();
if ( cachedNodes == null ) {
// it is possible that there are no ObjectTypeNodes for an object being retracted
return;
}
for ( int i = 0; i < cachedNodes.length; i++ ) {
cachedNodes[i].retractObject( handle,
context,
workingMemory );
}
}
    /**
     * Adds the <code>ObjectSink</code> so that it may receive
     * <code>Objects</code> propagated from this <code>ObjectSource</code>.
     *
     * @param objectSink
     *            The <code>ObjectSink</code> to receive propagated
     *            <code>Objects</code>. Rete only accepts <code>ObjectTypeNode</code>s
     *            as parameters to this method, though.
     */
    public void addObjectSink(final ObjectSink objectSink) {
        final ObjectTypeNode node = (ObjectTypeNode) objectSink;
        // Sinks are keyed by ObjectType; a second node for the same type replaces the first.
        this.objectTypeNodes.put( node.getObjectType(),
                                  node );
    }
    /**
     * Removes a previously added <code>ObjectTypeNode</code> sink, keyed by its
     * object type.
     */
    public void removeObjectSink(final ObjectSink objectSink) {
        final ObjectTypeNode node = (ObjectTypeNode) objectSink;
        this.objectTypeNodes.remove( node.getObjectType() );
    }
    /**
     * Attaches this node to its object source. With a build context, the entry
     * point caches of all running sessions are refreshed; in non-phreak mode the
     * source is additionally asked to replay existing facts into this node via
     * updateSink, under a RULE_ADDITION propagation context.
     */
    public void attach( BuildContext context ) {
        this.source.addObjectSink( this );
        if (context == null ) {
            return;
        }
        if ( context.getKnowledgeBase().getConfiguration().isPhreakEnabled() ) {
            // Phreak: cache refresh only, no eager propagation of existing facts.
            for ( InternalWorkingMemory workingMemory : context.getWorkingMemories() ) {
                workingMemory.updateEntryPointsCache();
            }
            return;
        }
        for ( InternalWorkingMemory workingMemory : context.getWorkingMemories() ) {
            workingMemory.updateEntryPointsCache();
            PropagationContextFactory pctxFactory = workingMemory.getKnowledgeBase().getConfiguration().getComponentFactory().getPropagationContextFactory();
            final PropagationContext propagationContext = pctxFactory.createPropagationContext(workingMemory.getNextPropagationIdCounter(), PropagationContext.RULE_ADDITION, null, null, null);
            this.source.updateSink( this,
                                    propagationContext,
                                    workingMemory );
        }
    }
    // Intentionally empty: an entry point node is never removed as part of rule removal.
    protected void doRemove(final RuleRemovalContext context,
                            final ReteooBuilder builder,
                            final InternalWorkingMemory[] workingMemories) {
    }
    /**
     * @return the live map of ObjectTypeNode sinks keyed by object type
     *         (not a defensive copy; callers must not mutate it)
     */
    public Map<ObjectType, ObjectTypeNode> getObjectTypeNodes() {
        return this.objectTypeNodes;
    }
    // Hash on the entry point id only, consistent with equals().
    public int hashCode() {
        return this.entryPoint.hashCode();
    }
public boolean equals(final Object object) {
if ( object == this ) {
return true;
}
if ( object == null || !(object instanceof EntryPointNode) ) {
return false;
}
final EntryPointNode other = (EntryPointNode) object;
return this.entryPoint.equals( other.entryPoint );
}
    /**
     * Replays existing facts into a newly attached ObjectTypeNode sink. For every
     * registered ObjectTypeConf whose concrete type is assignable to the new
     * node's type, the conf's node cache is reset and the facts held in that
     * conf's concrete node memory are re-asserted into the new sink.
     */
    public void updateSink(final ObjectSink sink,
                           final PropagationContext context,
                           final InternalWorkingMemory workingMemory) {
        // @todo
        // JBRULES-612: the cache MUST be invalidated when a new node type is added to the network, so iterate and reset all caches.
        final ObjectTypeNode node = (ObjectTypeNode) sink;
        final ObjectType newObjectType = node.getObjectType();
        InternalWorkingMemoryEntryPoint wmEntryPoint = (InternalWorkingMemoryEntryPoint) workingMemory.getWorkingMemoryEntryPoint( this.entryPoint.getEntryPointId() );
        for ( ObjectTypeConf objectTypeConf : wmEntryPoint.getObjectTypeConfigurationRegistry().values() ) {
            if ( newObjectType.isAssignableFrom( objectTypeConf.getConcreteObjectTypeNode().getObjectType() ) ) {
                objectTypeConf.resetCache();
                ObjectTypeNode sourceNode = objectTypeConf.getConcreteObjectTypeNode();
                Iterator it = ((ObjectTypeNodeMemory) workingMemory.getNodeMemory( sourceNode )).memory.iterator();
                // This iterator convention returns null (not hasNext()==false) at end of iteration.
                for ( ObjectEntry entry = (ObjectEntry) it.next(); entry != null; entry = (ObjectEntry) it.next() ) {
                    sink.assertObject( (InternalFactHandle) entry.getValue(),
                                       context,
                                       workingMemory );
                }
            }
        }
    }
    // An entry point node never buffers facts itself; memory lives in the ObjectTypeNodes.
    public boolean isObjectMemoryEnabled() {
        return false;
    }
    // Object memory cannot be enabled on this node type.
    public void setObjectMemoryEnabled(boolean objectMemoryEnabled) {
        throw new UnsupportedOperationException( "Entry Point Node has no Object memory" );
    }
    public String toString() {
        return "[EntryPointNode(" + this.id + ") " + this.entryPoint + " ]";
    }
    // Beta-node bypass is meaningless at the entry point level.
    public void byPassModifyToBetaNode(InternalFactHandle factHandle,
                                       ModifyPreviousTuples modifyPreviousTuples,
                                       PropagationContext context,
                                       InternalWorkingMemory workingMemory) {
        throw new UnsupportedOperationException();
    }
    // Property-reactivity masks are not applicable to an entry point node.
    @Override
    public long calculateDeclaredMask(List<String> settableProperties) {
        throw new UnsupportedOperationException();
    }
}
| bxf12315/drools | drools-core/src/main/java/org/drools/core/reteoo/EntryPointNode.java | Java | apache-2.0 | 20,815 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.route53.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* A complex type that contains information about that can be associated with your hosted zone.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/route53-2013-04-01/ListVPCAssociationAuthorizations"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListVPCAssociationAuthorizationsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The ID of the hosted zone for which to list the VPCs that can be associated with it. */
    private String hostedZoneId;

    /**
     * <i>Optional</i>: opaque pagination token. When a response carries a <code>NextToken</code>
     * element, more VPCs remain; resubmit the request with that value in the
     * <code>nexttoken</code> parameter to fetch the next page.
     */
    private String nextToken;

    /**
     * <i>Optional</i>: maximum number of VPCs Amazon Route 53 returns per page. When unset,
     * Route 53 returns up to 50 VPCs per page.
     */
    private String maxResults;

    /**
     * Sets the ID of the hosted zone for which to list associable VPCs.
     *
     * @param hostedZoneId the hosted zone ID
     */
    public void setHostedZoneId(String hostedZoneId) {
        this.hostedZoneId = hostedZoneId;
    }

    /**
     * @return the ID of the hosted zone for which to list associable VPCs
     */
    public String getHostedZoneId() {
        return this.hostedZoneId;
    }

    /**
     * Fluent variant of {@link #setHostedZoneId(String)}.
     *
     * @param hostedZoneId the hosted zone ID
     * @return this request, for call chaining
     */
    public ListVPCAssociationAuthorizationsRequest withHostedZoneId(String hostedZoneId) {
        setHostedZoneId(hostedZoneId);
        return this;
    }

    /**
     * Sets the pagination token from a previous response's <code>NextToken</code> element.
     *
     * @param nextToken the pagination token
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return the pagination token from a previous response, or null for the first page
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken the pagination token
     * @return this request, for call chaining
     */
    public ListVPCAssociationAuthorizationsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Sets the maximum number of VPCs returned per page (Route 53 defaults to 50 when unset).
     *
     * @param maxResults the page-size limit, as a decimal string
     */
    public void setMaxResults(String maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * @return the page-size limit, or null to use the Route 53 default of 50
     */
    public String getMaxResults() {
        return this.maxResults;
    }

    /**
     * Fluent variant of {@link #setMaxResults(String)}.
     *
     * @param maxResults the page-size limit, as a decimal string
     * @return this request, for call chaining
     */
    public ListVPCAssociationAuthorizationsRequest withMaxResults(String maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * Renders the populated fields of this request; intended for testing and debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getHostedZoneId() != null) {
            sb.append("HostedZoneId: ").append(getHostedZoneId()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getMaxResults() != null) {
            sb.append("MaxResults: ").append(getMaxResults());
        }
        return sb.append("}").toString();
    }

    /** Value equality over hostedZoneId, nextToken and maxResults. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListVPCAssociationAuthorizationsRequest)) {
            return false;
        }
        ListVPCAssociationAuthorizationsRequest other = (ListVPCAssociationAuthorizationsRequest) obj;
        return java.util.Objects.equals(getHostedZoneId(), other.getHostedZoneId())
                && java.util.Objects.equals(getNextToken(), other.getNextToken())
                && java.util.Objects.equals(getMaxResults(), other.getMaxResults());
    }

    /**
     * Hash consistent with {@link #equals(Object)}; Objects.hash produces the same
     * 31-based rolling hash as the previously generated implementation.
     */
    @Override
    public int hashCode() {
        return java.util.Objects.hash(getHostedZoneId(), getNextToken(), getMaxResults());
    }

    @Override
    public ListVPCAssociationAuthorizationsRequest clone() {
        return (ListVPCAssociationAuthorizationsRequest) super.clone();
    }
}
| dagnir/aws-sdk-java | aws-java-sdk-route53/src/main/java/com/amazonaws/services/route53/model/ListVPCAssociationAuthorizationsRequest.java | Java | apache-2.0 | 11,109 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.invertedindex.index;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.roaringbitmap.RoaringBitmap;
/**
* @author yangli9
*/
public interface ColumnValueContainer {

    /** Appends one encoded column value; only valid before {@link #closeForChange()}. */
    void append(ImmutableBytesWritable valueBytes);

    /** Freezes the container; read operations become valid afterwards. */
    void closeForChange();

    /** @return the number of values held */
    int getSize();

    // works only after closeForChange()
    void getValueAt(int i, ImmutableBytesWritable valueBytes);

    /**
     * @return bitmap of row positions whose value id falls between startId and endId
     *         (bounds presumably inclusive — confirm against implementations)
     */
    RoaringBitmap getBitMap(Integer startId, Integer endId);

    /** @return the largest value id held by this container */
    int getMaxValueId();
}
| lemire/incubator-kylin | invertedindex/src/main/java/org/apache/kylin/invertedindex/index/ColumnValueContainer.java | Java | apache-2.0 | 1,361 |
/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
* Copyright 2014-2020 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _WIN32
#include <cerrno>
#endif
#include "connect.h"
#include "ioutils.h"
#include "hostlist.h"
#include "iotable.h"
#include "ssl.h"
/**
 * Map an OS-level errno from a connect() attempt onto the simplified
 * lcbio_CSERR classification used by the connection state machine.
 */
lcbio_CSERR lcbio_mkcserr(int syserr)
{
    switch (syserr) {
        case 0:
            return LCBIO_CSERR_CONNECTED;
        case EINTR:
            return LCBIO_CSERR_INTR;
        case EWOULDBLOCK:
#ifdef USE_EAGAIN
        case EAGAIN:
#endif
        case EINPROGRESS:
        case EALREADY:
            /* connection attempt still in flight; caller should wait and retry */
            return LCBIO_CSERR_BUSY;
        case EISCONN:
            return LCBIO_CSERR_CONNECTED;
#ifdef _WIN32
        case EINVAL:
            return LCBIO_CSERR_EINVAL;
#endif
        default:
            return LCBIO_CSERR_EFAIL;
    }
}
/**
 * Record a *hard* failure code into *out. Transient/in-progress codes
 * (EINTR, EWOULDBLOCK/EAGAIN, EINVAL, EINPROGRESS, EISCONN, EALREADY) are
 * deliberately ignored so that a previously stored hard error is preserved.
 */
void lcbio_mksyserr(lcbio_OSERR in, lcbio_OSERR *out)
{
    switch (in) {
        case EINTR:
        case EWOULDBLOCK:
#ifdef USE_EAGAIN
        case EAGAIN:
#endif
        case EINVAL:
        case EINPROGRESS:
        case EISCONN:
        case EALREADY:
            /* transient: leave *out untouched */
            return;
        default:
            *out = in;
            break;
    }
}
static lcb_STATUS ioerr2lcberr(lcbio_OSERR in, const lcb_settings *settings)
{
switch (in) {
case 0:
return LCB_ERR_SOCKET_SHUTDOWN;
case ECONNREFUSED:
return LCB_ERR_CONNECTION_REFUSED;
case ENETUNREACH:
case EHOSTUNREACH:
case EHOSTDOWN:
return LCB_ERR_NODE_UNREACHABLE;
case EMFILE:
case ENFILE:
return LCB_ERR_FD_LIMIT_REACHED;
case EADDRINUSE:
case EADDRNOTAVAIL:
return LCB_ERR_CANNOT_GET_PORT;
case ECONNRESET:
case ECONNABORTED:
return LCB_ERR_CONNECTION_RESET;
default:
lcb_log(settings, "lcbio", LCB_LOG_WARN, __FILE__, __LINE__,
"OS errno %d (%s) does not have a direct client error code equivalent. Using NETWORK_ERROR", in,
strerror(in));
return LCB_ERR_NETWORK;
}
}
/**
 * Public errno-to-library-error translation. When detailed network errors are
 * disabled in the settings, every code collapses to LCB_ERR_NETWORK (the
 * detailed mapping is still logged for diagnosis).
 */
lcb_STATUS lcbio_mklcberr(lcbio_OSERR in, const lcb_settings *settings)
{
    if (settings->detailed_neterr == 0) {
        lcb_log(settings, "lcbio", LCB_LOG_WARN, __FILE__, __LINE__, "Translating errno=%d (%s), %s to LCB_ERR_NETWORK",
                in, strerror(in), lcb_strerror_short(ioerr2lcberr(in, settings)));
        return LCB_ERR_NETWORK;
    }
    return ioerr2lcberr(in, settings);
}
/**
 * Walk the addrinfo chain and return the first event-model socket that can be
 * created, leaving *ai at the entry that succeeded. *connerr is cleared up
 * front and receives the errno of the most recent failed attempt; on total
 * failure INVALID_SOCKET is returned.
 */
lcb_socket_t lcbio_E_ai2sock(lcbio_TABLE *io, struct addrinfo **ai, int *connerr)
{
    lcb_socket_t ret = INVALID_SOCKET;
    *connerr = 0;
    for (; *ai; *ai = (*ai)->ai_next) {
        ret = io->E_socket(*ai);
        if (ret != INVALID_SOCKET) {
            return ret;
        } else {
            *connerr = io->get_errno();
        }
    }
    return ret;
}
/**
 * Completion-model counterpart of lcbio_E_ai2sock(): walk the addrinfo chain
 * and return the first socket that can be created, leaving *ai at the entry
 * that succeeded.
 *
 * Fix: *connerr is now cleared up front, mirroring lcbio_E_ai2sock(). The
 * previous version left the output parameter uninitialized when the very
 * first candidate succeeded, so callers reading it on success saw garbage.
 */
lcb_sockdata_t *lcbio_C_ai2sock(lcbio_TABLE *io, struct addrinfo **ai, int *connerr)
{
    lcb_sockdata_t *ret = nullptr;
    *connerr = 0;
    for (; *ai; *ai = (*ai)->ai_next) {
        ret = io->C_socket(*ai);
        if (ret) {
            return ret;
        } else {
            *connerr = IOT_ERRNO(io);
        }
    }
    return ret;
}
/**
 * Thin wrapper over getnameinfo() that always requests numeric host and port
 * strings (no DNS lookups). Returns getnameinfo()'s result: 0 on success,
 * a non-zero EAI_* code on failure.
 */
static int saddr_to_host_and_port(struct sockaddr *saddr, int len, char *host, lcb_size_t nhost, char *port,
                                  lcb_size_t nport)
{
    return getnameinfo(saddr, len, host, nhost, port, nport, NI_NUMERICHOST | NI_NUMERICSERV);
}
static int saddr_to_string(struct sockaddr *saddr, int len, char *buf, lcb_size_t nbuf)
{
char h[NI_MAXHOST + 1];
char p[NI_MAXSERV + 1];
int rv;
rv = saddr_to_host_and_port(saddr, len, h, sizeof(h), p, sizeof(p));
if (rv < 0) {
return 0;
}
if (snprintf(buf, nbuf, "%s;%s", h, p) < 0) {
return 0;
}
return 1;
}
/**
 * Convert the cached local sockaddr into printable host/port strings inside
 * sock->ep_local, and the combined "host:port" form in
 * sock->ep_local_host_and_port. Address families other than AF_INET/AF_INET6
 * leave ep_local unchanged.
 *
 * NOTE(review): strncpy does not NUL-terminate when the source exactly fills
 * the destination; presumably ep_local.host is sized so a NI_MAXHOST string
 * always fits — confirm against the struct definition.
 */
static void lcbio_cache_local_name(lcbio_CONNINFO *sock)
{
    char addr_str[NI_MAXHOST + 1];
    switch (sock->sa_local.ss_family) {
        case AF_INET: {
            auto *addr = (struct sockaddr_in *)&sock->sa_local;
            inet_ntop(AF_INET, &(addr->sin_addr), addr_str, sizeof(addr_str));
            strncpy(sock->ep_local.host, addr_str, sizeof(sock->ep_local.host));
            snprintf(sock->ep_local.port, sizeof(sock->ep_local.port), "%d", (int)ntohs(addr->sin_port));
        } break;
        case AF_INET6: {
            auto *addr = (struct sockaddr_in6 *)&sock->sa_local;
            inet_ntop(AF_INET6, &(addr->sin6_addr), addr_str, sizeof(addr_str));
            strncpy(sock->ep_local.host, addr_str, sizeof(sock->ep_local.host));
            snprintf(sock->ep_local.port, sizeof(sock->ep_local.port), "%d", (int)ntohs(addr->sin6_port));
        } break;
    }
    snprintf(sock->ep_local_host_and_port, sizeof(sock->ep_local_host_and_port), "%s:%s", sock->ep_local.host,
             sock->ep_local.port);
}
/**
 * Populate sock->info with the local and remote socket addresses, using the
 * completion (v1 nameinfo callback) or event (getsockname/getpeername) API
 * depending on the IO model. On any failure the info struct is left
 * unpopulated (info->naddr stays unset) and the function returns silently.
 */
void lcbio__load_socknames(lcbio_SOCKET *sock)
{
    int n_salocal, n_saremote, rv;
    struct lcb_nameinfo_st ni {
    };
    lcbio_CONNINFO *info = sock->info;
    n_salocal = sizeof(info->sa_local);
    n_saremote = sizeof(info->sa_remote);
    ni.local.name = (struct sockaddr *)&info->sa_local;
    ni.local.len = &n_salocal;
    ni.remote.name = (struct sockaddr *)&info->sa_remote;
    ni.remote.len = &n_saremote;
    if (!IOT_IS_EVENT(sock->io)) {
        if (!sock->u.sd) {
            return;
        }
        rv = IOT_V1(sock->io).nameinfo(IOT_ARG(sock->io), sock->u.sd, &ni);
        /* NOTE(review): ni.local.len / ni.remote.len are assigned stack
         * addresses above and never reassigned, so these nullptr comparisons
         * can never be true; presumably the intent was to validate the
         * *lengths* the plugin wrote back — confirm against the v1 plugin
         * contract before changing. */
        if (ni.local.len == nullptr || ni.remote.len == nullptr || rv < 0) {
            return;
        }
    } else {
        socklen_t sl_tmp = sizeof(info->sa_local);
        if (sock->u.fd == INVALID_SOCKET) {
            return;
        }
        rv = getsockname(sock->u.fd, ni.local.name, &sl_tmp);
        n_salocal = sl_tmp;
        if (rv < 0) {
            return;
        }
        rv = getpeername(sock->u.fd, ni.remote.name, &sl_tmp);
        n_saremote = sl_tmp;
        if (rv < 0) {
            return;
        }
    }
    info->naddr = n_salocal;
    lcbio_cache_local_name(info);
}
/**
 * Fill nistrs with printable "host;port" strings for both endpoints of the
 * socket. Returns 1 on success; 0 when no address info has been loaded or
 * either endpoint cannot be rendered.
 */
int lcbio_get_nameinfo(lcbio_SOCKET *sock, struct lcbio_NAMEINFO *nistrs)
{
    lcbio_CONNINFO *info = sock->info;
    if (info == nullptr || info->naddr == 0) {
        /* lcbio__load_socknames() never ran, or it failed */
        return 0;
    }
    if (!saddr_to_string((struct sockaddr *)&info->sa_remote, info->naddr, nistrs->remote, sizeof(nistrs->remote))) {
        return 0;
    }
    return saddr_to_string((struct sockaddr *)&info->sa_local, info->naddr, nistrs->local, sizeof(nistrs->local)) ? 1
                                                                                                                  : 0;
}
/**
 * Ask the underlying IO plugin whether the transport has been closed by the
 * peer, dispatching on the event vs. completion model of the IO table.
 */
int lcbio_is_netclosed(lcbio_SOCKET *sock, int flags)
{
    lcbio_pTABLE iot = sock->io;
    return iot->is_E() ? iot->E_check_closed(sock->u.fd, flags) : iot->C_check_closed(sock->u.sd, flags);
}
/**
 * Enable (set to 1) a boolean socket option via the IO plugin's cntl
 * interface. Returns LCB_ERR_UNSUPPORTED_OPERATION when the plugin exposes
 * no cntl hook, otherwise translates the plugin errno on failure.
 */
lcb_STATUS lcbio_enable_sockopt(lcbio_SOCKET *s, int cntl)
{
    lcbio_pTABLE iot = s->io;
    int rv;
    int value = 1;

    if (!iot->has_cntl()) {
        return LCB_ERR_UNSUPPORTED_OPERATION;
    }

    /* dispatch on event vs. completion model */
    if (iot->is_E()) {
        rv = iot->E_cntl(s->u.fd, LCB_IO_CNTL_SET, cntl, &value);
    } else {
        rv = iot->C_cntl(s->u.sd, LCB_IO_CNTL_SET, cntl, &value);
    }
    if (rv != 0) {
        return lcbio_mklcberr(IOT_ERRNO(iot), s->settings);
    } else {
        return LCB_SUCCESS;
    }
}
/** Human-readable name for an LCB_IO_CNTL_* option code; used for logging. */
const char *lcbio_strsockopt(int cntl)
{
    switch (cntl) {
        case LCB_IO_CNTL_TCP_KEEPALIVE:
            return "TCP_KEEPALIVE";
        case LCB_IO_CNTL_TCP_NODELAY:
            return "TCP_NODELAY";
        default:
            return "FIXME: Unknown option";
    }
}
/** @return 1 when the library was built with SSL support, 0 otherwise. */
int lcbio_ssl_supported(void)
{
#ifdef LCB_NO_SSL
    return 0;
#else
    return 1;
#endif
}
/**
 * Fallback SSL-context factory for builds without SSL: always fails with
 * LCB_ERR_SDK_FEATURE_UNAVAILABLE (when errp is provided) and returns no
 * context.
 */
lcbio_pSSLCTX lcbio_ssl_new__fallback(const char *, const char *, const char *, int, lcb_STATUS *errp, lcb_settings *)
{
    if (errp) {
        *errp = LCB_ERR_SDK_FEATURE_UNAVAILABLE;
    }
    return nullptr;
}
#ifdef LCB_NO_SSL
/* Stub implementations compiled when SSL support is disabled at build time.
 * Mutating operations report LCB_ERR_SDK_FEATURE_UNAVAILABLE; query-style
 * operations behave as if the socket is simply not SSL-enabled. */
void lcbio_ssl_free(lcbio_pSSLCTX) {}

lcb_STATUS lcbio_ssl_apply(lcbio_SOCKET *, lcbio_pSSLCTX)
{
    return LCB_ERR_SDK_FEATURE_UNAVAILABLE;
}

/* never SSL in a no-SSL build */
int lcbio_ssl_check(lcbio_SOCKET *)
{
    return 0;
}

lcb_STATUS lcbio_ssl_get_error(lcbio_SOCKET *)
{
    return LCB_SUCCESS;
}

void lcbio_ssl_global_init(void) {}

lcb_STATUS lcbio_sslify_if_needed(lcbio_SOCKET *, lcb_settings *)
{
    return LCB_SUCCESS;
}
#endif
| couchbase/libcouchbase | src/lcbio/ioutils.cc | C++ | apache-2.0 | 8,898 |
package org.valuereporter.observation;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.valuereporter.QueryOperations;
import org.valuereporter.WriteOperations;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.List;
/**
* @author <a href="mailto:erik-dev@fjas.no">Erik Drolshammer</a>
*/
@Component
@Path("/observedmethods")
public class ObservedMethodsResouce {
    private static final Logger log = LoggerFactory.getLogger(ObservedMethodsResouce.class);
    // Read-side operations used to query stored observations.
    private final QueryOperations queryOperations;
    // Write-side operations used to persist incoming observations.
    private final WriteOperations writeOperations;
    // Jackson mapper for JSON (de)serialization of request/response bodies.
    private final ObjectMapper mapper;
/**
@Autowired
public ObservedMethodsResouce(QueryOperations queryOperations, WriteOperations writeOperations, ObjectMapper mapper) {
this.queryOperations = queryOperations;
this.writeOperations = writeOperations;
this.mapper = mapper;
}
**/
    @Autowired
    public ObservedMethodsResouce(ObservationsService observationsService, ObjectMapper mapper) {
        // ObservationsService implements both operation interfaces; expose it through
        // the narrower read/write views used by the resource methods.
        this.queryOperations = observationsService;
        this.writeOperations = observationsService;
        this.mapper = mapper;
    }
    //http://localhost:4901/reporter/observe/observedmethods/{prefix}/{name}
    /**
     * Returns all observations recorded under the given prefix for a fully
     * qualified method name.
     *
     * @param prefix prefix used to identify the running process
     * @param name fully qualified method identifier, i.e. package.classname.method
     * @return HTTP 200 with a JSON array of observed methods, or HTTP 500 if the
     *         result cannot be serialized to JSON
     */
    @GET
    @Path("/{prefix}/{name}")
    @Produces(MediaType.APPLICATION_JSON)
    public Response findObservationsByName(@PathParam("prefix") String prefix,@PathParam("name") String name) {
        final List<ObservedMethod> observedMethods;
        //Should also support no queryParams -> findAll
        if (name != null ) {
            log.trace("findObservationsByName name={}", name);
            observedMethods = queryOperations.findObservationsByName(prefix, name);
        } else {
            // NOTE(review): JAX-RS will normally not route here without a name path
            // segment, so this branch is defensive; the exception surfaces as HTTP 500.
            throw new UnsupportedOperationException("You must supply a name. <package.classname.method>");
        }
        Writer strWriter = new StringWriter();
        try {
            mapper.writeValue(strWriter, observedMethods);
        } catch (IOException e) {
            log.error("Could not convert {} ObservedMethod to JSON.", observedMethods.size(), e);
            return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build();
        }
        return Response.ok(strWriter.toString()).build();
    }
//http://localhost:4901/reporter/observe/observedmethods/{prefix}
@POST
@Path("/{prefix}")
@Produces(MediaType.APPLICATION_JSON)
public Response addObservationMethod(@PathParam("prefix") String prefix, String jsonBody){
log.trace("addObservationMethod prefix {} , jsonBody {}.", prefix, jsonBody);
List<ObservedMethod> observedMethods = null;
try {
observedMethods = mapper.readValue(jsonBody, new TypeReference<ArrayList<ObservedMethodJson>>(){ });
if (observedMethods != null) {
for (ObservedMethod observedMethod : observedMethods) {
observedMethod.setPrefix(prefix);
}
}
} catch (IOException e) {
log.warn("Unexpected error trying to produce list of ObservedMethod from \n prefix {} \n json {}, \n Reason {}",prefix, jsonBody, e.getMessage());
return Response.status(Response.Status.NOT_ACCEPTABLE).entity("Error converting to requested format.").build();
}
long updatedCount = writeOperations.addObservations(prefix,observedMethods);
String message = "added " + updatedCount + " observedMethods.";
Writer strWriter = new StringWriter();
try {
mapper.writeValue(strWriter, message);
} catch (IOException e) {
log.error("Could not convert {} to JSON.", updatedCount, e);
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build();
}
return Response.ok(strWriter.toString()).build();
}
}
| altran/Valuereporter | src/main/java/org/valuereporter/observation/ObservedMethodsResouce.java | Java | apache-2.0 | 4,690 |
package com.lee.game;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
import com.lee.base.activity.BaseActivity;
import com.lee.base.application.PackageNameContainer;
import com.noobyang.log.LogUtil;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * Main Activity: lists sample-code activities discovered through the
 * {@code ACTION_SAMPLE_CODE} intent action and presents them as a browsable
 * package-path tree. Selecting a folder relaunches this activity with the
 * deeper path in {@code EXTRA_NAME_PATH}; selecting a leaf starts the sample.
 * <p/>
 * Created by LiYang on 2019/4/8.
 */
public class MainActivity extends BaseActivity {

    // Intent action every listable sample activity declares in its filter.
    private static final String ACTION_SAMPLE_CODE = "com.lee.main.action.SAMPLE_CODE_GAME";
    // Extra carrying the dot-separated package path currently being browsed.
    private static final String EXTRA_NAME_PATH = "com.lee.main.Path";
    private static final String PATH_DIVIDED_SYMBOLS = ".";
    private static final String PATH_DIVIDED_SYMBOLS_REGEX = "\\.";

    @BindView(R.id.tv_path)
    TextView tvPath;
    @BindView(R.id.rv_sample_code)
    RecyclerView rvSampleCode;

    private PackageManager packageManager;
    private List<SampleCodeEntity> sampleCodeEntities;
    private SampleCodeAdapter sampleCodeAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        ButterKnife.bind(this);
        initData();
        initView();
    }

    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        // Folder items re-target this activity; adopt the new intent so
        // updateSampleCodes() reads the new path extra.
        setIntent(intent);
        updateSampleCodes();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

    // Prepares the adapter and package manager before the views are wired up.
    private void initData() {
        packageManager = getPackageManager();
        sampleCodeAdapter = new SampleCodeAdapter(this, sampleCodeEntities, itemClickListener);
    }

    private void initView() {
        LinearLayoutManager layoutManager = new LinearLayoutManager(this);
        layoutManager.setOrientation(LinearLayoutManager.VERTICAL);
        rvSampleCode.setLayoutManager(layoutManager);
        rvSampleCode.setAdapter(sampleCodeAdapter);
        updateSampleCodes();
    }

    // Rebuilds the list for the path carried by the current intent and updates
    // the breadcrumb label.
    private void updateSampleCodes() {
        String path = getIntent().getStringExtra(EXTRA_NAME_PATH);
        initSampleCodes(path);
        sampleCodeAdapter.setData(sampleCodeEntities);
        sampleCodeAdapter.notifyDataSetChanged();
        setPathText(path);
    }

    // Shows the app name at the root, otherwise the current package path.
    private void setPathText(String path) {
        if (TextUtils.isEmpty(path)) {
            tvPath.setText(R.string.app_name);
        } else {
            tvPath.setText(path);
        }
    }

    /**
     * Populates {@link #sampleCodeEntities} for the given path: resolved
     * activities directly under the path become activity items; deeper
     * entries contribute one folder item per distinct next path segment.
     *
     * @param path dot-separated package path, or null/empty for the root
     */
    protected void initSampleCodes(String path) {
        if (sampleCodeEntities == null) {
            sampleCodeEntities = new ArrayList<>();
        } else {
            sampleCodeEntities.clear();
        }
        List<ResolveInfo> sampleCodeResolveInfoList = getSampleCodeResolveInfoList();
        if (sampleCodeResolveInfoList == null || sampleCodeResolveInfoList.size() == 0) {
            return;
        }
        String[] prefixPaths;
        String currentPrefixPath;
        // Tracks folder labels already emitted so each folder appears once.
        Map<String, Boolean> folderLabel = new HashMap<>();
        String label;
        String[] labelPath;
        String sampleCodeLabel;
        for (ResolveInfo sampleCodeResolveInfo : sampleCodeResolveInfoList) {
            if (TextUtils.isEmpty(path)) {
                prefixPaths = null;
                currentPrefixPath = null;
            } else {
                path = getRelativeName(path);
                prefixPaths = path.split(PATH_DIVIDED_SYMBOLS_REGEX);
                currentPrefixPath = path + PATH_DIVIDED_SYMBOLS;
            }
            label = getRelativeName(sampleCodeResolveInfo.activityInfo.name);
            LogUtil.d("getData currentPrefixPath = " + currentPrefixPath + "---label = " + label);
            if (TextUtils.isEmpty(currentPrefixPath) || label.startsWith(currentPrefixPath)) {
                labelPath = label.split(PATH_DIVIDED_SYMBOLS_REGEX);
                int prefixPathsLen = prefixPaths == null ? 0 : prefixPaths.length;
                sampleCodeLabel = labelPath[prefixPathsLen];
                if (prefixPathsLen == labelPath.length - 1) {
                    // activity
                    addActivityItem(sampleCodeEntities, sampleCodeLabel,
                            sampleCodeResolveInfo.activityInfo.applicationInfo.packageName,
                            sampleCodeResolveInfo.activityInfo.name);
                } else {
                    // folder
                    if (folderLabel.get(sampleCodeLabel) == null) {
                        addFolderItem(sampleCodeEntities, sampleCodeLabel, currentPrefixPath);
                        folderLabel.put(sampleCodeLabel, true);
                    }
                }
            }
        }
        Collections.sort(sampleCodeEntities, comparator);
    }

    // Strips any known application package prefix so only the sample-relative
    // path remains (e.g. "com.lee.game.Foo" -> "Foo").
    private String getRelativeName(String className) {
        if (TextUtils.isEmpty(className)) {
            return className;
        }
        for (String packageName : PackageNameContainer.getPackageNames()) {
            if (className.startsWith(packageName + PATH_DIVIDED_SYMBOLS)) {
                return className.substring(packageName.length() + 1);
            }
        }
        return className;
    }

    // Resolves every activity advertising the sample-code action/category.
    private List<ResolveInfo> getSampleCodeResolveInfoList() {
        Intent sampleCodeIntent = new Intent(ACTION_SAMPLE_CODE, null);
        sampleCodeIntent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
        return packageManager.queryIntentActivities(sampleCodeIntent, 0);
    }

    // Locale-aware alphabetical ordering of list entries by title.
    private final static Comparator<SampleCodeEntity> comparator =
            new Comparator<SampleCodeEntity>() {
                private final Collator collator = Collator.getInstance();

                public int compare(SampleCodeEntity entity1, SampleCodeEntity entity2) {
                    return collator.compare(entity1.getTitle(), entity2.getTitle());
                }
            };

    private void addActivityItem(List<SampleCodeEntity> data, String sampleCodeLabel,
                                 String packageName, String className) {
        Intent activityIntent = new Intent();
        activityIntent.setClassName(packageName, className);
        addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_ACTIVITY, sampleCodeLabel, activityIntent);
    }

    private void addFolderItem(List<SampleCodeEntity> data, String sampleCodeLabel,
                               String currentPrefixPath) {
        // Folder intents point back at this activity with the extended path.
        Intent folderIntent = new Intent();
        folderIntent.setClass(this, MainActivity.class);
        String path = TextUtils.isEmpty(currentPrefixPath) ? sampleCodeLabel : currentPrefixPath + sampleCodeLabel;
        folderIntent.putExtra(EXTRA_NAME_PATH, path);
        addItem(data, SampleCodeEntity.SampleCodeType.SAMPLE_CODE_TYPE_FOLDER, sampleCodeLabel, folderIntent);
    }

    protected void addItem(List<SampleCodeEntity> data, int type, String title, Intent intent) {
        SampleCodeEntity entity = new SampleCodeEntity(type, title, intent);
        data.add(entity);
    }

    // Launches whichever entity (activity or folder) the user tapped.
    private SampleCodeAdapter.OnItemClickListener itemClickListener =
            new SampleCodeAdapter.OnItemClickListener() {
                @Override
                public void onItemClick(View view, int position) {
                    SampleCodeEntity entity = sampleCodeEntities.get(position);
                    if (entity != null) {
                        Intent intent = entity.getIntent();
                        intent.addCategory(Intent.CATEGORY_SAMPLE_CODE);
                        startActivity(intent);
                    }
                }
            };
}
| noobyang/AndroidStudy | game/src/main/java/com/lee/game/MainActivity.java | Java | apache-2.0 | 7,946 |
<?php
namespace Obj;
class Encoder {

    // Raw binary key bytes (decoded from the hex string stored in the key file).
    private $key;
    // mcrypt cipher constant, e.g. MCRYPT_RIJNDAEL_128 (AES block cipher).
    private $cipher;
    // mcrypt block-mode constant, e.g. MCRYPT_MODE_CBC.
    private $mode;

    /**
     * Builds an encoder around a hex-encoded key file.
     *
     * NOTE(review): this class depends on the mcrypt extension, which was
     * deprecated in PHP 7.1 and removed in PHP 7.2. Porting to openssl_* would
     * change the ciphertext layout (and break existing stored data), so the
     * dependency is flagged here rather than replaced.
     *
     * @param string $key    path to a file whose contents are the key as a hex string
     * @param int    $cipher mcrypt cipher constant
     * @param string $mode   mcrypt mode constant
     */
    public function __construct($key, $cipher = MCRYPT_RIJNDAEL_128, $mode = MCRYPT_MODE_CBC) {
        $keyString = file_get_contents($key, true);
        // Convert the hex representation into raw key bytes.
        $this->key = pack('H*', $keyString);
        $this->cipher = $cipher;
        $this->mode = $mode;
    }

    /**
     * Hashes $input with the given algorithm (md5 by default).
     * NOTE(review): md5 is acceptable for fingerprinting only — not for
     * passwords or attacker-resistant integrity checks.
     */
    public static function hash($input, $hash = 'md5') {
        return hash($hash, $input);
    }

    /**
     * Encrypts $plaintext and returns base64(IV . ciphertext); the random IV
     * is prepended so decrypt() can recover it.
     */
    public function encrypt($plaintext) {
        // NOTE(review): $key_size is computed but never used.
        $key_size = strlen($this->key);

        // create a random IV to use with CBC encoding
        // NOTE(review): MCRYPT_RAND is a weak entropy source; MCRYPT_DEV_URANDOM
        // would be preferable where available.
        $iv_size = mcrypt_get_iv_size($this->cipher, $this->mode);
        $iv = mcrypt_create_iv($iv_size, MCRYPT_RAND);

        $ciphertext = mcrypt_encrypt($this->cipher, $this->key,
            $plaintext, $this->mode, $iv);
        $ciphertext = $iv . $ciphertext;

        // encode the resulting cipher text so it can be represented by a string
        $ciphertext_base64 = base64_encode($ciphertext);

        return $ciphertext_base64;
    }

    /**
     * Reverses encrypt(): splits the base64 payload into IV + ciphertext and
     * decrypts. Zero-padding added by mcrypt may remain on the plaintext.
     */
    public function decrypt($ciphertext_base64) {
        $ciphertext_dec = base64_decode($ciphertext_base64);
        $iv_size = mcrypt_get_iv_size($this->cipher, $this->mode);

        // retrieves the IV, iv_size should be created using mcrypt_get_iv_size()
        $iv_dec = substr($ciphertext_dec, 0, $iv_size);

        // retrieves the cipher text (everything except the $iv_size in the front)
        $ciphertext_dec = substr($ciphertext_dec, $iv_size);

        // may remove 00h valued characters from end of plain text
        $plaintext_dec = mcrypt_decrypt($this->cipher, $this->key,
            $ciphertext_dec, $this->mode, $iv_dec);

        return $plaintext_dec;
    }

    /**
     * Generates a random string from the given charset of input length.
     * NOTE(review): uses mt_rand, which is not cryptographically secure — do
     * not use the output as a secret token.
     */
    public static function randomString($length = 10, $set = null) {
        if ($set == null) {
            $set = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
        }
        $len = strlen($set);
        $str = '';
        while ($length--) {
            $str .= $set[mt_rand(0, $len - 1)];
        }
        return $str;
    }
}
?> | bnewcomer/code-samples | PHP_Projects/api-php/Obj/Encoder.php | PHP | apache-2.0 | 2,134 |
<?php
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apigee\Edge\Api\Monetization\Controller;
use Apigee\Edge\Api\Monetization\Entity\BalanceInterface;
use Apigee\Edge\Api\Monetization\Entity\PrepaidBalanceInterface;
use Apigee\Edge\Controller\EntityControllerInterface;
interface PrepaidBalanceControllerInterface extends EntityControllerInterface, PaginatedEntityListingControllerInterface
{
    /**
     * Gets the prepaid balance for the given currency, or null when no balance
     * exists for that currency.
     *
     * @param string $currencyCode
     *
     * @return \Apigee\Edge\Api\Monetization\Entity\BalanceInterface|null
     */
    public function getByCurrency(string $currencyCode): ?BalanceInterface;

    /**
     * Adds the given amount to the prepaid balance in the given currency and
     * returns the updated balance.
     *
     * @param float $amount
     * @param string $currencyCode
     *
     * @return \Apigee\Edge\Api\Monetization\Entity\BalanceInterface
     */
    public function topUpBalance(float $amount, string $currencyCode): BalanceInterface;

    /**
     * Enables and modifies recurring payment settings.
     *
     * @param string $currencyCode
     * @param string $paymentProviderId
     * @param float $replenishAmount
     * @param float $recurringAmount
     *
     * @return \Apigee\Edge\Api\Monetization\Entity\BalanceInterface
     */
    public function setupRecurringPayments(string $currencyCode, string $paymentProviderId, float $replenishAmount, float $recurringAmount): BalanceInterface;

    /**
     * Deactivate recurring payments.
     *
     * @param string $currencyCode
     * @param string $paymentProviderId
     *
     * @return \Apigee\Edge\Api\Monetization\Entity\BalanceInterface
     */
    public function disableRecurringPayments(string $currencyCode, string $paymentProviderId): BalanceInterface;

    /**
     * Gets prepaid balances.
     *
     * @param \DateTimeImmutable $billingMonth
     *
     * @return \Apigee\Edge\Api\Monetization\Entity\PrepaidBalanceInterface[]
     */
    public function getPrepaidBalance(\DateTimeImmutable $billingMonth): array;

    /**
     * Gets prepaid balance by currency.
     *
     * @param string $currencyCode
     * @param \DateTimeImmutable $billingMonth
     *
     * @return \Apigee\Edge\Api\Monetization\Entity\PrepaidBalanceInterface|null
     */
    public function getPrepaidBalanceByCurrency(string $currencyCode, \DateTimeImmutable $billingMonth): ?PrepaidBalanceInterface;
}
| apigee/apigee-client-php | src/Api/Monetization/Controller/PrepaidBalanceControllerInterface.php | PHP | apache-2.0 | 2,834 |
using System;
using System.Globalization;
using System.Resources;
using System.Diagnostics;
using System.Reflection;
namespace Routrek.SSHC
{
/// <summary>
/// Localized string lookup backed by a <see cref="ResourceManager"/>.
/// (Original summary was mojibake — likely Shift-JIS read with the wrong
/// encoding — and has been replaced with this English description.)
/// </summary>
internal class StringResources {
    // Resource bundle name; retained mainly for diagnostics.
    private string _resourceName;
    private ResourceManager _resMan;

    public StringResources(string name, Assembly asm) {
        _resourceName = name;
        LoadResourceManager(name, asm);
    }

    /// <summary>
    /// Looks up the localized string for <paramref name="id"/>. Any lookup
    /// failure is swallowed and a placeholder string is returned instead.
    /// NOTE(review): the original inline comment was garbled; it appeared to
    /// suggest caching values here if lookups prove slow — confirm.
    /// </summary>
    public string GetString(string id)
    {
        try
        {
            return _resMan.GetString(id);
        }
        catch
        {
            return "error loading string";
        }
    }

    private void LoadResourceManager(string name, Assembly asm) {
        // The (garbled) original comments indicated only English/Japanese were
        // considered; the culture-specific branch below is disabled, so the
        // neutral resource set is always loaded. 'ci' is only read by that
        // commented-out branch.
        CultureInfo ci = System.Threading.Thread.CurrentThread.CurrentUICulture;
        //if(ci.Name.StartsWith("ja"))
        //_resMan = new ResourceManager(name+"_ja", asm);
        //else
        _resMan = new ResourceManager(name, asm);
    }
}
} | ehazlett/sshconsole | TerminalControl/StringResource.cs | C# | apache-2.0 | 1,022 |
/*
* Copyright 2014 http://Bither.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.bither.util;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import net.bither.BitherApplication;
import net.bither.bitherj.utils.Utils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
/**
 * Static helpers for locating and manipulating Bither's files: SD-card key
 * backups, on-disk caches (market data, ads, images), simple Java object
 * serialization, and generic copy/delete utilities.
 */
public class FileUtil {
    // old tickerName file
    private static final String HUOBI_TICKER_NAME = "huobi.ticker";
    private static final String BITSTAMP_TICKER_NAME = "bitstamp.ticker";
    private static final String BTCE_TICKER_NAME = "btce.ticker";
    private static final String OKCOIN_TICKER_NAME = "okcoin.ticker";
    private static final String CHBTC_TICKER_NAME = "chbtc.ticker";
    private static final String BTCCHINA_TICKER_NAME = "btcchina.ticker";

    private static final String BITHER_BACKUP_SDCARD_DIR = "BitherBackup";
    private static final String BITHER_BACKUP_ROM_DIR = "backup";
    private static final String BITHER_BACKUP_HOT_FILE_NAME = "keys";
    private static final String EXCAHNGE_TICKER_NAME = "exchange.ticker";
    private static final String EXCHANGE_KLINE_NAME = "exchange.kline";
    private static final String EXCHANGE_DEPTH_NAME = "exchange.depth";
    private static final String PRICE_ALERT = "price.alert";
    private static final String EXCHANGERATE = "exchangerate";
    private static final String CURRENCIES_RATE = "currencies_rate";
    // NOTE(review): "CAHER" / "EXCAHNGE" above are long-standing typos in
    // constant names only; the on-disk names are what matter.
    private static final String MARKET_CAHER = "mark";
    private static final String IMAGE_CACHE_DIR = "image";
    private static final String IMAGE_SHARE_FILE_NAME = "share.jpg";
    private static final String IMAGE_CACHE_UPLOAD = IMAGE_CACHE_DIR + "/upload";
    private static final String IMAGE_CACHE_612 = IMAGE_CACHE_DIR + "/612";
    private static final String IMAGE_CACHE_150 = IMAGE_CACHE_DIR + "/150";
    private static final String AD_CACHE = "ad";
    private static final String AD_NAME = "ad.json";
    private static final String AD_IMAGE_EN_CACHE = AD_CACHE + "/img_en";
    private static final String AD_IMAGE_ZH_CN_CACHE = AD_CACHE + "/img_zh_CN";
    private static final String AD_IMAGE_ZH_TW_CACHE = AD_CACHE + "/img_zh_TW";

    /**
     * sdCard exist
     */
    public static boolean existSdCardMounted() {
        String storageState = android.os.Environment.getExternalStorageState();
        if (Utils.isEmpty(storageState)) {
            return false;
        }
        return Utils.compareString(storageState,
                android.os.Environment.MEDIA_MOUNTED);
    }

    public static File getSDPath() {
        File sdDir = Environment.getExternalStorageDirectory();
        return sdDir;
    }

    // SD-card directory for cold-wallet backups; created on first use.
    public static File getBackupSdCardDir() {
        File backupDir = new File(getSDPath(), BITHER_BACKUP_SDCARD_DIR);
        if (!backupDir.exists()) {
            backupDir.mkdirs();
        }
        return backupDir;
    }

    // New timestamp-named .bak file for a cold backup (file is not created here).
    public static File getBackupFileOfCold() {
        File file = new File(getBackupSdCardDir(),
                DateTimeUtil.getNameForFile(System.currentTimeMillis())
                        + ".bak"
        );
        return file;
    }

    // All cold-backup files, newest first.
    public static List<File> getBackupFileListOfCold() {
        File dir = getBackupSdCardDir();
        List<File> fileList = new ArrayList<File>();
        File[] files = dir.listFiles();
        if (files != null && files.length > 0) {
            files = orderByDateDesc(files);
            for (File file : files) {
                if (StringUtil.checkBackupFileOfCold(file.getName())) {
                    fileList.add(file);
                }
            }
        }
        return fileList;
    }

    private static File getBackupRomDir() {
        File backupDir = new File(Utils.getWalletRomCache(), BITHER_BACKUP_ROM_DIR);
        if (!backupDir.exists()) {
            backupDir.mkdirs();
        }
        return backupDir;
    }

    public static File getBackupKeyOfHot() {
        File backupDir = getBackupRomDir();
        return new File(backupDir, BITHER_BACKUP_HOT_FILE_NAME);
    }

    /**
     * Returns (and creates if needed) a named cache directory; optionally drops
     * a .nomedia marker so the media scanner ignores it.
     */
    public static File getDiskDir(String dirName, Boolean createNomedia) {
        File dir = getDiskCacheDir(BitherApplication.mContext, dirName);
        if (!dir.exists()) {
            dir.mkdirs();
            if (createNomedia) {
                try {
                    File noMediaFile = new File(dir, ".nomedia");
                    noMediaFile.createNewFile();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return dir;
    }

    // Compresses the bitmap to the shared-image JPEG and returns its Uri.
    public static Uri saveShareImage(Bitmap bmp) {
        File dir = getDiskDir(IMAGE_CACHE_DIR, true);
        File jpg = new File(dir, IMAGE_SHARE_FILE_NAME);
        NativeUtil.compressBitmap(bmp, 85, jpg.getAbsolutePath(), true);
        return Uri.fromFile(jpg);
    }

    public static File getExternalCacheDir(Context context) {
        // if (SdkUtils.hasFroyo()) {
        //
        // return context.getCacheDir();
        // }
        // Before Froyo we need to construct the external cache dir ourselves
        final String cacheDir = "/Android/data/" + context.getPackageName()
                + "/cache/";
        return new File(Environment.getExternalStorageDirectory().getPath()
                + cacheDir);
    }

    // External cache dir when mounted/non-removable, else internal cache dir.
    public static File getDiskCacheDir(Context context, String uniqueName) {
        File extCacheDir = getExternalCacheDir(context);
        final String cachePath = (Environment.MEDIA_MOUNTED.equals(Environment
                .getExternalStorageState()) || !isExternalStorageRemovable())
                && extCacheDir != null ? extCacheDir.getPath() : context
                .getCacheDir().getPath();
        return new File(cachePath + File.separator + uniqueName);
    }

    @TargetApi(9)
    public static boolean isExternalStorageRemovable() {
        if (SdkUtils.hasGingerbread()) {
            return Environment.isExternalStorageRemovable();
        }
        // Pre-Gingerbread has no API for this; assume removable.
        return true;
    }

    private static File getMarketCache() {
        return getDiskDir(MARKET_CAHER, false);
    }

    public static File getAdImageEnDir() {
        return getDiskDir(AD_IMAGE_EN_CACHE, true);
    }

    public static File getAdImagZhCnDir() {
        return getDiskDir(AD_IMAGE_ZH_CN_CACHE, true);
    }

    public static File getAdImagZhTwDir() {
        return getDiskDir(AD_IMAGE_ZH_TW_CACHE, true);
    }

    private static File getAdDir() {
        return getDiskDir(AD_CACHE, false);
    }

    public static File getUploadImageDir() {
        return getDiskDir(IMAGE_CACHE_UPLOAD, true);
    }

    public static File getAvatarDir() {
        return getDiskDir(IMAGE_CACHE_612, true);
    }

    public static File getSmallAvatarDir() {
        return getDiskDir(IMAGE_CACHE_150, true);
    }

    public static File getExchangeRateFile() {
        File file = getDiskDir("", false);
        return new File(file, EXCHANGERATE);
    }

    public static File getCurrenciesRateFile() {
        File file = getDiskDir("", false);
        return new File(file, CURRENCIES_RATE);
    }

    public static File getTickerFile() {
        File file = getMarketCache();
        file = new File(file, EXCAHNGE_TICKER_NAME);
        return file;
    }

    public static File getPriceAlertFile() {
        File marketDir = getMarketCache();
        return new File(marketDir, PRICE_ALERT);
    }

    public static File getKlineFile() {
        File file = getMarketCache();
        file = new File(file, EXCHANGE_KLINE_NAME);
        return file;
    }

    public static File getDepthFile() {
        File file = getMarketCache();
        file = new File(file, EXCHANGE_DEPTH_NAME);
        return file;
    }

    public static File getAdFile() {
        File file = getAdDir();
        file = new File(file, AD_NAME);
        return file;
    }

    /**
     * Reads one Java-serialized object from the file, or null when the file is
     * missing or unreadable.
     * NOTE(review): the broad catch hides corruption; the ObjectInputStream is
     * never closed directly, though closing the underlying FileInputStream in
     * the finally block releases the descriptor.
     */
    @SuppressWarnings("resource")
    public static Object deserialize(File file) {
        FileInputStream fos = null;
        try {
            if (!file.exists()) {
                return null;
            }
            fos = new FileInputStream(file);
            ObjectInputStream ois;
            ois = new ObjectInputStream(fos);
            Object object = ois.readObject();
            return object;
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        } finally {
            try {
                if (fos != null) {
                    fos.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Writes the object to the file with Java serialization; errors are only
     * printed.
     * NOTE(review): fos is not closed on the exception path (no finally), a
     * descriptor leak if writeObject throws.
     */
    public static void serializeObject(File file, Object object) {
        FileOutputStream fos = null;
        try {
            fos = new FileOutputStream(file);
            ObjectOutputStream oos = new ObjectOutputStream(fos);
            oos.writeObject(object);
            oos.flush();
            fos.close();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Sorts in place by last-modified, newest first, and returns the array.
    public static File[] orderByDateDesc(File[] fs) {
        Arrays.sort(fs, new Comparator<File>() {
            public int compare(File f1, File f2) {
                long diff = f1.lastModified() - f2.lastModified();
                if (diff > 0) {
                    return -1;//-1 f1 before f2
                } else if (diff == 0) {
                    return 0;
                } else {
                    return 1;
                }
            }

            // NOTE(review): always-true equals violates the Object contract,
            // though Arrays.sort never relies on it here.
            public boolean equals(Object obj) {
                return true;
            }
        });
        return fs;
    }

    /**
     * Recursively copies a file or directory tree.
     * NOTE(review): on a mid-copy exception the streams are leaked (close()
     * happens inside the try) and the error is swallowed after printing, so
     * callers cannot tell a partial copy from success — confirm intent.
     */
    public static void copyFile(File src, File tar) throws Exception {
        if (src.isFile()) {
            BufferedInputStream bis = null;
            BufferedOutputStream bos = null;
            try {
                InputStream is = new FileInputStream(src);
                bis = new BufferedInputStream(is);
                OutputStream op = new FileOutputStream(tar);
                bos = new BufferedOutputStream(op);
                byte[] bt = new byte[8192];
                int len = bis.read(bt);
                while (len != -1) {
                    bos.write(bt, 0, len);
                    len = bis.read(bt);
                }
                bis.close();
                bos.close();
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
            }
        } else if (src.isDirectory()) {
            File[] files = src.listFiles();
            tar.mkdir();
            for (int i = 0;
                 i < files.length;
                 i++) {
                copyFile(files[i].getAbsoluteFile(),
                        new File(tar.getAbsoluteFile() + File.separator
                                + files[i].getName())
                );
            }
        } else {
            throw new FileNotFoundException();
        }
    }

    // Deletes a directory tree, then the directory itself.
    public static void delFolder(String folderPath) {
        try {
            delAllFile(folderPath);
            String filePath = folderPath;
            // NOTE(review): toString() on a String is a no-op, kept as-is.
            filePath = filePath.toString();
            java.io.File myFilePath = new java.io.File(filePath);
            myFilePath.delete();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Deletes every entry under path (recursing into subdirectories).
    private static void delAllFile(String path) {
        File file = new File(path);
        if (!file.exists()) {
            return;
        }
        if (!file.isDirectory()) {
            return;
        }
        String[] tempList = file.list();
        if (tempList == null) {
            return;
        }
        File temp = null;
        for (int i = 0;
             i < tempList.length;
             i++) {
            if (path.endsWith(File.separator)) {
                temp = new File(path + tempList[i]);
            } else {
                temp = new File(path + File.separator + tempList[i]);
            }
            if (temp.isFile()) {
                temp.delete();
            }
            if (temp.isDirectory()) {
                delAllFile(path + "/" + tempList[i]);
                delFolder(path + "/" + tempList[i]);
            }
        }
    }

    // Removes legacy per-exchange ticker caches superseded by exchange.ticker.
    public static void upgradeTickerFile() {
        File marketDir = getMarketCache();
        File file = new File(marketDir, BITSTAMP_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, BTCE_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, HUOBI_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, OKCOIN_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, CHBTC_TICKER_NAME);
        fileExistAndDelete(file);
        file = new File(marketDir, BTCCHINA_TICKER_NAME);
        fileExistAndDelete(file);
    }

    public static boolean fileExistAndDelete(File file) {
        return file.exists() && file.delete();
    }

    /**
     * Resolves a content Uri (or file Uri fallback) to a File via the media
     * store.
     * NOTE(review): managedQuery is deprecated, and the empty catch swallows
     * all failures, returning null — confirm callers handle null.
     */
    public static File convertUriToFile(Activity activity, Uri uri) {
        File file = null;
        try {
            String[] proj = {MediaStore.Images.Media.DATA};
            @SuppressWarnings("deprecation")
            Cursor actualimagecursor = activity.managedQuery(uri, proj, null,
                    null, null);
            if (actualimagecursor != null) {
                int actual_image_column_index = actualimagecursor
                        .getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
                actualimagecursor.moveToFirst();
                String img_path = actualimagecursor
                        .getString(actual_image_column_index);
                if (!Utils.isEmpty(img_path)) {
                    file = new File(img_path);
                }
            } else {
                file = new File(new URI(uri.toString()));
                if (file.exists()) {
                    return file;
                }
            }
        } catch (Exception e) {
        }
        return file;
    }

    // Reads the EXIF orientation tag and maps it to degrees (0/90/180/270).
    public static int getOrientationOfFile(String fileName) {
        int orientation = 0;
        try {
            ExifInterface exif = new ExifInterface(fileName);
            String orientationString = exif
                    .getAttribute(ExifInterface.TAG_ORIENTATION);
            if (Utils.isNubmer(orientationString)) {
                int orc = Integer.valueOf(orientationString);
                switch (orc) {
                    case ExifInterface.ORIENTATION_ROTATE_90:
                        orientation = 90;
                        break;
                    case ExifInterface.ORIENTATION_ROTATE_180:
                        orientation = 180;
                        break;
                    case ExifInterface.ORIENTATION_ROTATE_270:
                        orientation = 270;
                        break;
                    default:
                        break;
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        return orientation;
    }
}
| bither/bither-android | bither-android/src/net/bither/util/FileUtil.java | Java | apache-2.0 | 16,209 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace pool
{
static class Program
{
    /// <summary>
    /// Main entry point of the application. (Translated from the original
    /// Chinese comment.)
    /// </summary>
    [STAThread]
    static void Main()
    {
        // Standard WinForms bootstrap: configure rendering before any control
        // is created, then run the message loop on the main form.
        Application.EnableVisualStyles();
        Application.SetCompatibleTextRenderingDefault(false);
        Application.Run(new Form1());
    }
}
}
| knifecaojia/CoinsPro | Src/pool/Program.cs | C# | apache-2.0 | 494 |
package org.cobbzilla.util.jdbc;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class DbUrlUtil {

    /** Matches a PostgreSQL JDBC URL; group 1 captures the database name. */
    public static final Pattern JDBC_URL_REGEX = Pattern.compile("^jdbc:postgresql://[\\.\\w]+:\\d+/(.+)$");

    /**
     * Returns the given PostgreSQL JDBC URL with its database name replaced by
     * {@code dbName}. URLs that do not match {@link #JDBC_URL_REGEX} are
     * returned unchanged.
     *
     * <p>Bug fix: the previous implementation called
     * {@code matcher.replaceFirst(dbName)}; because the pattern is anchored
     * with {@code ^...$}, the <em>entire</em> URL was the match, so the method
     * returned just the bare database name. We now splice {@code dbName} in at
     * the start of capture group 1, preserving the
     * {@code jdbc:postgresql://host:port/} prefix. Anything after the first
     * {@code /} (the old database name) is replaced wholesale.</p>
     *
     * @param url    JDBC URL to rewrite
     * @param dbName database name to substitute
     * @return the rewritten URL, or {@code url} unchanged when it does not match
     */
    public static String setDbName(String url, String dbName) {
        final Matcher matcher = JDBC_URL_REGEX.matcher(url);
        if (!matcher.find()) return url;
        // start(1) is the offset of the database-name group within the URL.
        return url.substring(0, matcher.start(1)) + dbName;
    }
}
| cobbzilla/cobbzilla-utils | src/main/java/org/cobbzilla/util/jdbc/DbUrlUtil.java | Java | apache-2.0 | 495 |
'use strict';
var fs = require('fs');
var path = require('path');
var util = require('util');
var dbg = require('debug');
// process.env.TABTAB_DEBUG = process.env.TABTAB_DEBUG || '/tmp/tabtab.log';
// Append-mode sink used when TABTAB_DEBUG names a file; opened once per
// process and intentionally never closed (flushed when the process exits).
var out = process.env.TABTAB_DEBUG ? fs.createWriteStream(process.env.TABTAB_DEBUG, { flags: 'a' }) : null;

module.exports = debug;

// Internal: Facade to debug module, which provides the exact same interface.
//
// The added benefit is with the TABTAB_DEBUG environment variable, which when
// defined, will write debug output to the specified filename.
//
// Usefull when debugging tab completion, as logs on stdout / stderr are either
// shallowed or used as tab completion results.
//
// namespace - The String namespace to use when TABTAB_DEBUG is not defined,
//             delegates to debug module.
//
// Examples
//
//   // Use with following command to redirect output to file
//   // TABTAB_DEBUG="debug.log" tabtab ...
//   debug('Foo');
function debug(namespace) {
  var log = dbg(namespace);
  return function () {
    // Transpiled rest-parameter collection: copy `arguments` into a real array.
    for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
      args[_key] = arguments[_key];
    }

    // Serialize non-string arguments so the file output stays readable.
    args = args.map(function (arg) {
      if (typeof arg === 'string') return arg;
      return JSON.stringify(arg);
    });

    // Exactly one sink fires: the TABTAB_DEBUG file when configured,
    // otherwise the wrapped debug() logger.
    out && out.write(util.format.apply(util, args) + '\n');
    out || log.apply(null, args);
  };
} | bhav0904/netDelSolution | node_modules/tabtab/src/debug.js | JavaScript | apache-2.0 | 1,398 |
<?php
/**
* This is the model class for table "product_type".
*
* The followings are the available columns in table 'product_type':
* @property string $product_type_id
* @property string $parent_product_type_id
* @property string $name
* @property string $created
*
* The followings are the available model relations:
* @property Product[] $products
* @property ProductMultiDay[] $productMultiDays
* @property ProductOneDay[] $productOneDays
*/
class ProductType extends CActiveRecord
{
/**
* Returns the static model of the specified AR class.
* @param string $className active record class name.
* @return ProductType the static model class
*/
public static function model($className=__CLASS__)
{
return parent::model($className);
}
/**
* @return string the associated database table name
*/
public function tableName()
{
return 'product_type';
}
/**
* @return array validation rules for model attributes.
*/
public function rules()
{
// NOTE: you should only define rules for those attributes that
// will receive user inputs.
return array(
array('parent_product_type_id', 'length', 'max'=>10),
);
}
/**
* @return array relational rules.
*/
public function relations()
{
// NOTE: you may need to adjust the relation name and the related
// class name for the relations automatically generated below.
return array();
}
/**
* @return array customized attribute labels (name=>label)
*/
public function attributeLabels()
{
return array(
'product_type_id' => 'Product Type',
'parent_product_type_id' => 'Parent Product Type',
'name' => 'Name',
'created' => 'Created',
);
}
} | CdGitHub/live | protected/models/ProductType.php | PHP | apache-2.0 | 1,879 |
=begin
Copyright 2012-2013 inBloom, Inc. and its affiliates.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require_relative '../lib/Shared/data_utility.rb'
require_relative '../lib/Shared/EntityClasses/enum/GradeLevelType.rb'
require_relative 'spec_helper'
# specifications for data utility
describe "DataUtility" do
  before(:all) do
    # Seed the PRNG from config.yml so random-grade tests are reproducible.
    @yaml = YAML.load_file(File.join(File.dirname(__FILE__),'../config.yml'))
    @prng = Random.new(@yaml['seed'])
  end
  after(:all) do
    @yaml = nil
    @prng = nil
  end
  describe "Generates correct _id for each supported entity" do
    describe "#get_staff_unique_state_id" do
      it "will generate a staff unique state id with the correct format" do
        DataUtility.get_staff_unique_state_id(146724).should match("stff-0000146724")
      end
    end
    describe "#get_teacher_unique_state_id" do
      it "will generate a teacher unique state id with the correct format" do
        DataUtility.get_teacher_unique_state_id(146724).should match("tech-0000146724")
      end
    end
  end
  describe "Handles requests for entities correctly" do
    describe "--> request to get staff unique state id with string" do
      it "will return the string that was input" do
        DataUtility.get_staff_unique_state_id("rrogers").should match("rrogers")
      end
    end
    describe "--> request to get staff unique state id with integer" do
      it "will return the corresponding staff unique state id" do
        DataUtility.get_staff_unique_state_id(17).should match("stff-0000000017")
      end
    end
    describe "--> request to get teacher unique state id with string" do
      it "will return the string that was input" do
        DataUtility.get_teacher_unique_state_id("cgray").should match("cgray")
      end
    end
    describe "--> request to get teacher unique state id with integer" do
      it "will return the corresponding teacher unique state id" do
        DataUtility.get_teacher_unique_state_id(18).should match("tech-0000000018")
      end
    end
    describe "--> request to get random elementary school grade" do
      it "will always return only grades that are in elementary school" do
        grades = [:KINDERGARTEN, :FIRST_GRADE, :SECOND_GRADE, :THIRD_GRADE, :FOURTH_GRADE, :FIFTH_GRADE]
        (1..25).each do
          grades.include?(DataUtility.get_random_grade_for_type(@prng, "elementary")).should be_true
        end
      end
    end
    describe "--> request to get random middle school grade" do
      it "will always return only grades that are in middle school" do
        grades = [:SIXTH_GRADE, :SEVENTH_GRADE, :EIGHTH_GRADE]
        (1..25).each do
          grades.include?(DataUtility.get_random_grade_for_type(@prng, "middle")).should be_true
        end
      end
    end
    describe "--> request to get random high school grade" do
      it "will always return only grades that are in high school" do
        grades = [:NINTH_GRADE, :TENTH_GRADE, :ELEVENTH_GRADE, :TWELFTH_GRADE]
        (1..25).each do
          grades.include?(DataUtility.get_random_grade_for_type(@prng, "high")).should be_true
        end
      end
    end
    describe "--> request to get subset of choices" do
      it "will return a subset of choices with correct size" do
        options = [1,2,3,4,5,6,7,8,9,10]
        subset = DataUtility.select_num_from_options(@prng, 5, options)
        subset.size.should eq 5
        subset.each do |number|
          options.include?(number).should be_true
        end
      end
      it "will return choices if the number specified is larger than the size of choices" do
        options = [1,2,3,4,5,6,7,8,9,10]
        subset = DataUtility.select_num_from_options(@prng, 15, options)
        subset.size.should eq 10
        subset.should eq options
      end
      # Description fixed: previously read "... empty array is the number ...".
      it "will return an empty array if the number specified is zero" do
        options = [1,2,3,4,5,6,7,8,9,10]
        subset = DataUtility.select_num_from_options(@prng, 0, options)
        subset.size.should eq 0
      end
    end
  end
end
| inbloom/secure-data-service | tools/odin/spec/data_utility_spec.rb | Ruby | apache-2.0 | 4,529 |
from changes.api.serializer import Crumbler, register
from changes.models.node import Cluster
@register(Cluster)
class ClusterCrumbler(Crumbler):
    """Serializes a Cluster model into its dictionary API representation."""

    def crumble(self, instance, attrs):
        # Expose the hex form of the UUID and surface ``label`` as ``name``
        # for API consumers; creation time is passed through unchanged.
        crumbled = {}
        crumbled['id'] = instance.id.hex
        crumbled['name'] = instance.label
        crumbled['dateCreated'] = instance.date_created
        return crumbled
| dropbox/changes | changes/api/serializer/models/cluster.py | Python | apache-2.0 | 336 |
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.core.services.jdbc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import org.junit.Test;
import com.adaptris.core.CoreException;
import com.adaptris.core.jdbc.JdbcConnection;
import com.adaptris.core.util.JdbcUtil;
import com.adaptris.core.util.LifecycleHelper;
import com.adaptris.util.KeyValuePair;
import com.adaptris.util.KeyValuePairSet;
public abstract class JdbcMapInsertCase {

  /** Key=value payload whose entries map onto the columns of {@link #TABLE_NAME}. */
  protected static final String CONTENT =
      "firstname=alice\n" +
      "lastname=smith\n" +
      "dob=2017-01-01";

  /** Payload containing characters that are not legal in column names. */
  protected static final String INVALID_COLUMN =
      "fi$rstname=alice\n" + "la$stname=smith\n" + "dob=2017-01-01";

  protected static final String JDBC_DRIVER = "org.apache.derby.jdbc.EmbeddedDriver";
  // In-memory Derby database: created on first connect, discarded on JVM exit.
  protected static final String JDBC_URL = "jdbc:derby:memory:JDCB_OBJ_DB;create=true";
  protected static final String TABLE_NAME = "people";
  protected static final String DROP_STMT = String.format("DROP TABLE %s", TABLE_NAME);
  protected static final String CREATE_STMT = String.format("CREATE TABLE %s (firstname VARCHAR(128) NOT NULL, "
      + "lastname VARCHAR(128) NOT NULL, "
      + "dob DATE)",
      TABLE_NAME);
  protected static final String CREATE_QUOTED = String.format(
      "CREATE TABLE %s (\"firstname\" VARCHAR(128) NOT NULL, \"lastname\" VARCHAR(128) NOT NULL, \"dob\" DATE)", TABLE_NAME);

  /** Service must refuse to init without a table name, and init once one is set. */
  @Test
  public void testService_Init() throws Exception {
    JdbcMapInsert service = createService();
    try {
      LifecycleHelper.init(service);
      fail();
    } catch (CoreException expected) {
    }
    service.setTable("hello");
    LifecycleHelper.init(service);
  }

  /** @return the concrete {@link JdbcMapInsert} implementation under test. */
  protected abstract JdbcMapInsert createService();

  /**
   * Asserts that {@link #TABLE_NAME} holds exactly {@code expectedCount} rows,
   * each with lastname "smith".
   */
  protected static void doAssert(int expectedCount) throws Exception {
    Connection c = null;
    PreparedStatement p = null;
    ResultSet rs = null;
    try {
      c = createConnection();
      p = c.prepareStatement(String.format("SELECT * FROM %s", TABLE_NAME));
      rs = p.executeQuery();
      int count = 0;
      while (rs.next()) {
        count++;
        assertEquals("smith", rs.getString("lastname"));
      }
      assertEquals(expectedCount, count);
    } finally {
      // Fix: the ResultSet was previously closed only on the success path and
      // leaked whenever an assertion failed; close everything in finally.
      JdbcUtil.closeQuietly(rs);
      JdbcUtil.closeQuietly(p);
      JdbcUtil.closeQuietly(c);
    }
  }

  /** Opens a new auto-commit connection to the embedded database. */
  protected static Connection createConnection() throws Exception {
    Connection c = null;
    Class.forName(JDBC_DRIVER);
    c = DriverManager.getConnection(JDBC_URL);
    c.setAutoCommit(true);
    return c;
  }

  protected static void createDatabase() throws Exception {
    createDatabase(CREATE_STMT);
  }

  /** (Re)creates the test table using the supplied CREATE statement. */
  protected static void createDatabase(String createStmt) throws Exception {
    Connection c = null;
    Statement s = null;
    try {
      c = createConnection();
      s = c.createStatement();
      executeQuietly(s, DROP_STMT);
      s.execute(createStmt);
    }
    finally {
      JdbcUtil.closeQuietly(s);
      JdbcUtil.closeQuietly(c);
    }
  }

  /** Executes {@code sql}, swallowing failures (e.g. DROP on a missing table). */
  protected static void executeQuietly(Statement s, String sql) {
    try {
      s.execute(sql);
    } catch (Exception e) {
      // best-effort by design: ignore, the table may simply not exist yet
    }
  }

  /** Wires the service to the embedded database plus the standard "dob" mapping. */
  protected static <T extends JdbcMapInsert> T configureForTests(T t) {
    JdbcMapInsert service = t;
    JdbcConnection connection = new JdbcConnection();
    connection.setConnectUrl(JDBC_URL);
    connection.setDriverImp(JDBC_DRIVER);
    service.setConnection(connection);
    KeyValuePairSet mappings = new KeyValuePairSet();
    mappings.add(new KeyValuePair("dob", JdbcMapInsert.BasicType.Date.name()));
    service.withTable(TABLE_NAME).withMappings(mappings);
    return t;
  }
}
| adaptris/interlok | interlok-core/src/test/java/com/adaptris/core/services/jdbc/JdbcMapInsertCase.java | Java | apache-2.0 | 4,375 |
/*
* Project Scelight
*
* Copyright (c) 2013 Andras Belicza <iczaaa@gmail.com>
*
* This software is the property of Andras Belicza.
* Copying, modifying, distributing, refactoring without the author's permission
* is prohibited and protected by Law.
*/
package hu.scelight.gui.page.replist.column.impl;
import hu.scelight.gui.icon.Icons;
import hu.scelight.gui.page.replist.column.BaseColumn;
import hu.scelight.sc2.rep.repproc.RepProcessor;
import java.util.Date;
/**
* Replay date column.
*
* @author Andras Belicza
*/
public class DateColumn extends BaseColumn< Date > {
	
	/**
	 * Creates a new {@link DateColumn} showing when a replay was recorded.
	 */
	public DateColumn() {
		super( "Date", Icons.F_CALENDAR_BLUE, "Replay date", Date.class, true );
	}
	
	@Override
	public Date getData( final RepProcessor repProc ) {
		// The replay date is taken from the details section of the parsed replay.
		final Date replayDate = repProc.replay.details.getTime();
		return replayDate;
	}
	
}
| icza/scelight | src-app/hu/scelight/gui/page/replist/column/impl/DateColumn.java | Java | apache-2.0 | 896 |
"""Tests for the CSRF helper."""
import unittest
import mock
import webapp2
import webtest
from ctc.helpers import csrf
from ctc.testing import testutil
MOCKED_TIME = 123
# Tests don't need docstrings, so pylint: disable=C0111
# Tests can test protected members, so pylint: disable=W0212
class CsrfTests(testutil.CtcTestCase):
    """Tests csrf token creation, validation, and handler enforcement."""
    # Helpers
    class TestHandler(csrf.CsrfHandler):
        """A handler for testing whether or not requests are CSRF protected."""
        def get(self):
            self.response.write('CSRF Token:%s' % self.csrf_token)
        def post(self):
            pass
        def put(self):
            pass
        def delete(self):
            pass
    def setUp(self):
        super(CsrfTests, self).setUp()
        # The CSRF library uses the time, so we mock it out.
        self.time_mock = mock.Mock()
        csrf.time = self.time_mock
        self.time_mock.time = mock.Mock(return_value=MOCKED_TIME)
        # The handler tests need a WSGIApplication.
        app = webapp2.WSGIApplication([('/', self.TestHandler)])
        self.testapp = webtest.TestApp(app)
    def test_get_secret_key(self):
        # The key is generated once and must be stable across calls.
        first_key = csrf._get_secret_key()
        self.assertEqual(len(first_key), 32)
        second_key = csrf._get_secret_key()
        self.assertEqual(first_key, second_key)
    def test_tokens_are_equal(self):
        # It should fail if the tokens aren't equal length.
        self.assertFalse(csrf._tokens_are_equal('a', 'ab'))
        # It should fail if the tokens are different.
        self.assertFalse(csrf._tokens_are_equal('abcde', 'abcdf'))
        # It should succeed if the tokens are the same.
        self.assertTrue(csrf._tokens_are_equal('abcde', 'abcde'))
    # Make Token
    def test_make_token_includes_time(self):
        self.login()
        # It should get the current time.
        token1 = csrf.make_token()
        self.assertEqual(token1.split()[-1], str(MOCKED_TIME))
        # It should use the provided time.
        token2 = csrf.make_token(token_time='456')
        self.assertEqual(token2.split()[-1], '456')
        # Different time should cause the digest to be different.
        self.assertNotEqual(token1.split()[0], token2.split()[0])
        token3 = csrf.make_token(token_time='456')
        self.assertEqual(token2, token3)
    def test_make_token_requires_login(self):
        # Anonymous users get no token; logged-in users do.
        token1 = csrf.make_token()
        self.assertIsNone(token1)
        self.login()
        token2 = csrf.make_token()
        self.assertIsNotNone(token2)
    def test_make_token_includes_path(self):
        self.login()
        # It should get the current path.
        self.testbed.setup_env(PATH_INFO='/action/1', overwrite=True)
        token1 = csrf.make_token(token_time='123')
        self.testbed.setup_env(PATH_INFO='/action/23', overwrite=True)
        token2 = csrf.make_token(token_time='123')
        token3 = csrf.make_token(token_time='123')
        self.assertNotEqual(token1, token2)
        self.assertEqual(token2, token3)
        # It should let the client pass in a path.
        token4 = csrf.make_token(path='/action/4', token_time='123')
        token5 = csrf.make_token(path='/action/56', token_time='123')
        token6 = csrf.make_token(path='/action/56', token_time='123')
        self.assertNotEqual(token4, token5)
        self.assertEqual(token5, token6)
    # Token Is Valid
    def test_token_is_valid(self):
        self.login()
        # Token is required.
        self.assertFalse(csrf.token_is_valid(None))
        # Token needs to have a timestamp on it.
        self.assertFalse(csrf.token_is_valid('hello'))
        # The timestamp needs to be within the current date range.
        self.time_mock.time = mock.Mock(return_value=9999999999999)
        self.assertFalse(csrf.token_is_valid('hello 123'))
        # The user needs to be logged in.
        token = csrf.make_token()
        self.logout()
        self.assertFalse(csrf.token_is_valid(token))
        self.login()
        # Modifying the token should break everything.
        modified_token = '0' + token[1:]
        if token == modified_token:
            modified_token = '1' + token[1:]
        self.assertFalse(csrf.token_is_valid(modified_token))
        # The original token that we got should work.
        self.assertTrue(csrf.token_is_valid(token))
    def test_get_has_csrf_token(self):
        # GET responses expose the token that mutating requests must echo back.
        self.login()
        response = self.testapp.get('/', status=200).body
        self.assertIn('CSRF Token:', response)
        self.assertEqual(response.split(':')[-1], csrf.make_token())
    def test_mutators_require_csrf_token(self):
        # PUT/POST/DELETE are rejected (403) without a valid token.
        self.login()
        self.testapp.put('/', status=403)
        self.testapp.post('/', status=403)
        self.testapp.delete('/', status=403)
        csrf_param = 'csrf_token=' + csrf.make_token(path='/')
        self.testapp.put('/', params=csrf_param, status=200)
        self.testapp.post('/', params=csrf_param, status=200)
        # Though the spec allows DELETE to have a body, it tends to be ignored
        # by servers (http://stackoverflow.com/questions/299628), and webapp2
        # ignores it as well, so we have to put the params in the URL.
        self.testapp.delete('/?' + csrf_param, status=200)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| samking/code-the-change-projects | ctc/helpers/csrf_test.py | Python | apache-2.0 | 5,296 |
// Copyright (c) 2015-2016 Yuya Ochiai
// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.
import AutoLaunch from 'auto-launch';
import {app} from 'electron';
// True when the given CLI argument is a Windows Squirrel installer event
// that should make the app exit after it has been handled.
function shouldQuitApp(cmd) {
    if (process.platform !== 'win32') {
        return false;
    }
    switch (cmd) {
    case '--squirrel-install':
    case '--squirrel-updated':
    case '--squirrel-uninstall':
    case '--squirrel-obsolete':
        return true;
    default:
        return false;
    }
}
// Keeps the auto-launch registry entry in sync with the Squirrel installer
// event currently being processed.
async function setupAutoLaunch(cmd) {
    const appLauncher = new AutoLaunch({
        name: app.getName(),
        isHidden: true,
    });
    switch (cmd) {
    case '--squirrel-uninstall':
        // Uninstalling: remove our auto-launch registry key as well.
        await appLauncher.disable();
        break;
    case '--squirrel-install':
    case '--squirrel-updated': {
        // Installing/updating: refresh the registry entry so it points at the
        // new executable path, but only if auto-launch was already enabled.
        const enabled = await appLauncher.isEnabled();
        if (enabled) {
            await appLauncher.enable();
        }
        break;
    }
    default:
        break;
    }
}
// Handles Windows Squirrel installer events. Returns true when the caller
// should quit immediately because Squirrel is driving this launch.
export default function squirrelStartup(callback) {
    if (process.platform !== 'win32') {
        return false;
    }
    const cmd = process.argv[1];
    setupAutoLaunch(cmd).then(() => {
        const handledBySquirrel = require('electron-squirrel-startup'); // eslint-disable-line global-require
        if (handledBySquirrel && callback) {
            callback();
        }
    });
    return shouldQuitApp(cmd);
}
| yuya-oc/desktop | src/main/squirrelStartup.js | JavaScript | apache-2.0 | 1,409 |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.location.suplclient.asn1.supl2.lpp;
// Copyright 2008 Google Inc. All Rights Reserved.
/*
* This class is AUTOMATICALLY GENERATED. Do NOT EDIT.
*/
//
//
import com.google.location.suplclient.asn1.base.Asn1Sequence;
import com.google.location.suplclient.asn1.base.Asn1Tag;
import com.google.location.suplclient.asn1.base.BitStream;
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.SequenceComponent;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import javax.annotation.Nullable;
/**
*
*/
public class GNSS_RealTimeIntegrityReq extends Asn1Sequence {
  // NOTE: generated code (see header) — an empty, extensible ASN.1 SEQUENCE;
  // its presence in a request simply asks for real-time integrity data.
  //
  private static final Asn1Tag TAG_GNSS_RealTimeIntegrityReq
      = Asn1Tag.fromClassAndNumber(-1, -1);
  public GNSS_RealTimeIntegrityReq() {
    super();
  }
  @Override
  @Nullable
  protected Asn1Tag getTag() {
    // (-1, -1) means no explicit tag is assigned to this type.
    return TAG_GNSS_RealTimeIntegrityReq;
  }
  @Override
  protected boolean isTagImplicit() {
    return true;
  }
  public static Collection<Asn1Tag> getPossibleFirstTags() {
    if (TAG_GNSS_RealTimeIntegrityReq != null) {
      return ImmutableList.of(TAG_GNSS_RealTimeIntegrityReq);
    } else {
      return Asn1Sequence.getPossibleFirstTags();
    }
  }
  /**
   * Creates a new GNSS_RealTimeIntegrityReq from encoded stream.
   */
  public static GNSS_RealTimeIntegrityReq fromPerUnaligned(byte[] encodedBytes) {
    GNSS_RealTimeIntegrityReq result = new GNSS_RealTimeIntegrityReq();
    result.decodePerUnaligned(new BitStreamReader(encodedBytes));
    return result;
  }
  /**
   * Creates a new GNSS_RealTimeIntegrityReq from encoded stream.
   */
  public static GNSS_RealTimeIntegrityReq fromPerAligned(byte[] encodedBytes) {
    GNSS_RealTimeIntegrityReq result = new GNSS_RealTimeIntegrityReq();
    result.decodePerAligned(new BitStreamReader(encodedBytes));
    return result;
  }
  @Override protected boolean isExtensible() {
    return true;
  }
  @Override public boolean containsExtensionValues() {
    for (SequenceComponent extensionComponent : getExtensionComponents()) {
      if (extensionComponent.isExplicitlySet()) return true;
    }
    return false;
  }
  // The sequence declares no components, hence the empty builders below.
  @Override public Iterable<? extends SequenceComponent> getComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
    return builder.build();
  }
  @Override public Iterable<? extends SequenceComponent>
      getExtensionComponents() {
    ImmutableList.Builder<SequenceComponent> builder = ImmutableList.builder();
    return builder.build();
  }
  @Override public Iterable<BitStream> encodePerUnaligned() {
    return super.encodePerUnaligned();
  }
  @Override public Iterable<BitStream> encodePerAligned() {
    return super.encodePerAligned();
  }
  @Override public void decodePerUnaligned(BitStreamReader reader) {
    super.decodePerUnaligned(reader);
  }
  @Override public void decodePerAligned(BitStreamReader reader) {
    super.decodePerAligned(reader);
  }
  @Override public String toString() {
    return toIndentedString("");
  }
  public String toIndentedString(String indent) {
    StringBuilder builder = new StringBuilder();
    builder.append("GNSS_RealTimeIntegrityReq = {\n");
    final String internalIndent = indent + "  ";
    for (SequenceComponent component : getComponents()) {
      if (component.isExplicitlySet()) {
        builder.append(internalIndent)
            .append(component.toIndentedString(internalIndent));
      }
    }
    if (isExtensible()) {
      builder.append(internalIndent).append("...\n");
      for (SequenceComponent component : getExtensionComponents()) {
        if (component.isExplicitlySet()) {
          builder.append(internalIndent)
              .append(component.toIndentedString(internalIndent));
        }
      }
    }
    builder.append(indent).append("};\n");
    return builder.toString();
  }
}
| google/supl-client | src/main/java/com/google/location/suplclient/asn1/supl2/lpp/GNSS_RealTimeIntegrityReq.java | Java | apache-2.0 | 4,587 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package modelo.formularios;
import controlador.dbConnection;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.swing.JOptionPane;
/**
*
* @author Eisner López Acevedo <eisner.lopez at gmail.com>
*/
public class Interfaz_Factura {

    private final dbConnection myLink = new dbConnection();
    private final Connection conexion = dbConnection.getConnection();
    private String querySQL = "";
    ResultSet rs = null;
    PreparedStatement pst = null;

    /**
     * Loads the invoice whose {@code numero_factura} equals {@code Buscar}.
     *
     * Fixes over the previous version: the search value was concatenated
     * straight into the SQL (injection risk), the keywords FROM/WHERE/order by
     * lacked separating spaces (invalid SQL), the Statement/ResultSet were
     * never closed, and the method unconditionally returned {@code false}.
     *
     * @param Buscar invoice number to look up
     * @return {@code true} when a matching invoice row was found
     */
    public boolean mostrarFactura(String Buscar) {
        String[] registro = new String[8];
        boolean encontrada = false;
        querySQL
                = "SELECT `factura_cabina`.`factura_id`, "
                + "`factura_cabina`.`cant_dia`, "
                + "`factura_cabina`.`fecha`, "
                + "`factura_cabina`.`impuesto_cabina`, "
                + "`factura_cabina`.`precio_total_cabina`, "
                + "`factura_cabina`.`cabina_cabina_id`, "
                + "`factura_cabina`.`colaborador_empleado_id`, "
                + "`factura_cabina`.`numero_factura` "
                + "FROM `pct3`.`factura_cabina` "
                + "WHERE "
                + "`factura_cabina`.`numero_factura` = ? "
                + "order by `factura_cabina`.`numero_factura`;";
        try (PreparedStatement st = conexion.prepareStatement(querySQL)) {
            // Bind the invoice number instead of concatenating it into the SQL.
            st.setString(1, Buscar);
            try (ResultSet resultados = st.executeQuery()) {
                while (resultados.next()) {
                    for (int i = 0; i < registro.length; i++) {
                        registro[i] = resultados.getString(i + 1);
                    }
                    encontrada = true;
                }
            }
        } catch (SQLException sqle) {
            JOptionPane.showConfirmDialog(null, sqle);
        }
        return encontrada;
    }
}
| eisnerh/PCT_315 | TropiCabinas/src/modelo/formularios/Interfaz_Factura.java | Java | apache-2.0 | 2,200 |
package com.ihtsdo.snomed.model.xml;
import java.sql.Date;
import javax.xml.bind.annotation.XmlRootElement;
import com.google.common.base.Objects;
import com.google.common.primitives.Longs;
import com.ihtsdo.snomed.dto.refset.RefsetDto;
import com.ihtsdo.snomed.model.refset.Refset;
@XmlRootElement(name="refset")
public class RefsetDtoShort {

    private long id;
    private XmlRefsetConcept concept;
    private String publicId;
    private String title;
    private String description;
    private Date created;
    private Date lastModified;
    private int memberSize;
    private String snomedExtension;
    private String snomedReleaseDate;
    private boolean pendingChanges;

    /**
     * Populates the DTO from the given {@link Refset} entity.
     *
     * @param r source refset; must have an ontology version attached
     */
    public RefsetDtoShort(Refset r){
        setId(r.getId());
        setConcept(new XmlRefsetConcept(r.getRefsetConcept()));
        setPublicId(r.getPublicId());
        setTitle(r.getTitle());
        setDescription(r.getDescription());
        setCreated(r.getCreationTime());
        setLastModified(r.getModificationTime());
        setPendingChanges(r.isPendingChanges());
        setMemberSize(r.getMemberSize());
        setSnomedExtension(r.getOntologyVersion().getFlavour().getPublicId());
        setSnomedReleaseDate(RefsetDto.dateFormat.format(r.getOntologyVersion().getTaggedOn()));
    }

    /** No-arg constructor required by JAXB. */
    public RefsetDtoShort(){}

    @Override
    public String toString() {
        return Objects.toStringHelper(this)
                .add("id", getId())
                .add("concept", getConcept())
                .add("publicId", getPublicId())
                .add("title", getTitle())
                .add("description", getDescription())
                .add("created", getCreated())
                .add("lastModified", getLastModified())
                .add("pendingChanges", isPendingChanges())
                .add("memberSize", getMemberSize())
                .add("snomedExtension", getSnomedExtension())
                .add("snomedReleaseDate", getSnomedReleaseDate())
                .toString();
    }

    // Identity (equals/hashCode) is based on the database id only.
    @Override
    public int hashCode(){
        return Longs.hashCode(getId());
    }

    @Override
    public boolean equals(Object o){
        if (o instanceof RefsetDtoShort){
            RefsetDtoShort r = (RefsetDtoShort) o;
            if (r.getId() == this.getId()){
                return true;
            }
        }
        return false;
    }

    public boolean isPendingChanges() {
        return pendingChanges;
    }

    public void setPendingChanges(boolean pendingChanges) {
        this.pendingChanges = pendingChanges;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public XmlRefsetConcept getConcept() {
        return concept;
    }

    public void setConcept(XmlRefsetConcept concept) {
        this.concept = concept;
    }

    public String getPublicId() {
        return publicId;
    }

    public void setPublicId(String publicId) {
        this.publicId = publicId;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Date getCreated() {
        return created;
    }

    public void setCreated(Date created) {
        this.created = created;
    }

    public Date getLastModified() {
        return lastModified;
    }

    public void setLastModified(Date lastModified) {
        this.lastModified = lastModified;
    }

    public int getMemberSize() {
        return memberSize;
    }

    public void setMemberSize(int memberSize) {
        this.memberSize = memberSize;
    }

    public String getSnomedExtension() {
        return snomedExtension;
    }

    public void setSnomedExtension(String snomedExtension) {
        this.snomedExtension = snomedExtension;
    }

    public String getSnomedReleaseDate() {
        return snomedReleaseDate;
    }

    public void setSnomedReleaseDate(String snomedReleaseDate) {
        this.snomedReleaseDate = snomedReleaseDate;
    }

    /** Convenience factory mirroring the entity constructor. */
    public static RefsetDtoShort parse(Refset r){
        return getBuilder(new XmlRefsetConcept(r.getRefsetConcept()),
                r.getPublicId(),
                r.getTitle(),
                r.getDescription(),
                r.getCreationTime(),
                r.getModificationTime(),
                r.isPendingChanges(),
                r.getMemberSize(),
                r.getOntologyVersion().getFlavour().getPublicId(),
                r.getOntologyVersion().getTaggedOn()).build();
    }

    public static Builder getBuilder(XmlRefsetConcept concept, String publicId, String title,
            String description, Date created, Date lastModified, boolean pendingChanges, int memberSize,
            String snomedExtension, Date snomedReleaseDate) {
        return new Builder(concept, publicId, title, description, created, lastModified, pendingChanges,
                memberSize, snomedExtension, snomedReleaseDate);
    }

    public static class Builder {
        private RefsetDtoShort built;

        Builder(XmlRefsetConcept concept, String publicId, String title, String description,
                Date created, Date lastModified, boolean pendingChanges, int memberSize,
                String snomedExtension, Date snomedReleaseDate){
            built = new RefsetDtoShort();
            built.concept = concept;
            built.publicId = publicId;
            built.title = title;
            built.description = description;
            built.created = created;
            built.lastModified = lastModified;
            built.pendingChanges = pendingChanges;
            built.memberSize = memberSize;
            // Consistency fix: assign directly like the rest of this builder
            // (previously a mix of direct field writes and setter calls).
            built.snomedExtension = snomedExtension;
            built.snomedReleaseDate = RefsetDto.dateFormat.format(snomedReleaseDate);
        }

        public RefsetDtoShort build() {
            return built;
        }
    }
}
| IHTSDO/snomed-publish | model/src/main/java/com/ihtsdo/snomed/model/xml/RefsetDtoShort.java | Java | apache-2.0 | 6,129 |
package com.fuyoul.sanwenseller.bean.pickerview;
import java.util.List;
public class ProvinceModel implements IPickerViewData {

    /** Province display name shown by the picker view. */
    private String name;
    /** Cities belonging to this province. */
    private List<CityModel> cityList;

    public ProvinceModel() {
        super();
    }

    public ProvinceModel(String name, List<CityModel> cityList) {
        super();
        this.name = name;
        this.cityList = cityList;
    }

    /** The picker renders a province by its name. */
    @Override
    public String getPickerViewText() {
        return this.name;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public List<CityModel> getCityList() {
        return this.cityList;
    }

    public void setCityList(List<CityModel> cityList) {
        this.cityList = cityList;
    }

    @Override
    public String toString() {
        return "ProvinceModel [name=" + name + ", cityList=" + cityList + "]";
    }
}
| newbieandroid/AppBase | app/src/main/java/com/fuyoul/sanwenseller/bean/pickerview/ProvinceModel.java | Java | apache-2.0 | 915 |
/*
* Copyright 2014-2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.metrics.api.jaxrs.handler;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_XHTML_XML;
import static javax.ws.rs.core.MediaType.TEXT_HTML;
import com.wordnik.swagger.annotations.ApiOperation;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
/**
* @author mwringe
*/
@Path("/")
public class BaseHandler {
public static final String PATH = "/";
@GET
@Produces(APPLICATION_JSON)
@ApiOperation(value = "Returns some basic information about the Hawkular Metrics service.",
response = String.class, responseContainer = "Map")
public Response baseJSON(@Context ServletContext context) {
String version = context.getInitParameter("hawkular.metrics.version");
if (version == null) {
version = "undefined";
}
HawkularMetricsBase hawkularMetrics = new HawkularMetricsBase();
hawkularMetrics.version = version;
return Response.ok(hawkularMetrics).build();
}
    @GET
    @Produces({APPLICATION_XHTML_XML, TEXT_HTML})
    public void baseHTML(@Context ServletContext context) throws Exception {
        // Browsers asking for HTML get the static landing page instead of JSON.
        HttpServletRequest request = ResteasyProviderFactory.getContextData(HttpServletRequest.class);
        HttpServletResponse response = ResteasyProviderFactory.getContextData(HttpServletResponse.class);
        request.getRequestDispatcher("/static/index.html").forward(request,response);
    }
private class HawkularMetricsBase {
String name = "Hawkular-Metrics";
String version;
public String getName() {
return name;
}
public void setVersion(String version) {
this.version = version;
}
public String getVersion() {
return version;
}
}
} | 140293816/Hawkular-fork | api/metrics-api-jaxrs/src/main/java/org/hawkular/metrics/api/jaxrs/handler/BaseHandler.java | Java | apache-2.0 | 2,802 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Dennis Ushakov
*/
package javax.accessibility;
import com.gaecompat.javax.swing.text.AttributeSet;
import com.google.code.appengine.awt.Point;
import com.google.code.appengine.awt.Rectangle;
public interface AccessibleText {
    // Granularity constants for getAtIndex/getAfterIndex/getBeforeIndex.
    static final int CHARACTER = 1;
    static final int WORD = 2;
    static final int SENTENCE = 3;
    // Maps a point in local coordinates to the index of the character there.
    int getIndexAtPoint(Point p);
    // Bounding box of the character at index i, in local coordinates.
    Rectangle getCharacterBounds(int i);
    int getCharCount();
    int getCaretPosition();
    // Text retrieval relative to an index, at CHARACTER/WORD/SENTENCE granularity.
    String getAtIndex(int part, int index);
    String getAfterIndex(int part, int index);
    String getBeforeIndex(int part, int index);
    AttributeSet getCharacterAttribute(int i);
    int getSelectionStart();
    int getSelectionEnd();
    String getSelectedText();
}
| mike10004/appengine-imaging | gaecompat-awt-imaging/src/common/javax/accessibility/AccessibleText.java | Java | apache-2.0 | 1,610 |
/**
* Copyright 2015-2016 Maven Source Dependencies
* Plugin contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.l2x6.srcdeps.core.shell;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import org.l2x6.srcdeps.core.util.SrcdepsCoreUtils;
/**
* A definition of a shell command that can be executed by {@link Shell#execute(ShellCommand)}.
*
* @author <a href="https://github.com/ppalaga">Peter Palaga</a>
*/
public class ShellCommand {
    // Arguments passed to the executable, in order.
    private final List<String> arguments;
    // Extra environment variables overlaid on the parent process environment.
    private final Map<String, String> environment;
    // The program to run (name or path).
    private final String executable;
    // stdin/stdout/stderr redirection configuration.
    private final IoRedirects ioRedirects;
    // Maximum time the command may run, in milliseconds.
    private final long timeoutMs;
    // Directory the command is executed in.
    private final Path workingDirectory;
public ShellCommand(String executable, List<String> arguments, Path workingDirectory,
Map<String, String> environment, IoRedirects ioRedirects, long timeoutMs) {
super();
SrcdepsCoreUtils.assertArgNotNull(executable, "executable");
SrcdepsCoreUtils.assertArgNotNull(arguments, "arguments");
SrcdepsCoreUtils.assertArgNotNull(workingDirectory, "workingDirectory");
SrcdepsCoreUtils.assertArgNotNull(environment, "environment");
SrcdepsCoreUtils.assertArgNotNull(ioRedirects, "ioRedirects");
this.executable = executable;
this.arguments = arguments;
this.workingDirectory = workingDirectory;
this.environment = environment;
this.ioRedirects = ioRedirects;
this.timeoutMs = timeoutMs;
}
/**
* @return an array containing the executable and its arguments that can be passed e.g. to
* {@link ProcessBuilder#command(String...)}
*/
public String[] asCmdArray() {
String[] result = new String[arguments.size() + 1];
int i = 0;
result[i++] = executable;
for (String arg : arguments) {
result[i++] = arg;
}
return result;
}
/**
* @return the {@link List} arguments for the executable. Cannot be {@code null}.
*/
public List<String> getArguments() {
return arguments;
}
/**
* @return a {@link Map} of environment variables that should be used when executing this {@link ShellCommand}.
* Cannot be {@code null}. Note that these are just overlay variables - when a new {@link Process} is
* spawned, the environment is copied from the present process and only the variables the provided by the
* present method are overwritten.
*/
public Map<String, String> getEnvironment() {
return environment;
}
/**
* @return the executable file that should be called
*/
public String getExecutable() {
return executable;
}
/**
* @return the {@link IoRedirects} to use when the {@link Shell} spawns a new {@link Process}
*/
public IoRedirects getIoRedirects() {
return ioRedirects;
}
/**
* @return timeout in milliseconds
*/
public long getTimeoutMs() {
return timeoutMs;
}
/**
* @return the directory in which this {@link ShellCommand} should be executed
*/
public Path getWorkingDirectory() {
return workingDirectory;
}
}
| jpkrohling/srcdeps-maven-plugin | srcdeps-core/src/main/java/org/l2x6/srcdeps/core/shell/ShellCommand.java | Java | apache-2.0 | 3,821 |
/*
* Powered By agile
* Web Site: http://www.agile.com
* Since 2008 - 2016
*/
package persistent.prestige.modules.edu.service;
import java.util.Map;
/**
 * Organization service interface.
 *
 * @author 雅居乐 2016-9-10 22:28:24
 * @version 1.0
 */
public interface OrganizationService{
    /**
     * Persists an organization record built from the given field map.
     * NOTE(review): the parameter uses a raw {@code Map}; presumably
     * {@code Map<String, Object>} keyed by column/field name — confirm
     * against the implementation before parameterizing.
     *
     * @param datas field-name to value map describing the organization
     * @return result count — presumably the number of rows affected; verify
     */
    Integer saveOrganization(Map datas);
}
| dingwpmz/Mycat-Demo | src/main/java/persistent/prestige/modules/edu/service/OrganizationService.java | Java | apache-2.0 | 381 |
/**
--| ADAPTIVE RUNTIME PLATFORM |----------------------------------------------------------------------------------------
(C) Copyright 2013-2015 Carlos Lozano Diez t/a Adaptive.me <http://adaptive.me>.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 . Unless required by appli-
-cable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
Original author:
* Carlos Lozano Diez
<http://github.com/carloslozano>
<http://twitter.com/adaptivecoder>
<mailto:carlos@adaptive.me>
Contributors:
* Ferran Vila Conesa
<http://github.com/fnva>
<http://twitter.com/ferran_vila>
<mailto:ferran.vila.conesa@gmail.com>
* See source code files for contributors.
Release:
* @version v2.2.15
-------------------------------------------| aut inveniam viam aut faciam |--------------------------------------------
*/
using System;
namespace Adaptive.Arp.Api
{
/**
 Enumeration IAdaptiveRPGroup.
 Functional API groups used to categorise Adaptive Runtime Platform bridge
 interfaces. NOTE(review): the meaning of each group is defined by the
 consuming bridge code, not visible in this file; Unknown acts as the
 fallback value.
*/
public enum IAdaptiveRPGroup {
    Application,
    Commerce,
    Communication,
    Data,
    Media,
    Notification,
    PIM,
    Reader,
    Security,
    Sensor,
    Social,
    System,
    UI,
    Util,
    Kernel,
    Unknown
}
}
| AdaptiveMe/adaptive-arp-api-lib-dotnet | adaptive-arp-api/Sources/Adaptive.Arp.Api/IAdaptiveRPGroup.cs | C# | apache-2.0 | 1,735 |
/*
*
* * Copyright (c) 2011-2015 EPFL DATA Laboratory
* * Copyright (c) 2014-2015 The Squall Collaboration (see NOTICE)
* *
* * All rights reserved.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package ch.epfl.data.squall.components.dbtoaster;
import backtype.storm.Config;
import backtype.storm.topology.TopologyBuilder;
import ch.epfl.data.squall.components.Component;
import ch.epfl.data.squall.components.JoinerComponent;
import ch.epfl.data.squall.components.AbstractJoinerComponent;
import ch.epfl.data.squall.operators.AggregateStream;
import ch.epfl.data.squall.predicates.Predicate;
import ch.epfl.data.squall.storm_components.StormComponent;
import ch.epfl.data.squall.storm_components.dbtoaster.StormDBToasterJoin;
import ch.epfl.data.squall.storm_components.synchronization.TopologyKiller;
import ch.epfl.data.squall.types.Type;
import ch.epfl.data.squall.utilities.MyUtilities;
import org.apache.log4j.Logger;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Joiner component whose join semantics come from a DBToaster-generated
 * program described by an SQL query; the Storm-level wiring is created in
 * {@link #makeBolts}.
 */
public class DBToasterJoinComponent extends AbstractJoinerComponent<DBToasterJoinComponent> {

    private static final long serialVersionUID = 1L;
    private static Logger LOG = Logger.getLogger(DBToasterJoinComponent.class);

    private Map<String, Type[]> _parentNameColTypes;
    private Set<String> _parentsWithMultiplicity;
    private Map<String, AggregateStream> _parentsWithAggregator;
    private String _equivalentSQL;

    protected DBToasterJoinComponent(List<Component> relations, Map<String, Type[]> relationTypes,
            Set<String> relationsWithMultiplicity, Map<String, AggregateStream> relationsWithAggregator,
            String sql, String name) {
        super(relations, name);
        _parentNameColTypes = relationTypes;
        _parentsWithMultiplicity = relationsWithMultiplicity;
        _parentsWithAggregator = relationsWithAggregator;
        _equivalentSQL = sql;
    }

    /** @return this component (fluent-API hook required by the superclass) */
    protected DBToasterJoinComponent getThis() {
        return this;
    }

    @Override
    public void makeBolts(TopologyBuilder builder, TopologyKiller killer,
            List<String> allCompNames, Config conf, int hierarchyPosition) {
        // By default only the last component in the hierarchy prints its output;
        // other placements may opt in explicitly via setPrintOut.
        final boolean isFinalComponent = hierarchyPosition == StormComponent.FINAL_COMPONENT;
        if (isFinalComponent && !getPrintOutSet()) {
            setPrintOut(true);
        }

        MyUtilities.checkBatchOutput(getBatchOutputMillis(),
                getChainOperator().getAggregation(), conf);

        final StormDBToasterJoin emitter = new StormDBToasterJoin(getParents(), this,
                allCompNames,
                _parentNameColTypes,
                _parentsWithMultiplicity,
                _parentsWithAggregator,
                hierarchyPosition,
                builder, killer, conf);
        setStormEmitter(emitter);
    }

    /** Join predicates are encoded in the DBToaster SQL, never set explicitly. */
    @Override
    public DBToasterJoinComponent setJoinPredicate(Predicate predicate) {
        throw new UnsupportedOperationException();
    }

    /** @return the SQL query equivalent to this join, handed to DBToaster */
    public String getSQLQuery() {
        return _equivalentSQL;
    }
}
| akathorn/squall | squall-core/src/main/java/ch/epfl/data/squall/components/dbtoaster/DBToasterJoinComponent.java | Java | apache-2.0 | 3,862 |
[
{
"title": "Ventes véhicules",
"url": "",
"imageUrl": "http://lim.local.inetpsa.com/application/images/service/car2.jpg",
"language": "fr-FR",
"order": "1",
"workplace": "All",
"department": "All",
"categoryPro": "All",
"status": "published",
"children": [
{
"title": "http://portail.inetpsa.com/sites/gpmobile/PublishingImages/car2.jpg",
"url": "http://portail.inetpsa.com/sites/gpmobile/PublishingImages/car2.jpg",
"imageUrl": "http://lim.local.inetpsa.com/application/images/service/car2.jpg",
"language": "fr-FR",
"order": 0,
"workplace": "All",
"department": "All",
"categoryPro": "All",
"status": "published"
},
{
"title": "Ventes véhicules Peugeot",
"url": "https://docinfogroupe.psa-peugeot-citroen.com/ead/dom/1000788899.fd",
"imageUrl": "http://lim.local.inetpsa.com/application/images/service/car2.jpg",
"language": "fr-FR",
"order": 1,
"workplace": "All",
"department": "All",
"categoryPro": "All",
"status": "edited"
},
{
"title": "Ventes véhicules DS",
"url": "https://docinfogroupe.psa-peugeot-citroen.com/ead/dom/1000779116.fd",
"imageUrl": "http://lim.local.inetpsa.com/application/images/service/car2.jpg",
"language": "fr-FR",
"order": 2,
"workplace": "All",
"department": "All",
"categoryPro": "All",
"status": "published"
}
]
},
{
"title": "Webmail",
"url": "https://webmail.mpsa.com/",
"imageUrl": "http://lim.local.inetpsa.com/application/images/service/webmail.jpg",
"language": "en-US",
"order": "0",
"workplace": "All",
"department": "All",
"categoryPro": "All",
"status": "published",
"children": []
},
{
"title": "Net'RH congés",
"url": "https://fr-rh.mpsa.com/rib00/ribdnins.nsf/fc583fb6947d633ac12569450031fbd2/755dcfb785421c6cc1256c7f003107c2?OpenDocument",
"imageUrl": "http://lim.local.inetpsa.com/application/images/service/holiday.jpg",
"language": "fr-FR",
"order": "3",
"workplace": "All",
"department": "All",
"categoryPro": "All",
"status": "unpublished",
"children": []
}
] | florentbruel/livein | services.js | JavaScript | apache-2.0 | 2,378 |
/*
* Copyright 2009-2013 Aarhus University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.brics.tajs.analysis;
import dk.brics.tajs.flowgraph.BasicBlock;
import dk.brics.tajs.lattice.CallEdge;
import dk.brics.tajs.solver.CallGraph;
import dk.brics.tajs.solver.IWorkListStrategy;
/**
 * Work list strategy.
 * <p>
 * Orders work list entries: entries in the same function and context follow
 * basic-block order; entries in different function/context pairs are ordered
 * by the call graph's block-context occurrence number (later first); ties are
 * broken breadth-first by entry serial number.
 */
public class WorkListStrategy implements IWorkListStrategy<Context> {

    private CallGraph<State,Context,CallEdge<State>> call_graph;

    /**
     * Constructs a new WorkListStrategy object.
     */
    public WorkListStrategy() {}

    /**
     * Sets the call graph used to order entries belonging to different
     * function/context pairs.
     */
    public void setCallGraph(CallGraph<State,Context,CallEdge<State>> call_graph) {
        this.call_graph = call_graph;
    }

    /**
     * Compares two work list entries; see the class comment for the ordering.
     */
    @Override
    public int compare(IEntry<Context> e1, IEntry<Context> e2) {
        BasicBlock n1 = e1.getBlock();
        BasicBlock n2 = e2.getBlock();
        int serial1 = e1.getSerial();
        int serial2 = e2.getSerial();
        if (serial1 == serial2)
            return 0;
        final int E1_FIRST = -1;
        final int E2_FIRST = 1;
        if (n1.getFunction().equals(n2.getFunction()) && e1.getContext().equals(e2.getContext())) {
            // same function and same context: use block order
            if (n1.getOrder() < n2.getOrder())
                return E1_FIRST;
            else if (n2.getOrder() < n1.getOrder())
                return E2_FIRST;
        }
        int function_context_order1 = call_graph.getBlockContextOrder(e1.getContext().getEntryBlockAndContext());
        int function_context_order2 = call_graph.getBlockContextOrder(e2.getContext().getEntryBlockAndContext());
        // different function/context: order by occurrence number
        if (function_context_order1 < function_context_order2)
            return E2_FIRST;
        else if (function_context_order2 < function_context_order1)
            return E1_FIRST;
        // strategy: breadth first.
        // FIX: use Integer.compare instead of (serial1 - serial2) — plain int
        // subtraction can overflow for serials of opposite sign and invert the
        // comparator, violating the Comparator contract.
        return Integer.compare(serial1, serial2);
    }
}
| cursem/ScriptCompressor | ScriptCompressor1.0/src/dk/brics/tajs/analysis/WorkListStrategy.java | Java | apache-2.0 | 2,303 |
#include "firestore/src/swig/equality_compare.h"
namespace {

// Pointer-aware equality: two pointers compare equal when they are the same
// pointer (including both null), or when both are non-null and the pointed-to
// values compare equal via the type's operator==. Never dereferences null.
template <typename T>
bool EqualityCompareHelper(const T* lhs, const T* rhs) {
  return lhs == rhs || (lhs != nullptr && rhs != nullptr && *lhs == *rhs);
}

}  // namespace

namespace firebase {
namespace firestore {
namespace csharp {

// Wrappers below give the SWIG/C# layer a null-safe, by-value equality check
// for each Firestore wrapper type.

bool QueryEquals(const Query* lhs, const Query* rhs) {
  return EqualityCompareHelper(lhs, rhs);
}

bool QuerySnapshotEquals(const QuerySnapshot* lhs, const QuerySnapshot* rhs) {
  return EqualityCompareHelper(lhs, rhs);
}

bool DocumentSnapshotEquals(const DocumentSnapshot* lhs,
                            const DocumentSnapshot* rhs) {
  return EqualityCompareHelper(lhs, rhs);
}

bool DocumentChangeEquals(const DocumentChange* lhs,
                          const DocumentChange* rhs) {
  return EqualityCompareHelper(lhs, rhs);
}

}  // namespace csharp
}  // namespace firestore
}  // namespace firebase
| firebase/firebase-unity-sdk | firestore/src/swig/equality_compare.cc | C++ | apache-2.0 | 912 |
package at.ac.tuwien.dsg.pm.resources;
import at.ac.tuwien.dsg.pm.PeerManager;
import at.ac.tuwien.dsg.pm.model.Collective;
import at.ac.tuwien.dsg.smartcom.model.CollectiveInfo;
import at.ac.tuwien.dsg.smartcom.model.Identifier;
import javax.inject.Inject;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.List;
/**
 * REST resource exposing read-only collective information.
 *
 * @author Philipp Zeppezauer (philipp.zeppezauer@gmail.com)
 * @version 1.0
 */
@Path("collectiveInfo")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public class CollectiveInfoResource {

    @Inject
    private PeerManager manager;

    /**
     * Looks up the collective with the given id and converts it into a
     * {@link CollectiveInfo} transfer object.
     *
     * @param id the collective's id
     * @return the collective's info (id, delivery policy, peer identifiers)
     * @throws WebApplicationException with 404 when no such collective exists
     */
    @GET
    @Path("/{id}")
    public CollectiveInfo getCollectiveInfo(@PathParam("id") String id) {
        Collective collective = manager.getCollective(id);
        if (collective == null) {
            throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND).build());
        }

        // Wrap each raw peer id into an Identifier before exposing it.
        List<Identifier> peerIds = new ArrayList<>(collective.getPeers().size());
        for (String peerId : collective.getPeers()) {
            peerIds.add(Identifier.peer(peerId));
        }

        CollectiveInfo collectiveInfo = new CollectiveInfo();
        collectiveInfo.setId(Identifier.collective(id));
        collectiveInfo.setDeliveryPolicy(collective.getDeliveryPolicy());
        collectiveInfo.setPeers(peerIds);
        return collectiveInfo;
    }
}
| PhilZeppe/CaaS | pm/src/main/java/at/ac/tuwien/dsg/pm/resources/CollectiveInfoResource.java | Java | apache-2.0 | 1,385 |
package com.earlysleep.model;
import org.litepal.crud.DataSupport;
import java.util.ArrayList;
import java.util.List;
/**
 * Created by zml on 2016/6/23.
 * Description: LitePal-persisted record bundling the user's music choice and
 * time settings.
 * NOTE(review): LitePal maps fields to database columns by reflection —
 * renaming any field would change the schema; confirm before refactoring.
 */
public class AllData extends DataSupport {
    // Selected music track — presumably a file name or path; verify against callers.
    private String music;
    // Music duration/position — unit not visible here; TODO confirm.
    private int musictime;
    // Whether a music track has been chosen (name appears to be a typo of "musicchoose").
    private boolean musicchosse;
    // Associated time settings; package-private visibility kept as-is.
    List<TimeSeting> list=new ArrayList<>();
}
| 642638112/-1.0 | EarlySleep/app/src/main/java/com/earlysleep/model/AllData.java | Java | apache-2.0 | 353 |
# frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/access_approval/v1/access_approval"
require "google/cloud/access_approval/v1/version"
module Google
  module Cloud
    module AccessApproval
      ##
      # Namespace anchor for version V1 of the Access Approval API; the
      # service and version constants are defined by the files required at
      # the top of this (generated) file.
      #
      # To load this package, including all its services, and instantiate a client:
      #
      # @example
      #
      #     require "google/cloud/access_approval/v1"
      #     client = ::Google::Cloud::AccessApproval::V1::AccessApproval::Client.new
      #
      module V1
      end
    end
  end
end
helper_path = ::File.join __dir__, "v1", "_helpers.rb"
require "google/cloud/access_approval/v1/_helpers" if ::File.file? helper_path
| googleapis/google-cloud-ruby | google-cloud-access_approval-v1/lib/google/cloud/access_approval/v1.rb | Ruby | apache-2.0 | 1,279 |
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.andes.server.handler;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.andes.AMQException;
import org.wso2.andes.amqp.AMQPUtils;
import org.wso2.andes.exchange.ExchangeDefaults;
import org.wso2.andes.framing.AMQShortString;
import org.wso2.andes.framing.BasicPublishBody;
import org.wso2.andes.framing.abstraction.MessagePublishInfo;
import org.wso2.andes.protocol.AMQConstant;
import org.wso2.andes.server.AMQChannel;
import org.wso2.andes.server.exchange.Exchange;
import org.wso2.andes.server.protocol.AMQProtocolSession;
import org.wso2.andes.server.state.AMQStateManager;
import org.wso2.andes.server.state.StateAwareMethodListener;
import org.wso2.andes.server.virtualhost.VirtualHost;
/**
 * AMQP method handler for {@code basic.publish}: validates the target exchange
 * and routing key, then stores the partially populated publish info on the
 * channel until the remaining content header/body frames arrive.
 */
public class BasicPublishMethodHandler implements StateAwareMethodListener<BasicPublishBody>
{
    private static final Log _logger = LogFactory.getLog(BasicPublishMethodHandler.class);

    // Stateless singleton shared across all sessions.
    private static final BasicPublishMethodHandler _instance = new BasicPublishMethodHandler();

    /** @return the shared singleton instance */
    public static BasicPublishMethodHandler getInstance()
    {
        return _instance;
    }

    private BasicPublishMethodHandler()
    {
    }

    /**
     * Handles a received {@code basic.publish} method frame.
     *
     * @param stateManager provides access to the protocol session
     * @param body         the decoded basic.publish method body
     * @param channelId    the channel the frame arrived on
     * @throws AMQException if the exchange is unknown, the channel does not
     *         exist, or a wildcard routing key is used on the topic exchange
     */
    public void methodReceived(AMQStateManager stateManager, BasicPublishBody body, int channelId) throws AMQException
    {
        AMQProtocolSession session = stateManager.getProtocolSession();

        if (_logger.isDebugEnabled())
        {
            _logger.debug("Publish received on channel " + channelId);
        }

        AMQShortString exchangeName = body.getExchange();
        // TODO: check the delivery tag field details - is it unique across the broker or per subscriber?
        // A missing exchange name means "publish to the default exchange".
        if (exchangeName == null)
        {
            exchangeName = ExchangeDefaults.DEFAULT_EXCHANGE_NAME;
        }
        VirtualHost vHost = session.getVirtualHost();
        Exchange exch = vHost.getExchangeRegistry().getExchange(exchangeName);
        // if the exchange does not exist we raise a channel exception
        if (exch == null)
        {
            throw body.getChannelException(AMQConstant.NOT_FOUND, "Unknown exchange name");
        }
        else
        {
            // The partially populated BasicDeliver frame plus the received route body
            // is stored in the channel. Once the final body frame has been received
            // it is routed to the exchange.
            AMQChannel channel = session.getChannel(channelId);

            if (channel == null)
            {
                throw body.getChannelNotFoundException(channelId);
            }

            MessagePublishInfo info = session.getMethodRegistry().getProtocolVersionMethodConverter().convertToInfo(body);
            // Wildcards are only meaningful for subscriptions; publishing to a
            // wildcard destination on the topic exchange is rejected outright.
            if (ExchangeDefaults.TOPIC_EXCHANGE_NAME.equals(exchangeName)
                    && AMQPUtils.isWildCardDestination(info.getRoutingKey().toString())) {
                throw body.getChannelException(AMQConstant.INVALID_ROUTING_KEY, "Publishing messages to a wildcard "
                        + "destination is not allowed");
            }
            info.setExchange(exchangeName);
            channel.setPublishFrame(info, exch);
        }
    }
}
| wso2/andes | modules/andes-core/broker/src/main/java/org/wso2/andes/server/handler/BasicPublishMethodHandler.java | Java | apache-2.0 | 3,860 |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.policy.mgt.core.task;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.ntask.common.TaskException;
import org.wso2.carbon.ntask.core.TaskInfo;
import org.wso2.carbon.ntask.core.TaskManager;
import org.wso2.carbon.ntask.core.service.TaskService;
import org.wso2.carbon.policy.mgt.common.PolicyMonitoringTaskException;
import org.wso2.carbon.policy.mgt.core.internal.PolicyManagementDataHolder;
import org.wso2.carbon.policy.mgt.core.util.PolicyManagementConstants;
import org.wso2.carbon.policy.mgt.core.util.PolicyManagerUtil;
import org.wso2.carbon.ntask.core.TaskInfo.TriggerInfo;
import java.util.HashMap;
import java.util.Map;
/**
 * Schedules, updates and removes the per-tenant policy monitoring task using
 * the Carbon ntask service. The task name is derived from the tenant id, so
 * each tenant owns exactly one monitoring task.
 */
public class TaskScheduleServiceImpl implements TaskScheduleService {

    private static Log log = LogFactory.getLog(TaskScheduleServiceImpl.class);

    @Override
    public void startTask(int monitoringFrequency) throws PolicyMonitoringTaskException {
        if (monitoringFrequency <= 0) {
            throw new PolicyMonitoringTaskException("Time interval cannot be 0 or less than 0.");
        }
        try {
            int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
            TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService();
            // The task type must be registered once before a manager can be obtained.
            taskService.registerTaskType(PolicyManagementConstants.TASK_TYPE);

            if (log.isDebugEnabled()) {
                log.debug("Monitoring task is started for the tenant id " + tenantId);
            }
            TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE);
            registerAndScheduleTask(taskManager, tenantId, monitoringFrequency);
        } catch (TaskException e) {
            String msg = "Error occurred while creating the task for tenant " + PrivilegedCarbonContext.
                    getThreadLocalCarbonContext().getTenantId();
            log.error(msg, e);
            throw new PolicyMonitoringTaskException(msg, e);
        }
    }

    @Override
    public void stopTask() throws PolicyMonitoringTaskException {
        try {
            int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
            String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId);
            TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService();
            TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE);
            taskManager.deleteTask(taskName);
        } catch (TaskException e) {
            String msg = "Error occurred while deleting the task for tenant " + PrivilegedCarbonContext.
                    getThreadLocalCarbonContext().getTenantId();
            log.error(msg, e);
            throw new PolicyMonitoringTaskException(msg, e);
        }
    }

    @Override
    public void updateTask(int monitoringFrequency) throws PolicyMonitoringTaskException {
        try {
            int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
            String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId);
            TaskService taskService = PolicyManagementDataHolder.getInstance().getTaskService();
            TaskManager taskManager = taskService.getTaskManager(PolicyManagementConstants.TASK_TYPE);
            // Drop the existing task, then re-register it with the new frequency.
            taskManager.deleteTask(taskName);
            registerAndScheduleTask(taskManager, tenantId, monitoringFrequency);
        } catch (TaskException e) {
            String msg = "Error occurred while updating the task for tenant " + PrivilegedCarbonContext.
                    getThreadLocalCarbonContext().getTenantId();
            log.error(msg, e);
            throw new PolicyMonitoringTaskException(msg, e);
        }
    }

    /**
     * Registers the per-tenant monitoring task and (re)schedules it.
     * <p>
     * Extracted from the previously duplicated bodies of startTask/updateTask.
     * FIX: updateTask used the string literal "tenantId" as the property key
     * while startTask used PolicyManagementConstants.TENANT_ID; both now use
     * the constant so the task always receives the same property key.
     *
     * @param taskManager         manager for the monitoring task type
     * @param tenantId            tenant owning the task
     * @param monitoringFrequency trigger interval in milliseconds
     */
    private void registerAndScheduleTask(TaskManager taskManager, int tenantId, int monitoringFrequency)
            throws TaskException {
        TriggerInfo triggerInfo = new TriggerInfo();
        triggerInfo.setIntervalMillis(monitoringFrequency);
        triggerInfo.setRepeatCount(-1);

        Map<String, String> properties = new HashMap<>();
        properties.put(PolicyManagementConstants.TENANT_ID, String.valueOf(tenantId));

        String taskName = PolicyManagementConstants.TASK_NAME + "_" + String.valueOf(tenantId);
        TaskInfo taskInfo = new TaskInfo(taskName, PolicyManagementConstants.TASK_CLAZZ, properties, triggerInfo);
        taskManager.registerTask(taskInfo);
        taskManager.rescheduleTask(taskInfo.getName());
    }
}
| charithag/carbon-device-mgt-framework | components/policy-mgt/org.wso2.carbon.policy.mgt.core/src/main/java/org/wso2/carbon/policy/mgt/core/task/TaskScheduleServiceImpl.java | Java | apache-2.0 | 5,611 |
package org.liveontologies.protege.justification.proof.preferences;
/*-
* #%L
* Protege Proof Justification
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2016 - 2017 Live Ontologies Project
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.eclipse.core.runtime.IExtension;
import org.protege.editor.core.editorkit.EditorKit;
import org.protege.editor.core.plugin.AbstractPluginLoader;
/**
 * Discovers {@link ProofPreferencesPanelPlugin}s contributed against the
 * "JustificationProofPreferences" extension point and instantiates them with
 * the current {@link EditorKit}.
 */
public class ProofPreferencesPanelPluginLoader extends AbstractPluginLoader<ProofPreferencesPanelPlugin> {

    // Editor kit handed to every plugin instance created by this loader.
    private final EditorKit kit;

    // Extension-point id within the plugin identified by KEY.
    private static final String ID = "JustificationProofPreferences";
    // Id of the plugin bundle that declares the extension point.
    private static final String KEY = "org.liveontologies.protege.justification.proof";

    public ProofPreferencesPanelPluginLoader(EditorKit kit) {
        super(KEY, ID);
        this.kit = kit;
    }

    /** Wraps a single discovered Eclipse extension in a panel plugin. */
    @Override
    protected ProofPreferencesPanelPlugin createInstance(IExtension extension) {
        return new ProofPreferencesPanelPlugin(kit, extension);
    }
}
| liveontologies/protege-proof-justification | src/main/java/org/liveontologies/protege/justification/proof/preferences/ProofPreferencesPanelPluginLoader.java | Java | apache-2.0 | 1,491 |
# encoding: utf-8
u'''MCL — Publication Folder'''
from ._base import IIngestableFolder, Ingestor, IngestableFolderView
from .interfaces import IPublication
from five import grok
class IPublicationFolder(IIngestableFolder):
    u'''Marker interface for a folder containing publications.'''
class PublicationIngestor(Ingestor):
    u'''RDF ingestor for publication folders.'''
    # Register this ingestor for publication folders.
    grok.context(IPublicationFolder)
    def getContainedObjectInterface(self):
        u'''Objects created during ingest must provide IPublication.'''
        return IPublication
class View(IngestableFolderView):
    u'''View for a publication folder.'''
    # Register this view for publication folders.
    grok.context(IPublicationFolder)
| MCLConsortium/mcl-site | src/jpl.mcl.site.knowledge/src/jpl/mcl/site/knowledge/publicationfolder.py | Python | apache-2.0 | 575 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.workdocs.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.workdocs.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * DeleteFolderRequest Marshaller
 * <p>
 * NOTE(review): auto-generated by the AWS Java SDK code generator (see the
 * {@code @Generated} annotation) — regenerate rather than hand-editing.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class DeleteFolderRequestProtocolMarshaller implements Marshaller<Request<DeleteFolderRequest>, DeleteFolderRequest> {

    // REST-JSON binding: DELETE /api/v1/folders/{FolderId}, no payload members.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON).requestUri("/api/v1/folders/{FolderId}")
            .httpMethodName(HttpMethodName.DELETE).hasExplicitPayloadMember(false).hasPayloadMembers(false).serviceName("AmazonWorkDocs").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public DeleteFolderRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the given request into an HTTP DELETE request.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public Request<DeleteFolderRequest> marshall(DeleteFolderRequest deleteFolderRequest) {

        if (deleteFolderRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            final ProtocolRequestMarshaller<DeleteFolderRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    deleteFolderRequest);
            protocolMarshaller.startMarshalling();
            DeleteFolderRequestMarshaller.getInstance().marshall(deleteFolderRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| jentfoo/aws-sdk-java | aws-java-sdk-workdocs/src/main/java/com/amazonaws/services/workdocs/model/transform/DeleteFolderRequestProtocolMarshaller.java | Java | apache-2.0 | 2,620 |
using System;
using System.IO;
namespace Glass.Mapper.Sc
{
    /// <summary>
    /// Writes an opening fragment to a <see cref="TextWriter"/> immediately on
    /// construction and the matching closing fragment when disposed, so callers
    /// can wrap rendered output in a using-block.
    /// </summary>
    public class RenderingResult : IDisposable
    {
        private readonly TextWriter _writer;
        private readonly string _firstPart;
        private readonly string _lastPart;

        // FIX: Dispose must be safely callable multiple times (IDisposable
        // contract); without this guard a double dispose emitted the closing
        // fragment twice.
        private bool _disposed;

        /// <summary>
        /// Writes <paramref name="firstPart"/> to <paramref name="writer"/> immediately.
        /// </summary>
        /// <param name="writer">Destination writer; must not be null.</param>
        /// <param name="firstPart">Fragment emitted on construction.</param>
        /// <param name="lastPart">Fragment emitted on disposal.</param>
        public RenderingResult(TextWriter writer, string firstPart, string lastPart)
        {
            if (writer == null)
                throw new ArgumentNullException("writer");

            _writer = writer;
            _firstPart = firstPart;
            _lastPart = lastPart;

            _writer.Write(_firstPart);
        }

        /// <summary>Emits the closing fragment exactly once.</summary>
        public void Dispose()
        {
            if (_disposed)
                return;
            _disposed = true;
            _writer.Write(_lastPart);
        }
    }
}
/**
* Created by Jacky.Gao on 2017-02-09.
*/
import {alert} from '../MsgBox.js';
export default class EditPropertyConditionDialog{
    /**
     * Builds the (initially hidden) Bootstrap modal used to edit a property
     * condition and delegates body/footer construction to init().
     *
     * @param {Array} conditions existing condition list this dialog edits
     */
    constructor(conditions){
        this.conditions=conditions;
        // Static modal skeleton; the explicit z-index keeps it above other
        // designer dialogs. Body and footer are filled in by init().
        this.dialog=$(`<div class="modal fade" role="dialog" aria-hidden="true" style="z-index: 11001">
            <div class="modal-dialog">
                <div class="modal-content">
                    <div class="modal-header">
                        <button type="button" class="close" data-dismiss="modal" aria-hidden="true">
                            ×
                        </button>
                        <h4 class="modal-title">
                            ${window.i18n.dialog.editPropCondition.title}
                        </h4>
                    </div>
                    <div class="modal-body"></div>
                    <div class="modal-footer"></div>
                </div>
            </div>
        </div>`);
        const body=this.dialog.find('.modal-body'),footer=this.dialog.find(".modal-footer");
        this.init(body,footer);
    }
init(body,footer){
const _this=this;
this.joinGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.relation}</label></div>`);
this.joinSelect=$(`<select class="form-control" style="display: inline-block;width:430px;">
<option value="and">${window.i18n.dialog.editPropCondition.and}</option>
<option value="or">${window.i18n.dialog.editPropCondition.or}</option>
</select>`);
this.joinGroup.append(this.joinSelect);
body.append(this.joinGroup);
const leftGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.leftValue}</label></div>`);
this.leftTypeSelect=$(`<select class="form-control" style="display: inline-block;width: inherit">
<option value="current">${window.i18n.dialog.editPropCondition.currentValue}</option>
<option value="property">${window.i18n.dialog.editPropCondition.property}</option>
<option value="expression">${window.i18n.dialog.editPropCondition.expression}</option>
</select>`);
leftGroup.append(this.leftTypeSelect);
this.propertyGroup=$(`<span style="margin-left: 10px"><label>${window.i18n.dialog.editPropCondition.propName}</label></span>`);
this.propertySelect=$(`<select class="form-control" style="display: inline-block;width:320px;"></select>`);
this.propertyGroup.append(this.propertySelect);
leftGroup.append(this.propertyGroup);
body.append(leftGroup);
this.exprGroup=$(`<span style="margin-left: 10px"><label>${window.i18n.dialog.editPropCondition.expr}</label></span>`);
this.exprEditor=$(`<input type="text" style="display: inline-block;width:320px;" class="form-control">`);
this.exprGroup.append(this.exprEditor);
leftGroup.append(this.exprGroup);
this.exprEditor.change(function(){
const val=$(this).val();
const url=window._server+'/designer/conditionScriptValidation';
$.ajax({
url,
type:'POST',
data:{content:val},
success:function(errors){
if(errors.length>0){
alert(`${val} ${window.i18n.dialog.editPropCondition.syntaxError}`);
}
}
});
});
this.leftTypeSelect.change(function(){
const val=$(this).val();
if(val==='current'){
_this.exprGroup.hide();
_this.propertyGroup.hide();
}else if(val==='property'){
_this.exprGroup.hide();
_this.propertyGroup.show();
}else{
_this.propertyGroup.hide();
_this.exprGroup.show();
}
});
const operatorGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.operator}</label></div>`);
this.operatorSelect=$(`<select class="form-control" style="display: inline-block;width:490px;">
<option value=">">${window.i18n.dialog.editPropCondition.greater}</option>
<option value=">=">${window.i18n.dialog.editPropCondition.greaterEquals}</option>
<option value="<">${window.i18n.dialog.editPropCondition.less}</option>
<option value="<=">${window.i18n.dialog.editPropCondition.lessEquals}</option>
<option value="==">${window.i18n.dialog.editPropCondition.equals}</option>
<option value="!=">${window.i18n.dialog.editPropCondition.notEquals}</option>
<option value="in">${window.i18n.dialog.editPropCondition.in}</option>
<option value="like">${window.i18n.dialog.editPropCondition.like}</option>
</select>`);
operatorGroup.append(this.operatorSelect);
body.append(operatorGroup);
const valueGroup=$(`<div class="form-group"><label>${window.i18n.dialog.editPropCondition.valueExpr}</label></div>`);
this.valueEditor=$(`<input type="text" class="form-control" style="display: inline-block;width:477px;">`);
valueGroup.append(this.valueEditor);
body.append(valueGroup);
this.valueEditor.change(function(){
const val=$(this).val();
const url=window._server+'/designer/conditionScriptValidation';
$.ajax({
url,
type:'POST',
data:{content:val},
success:function(errors){
if(errors.length>0){
alert(`${val} ${window.i18n.dialog.editPropCondition.syntaxError}`);
}
}
});
});
const button=$(`<button class="btn btn-default">${window.i18n.dialog.editPropCondition.ok}</button>`);
button.click(function(){
let property=_this.propertySelect.val(),op=_this.operatorSelect.val(),value=_this.valueEditor.val(),join=_this.joinSelect.val(),type=_this.leftTypeSelect.val(),expr=_this.exprEditor.val();
if (type === 'property') {
if (property === '') {
alert(`${window.i18n.dialog.editPropCondition.selectProp}`);
return;
}
} else if(type==='expression') {
if(expr===''){
alert(`${window.i18n.dialog.editPropCondition.leftValueExpr}`);
return;
}
property=expr;
}else{
property = null;
}
if(type==='current'){
type="property";
}
if (op === '') {
alert(`${window.i18n.dialog.editPropCondition.selectOperator}`);
return;
}
if (value === '') {
alert(`${window.i18n.dialog.editPropCondition.inputExpr}`);
return;
}
if (_this.condition) {
if (_this.condition.join) {
_this.callback.call(_this,type, property, op, value, join);
} else {
_this.callback.call(_this,type, property, op, value);
}
} else if (_this.conditions.length > 0) {
_this.callback.call(_this,type, property, op, value, join);
} else {
_this.callback.call(_this,type, property, op, value);
}
_this.dialog.modal('hide');
});
footer.append(button);
}
    /**
     * Opens the dialog, pre-filling the widgets from an existing condition
     * when one is being edited.
     * @param callback  invoked with (type, property, op, value[, join]) when OK is clicked
     * @param fields    list of {name} objects used to populate the property selector
     * @param condition optional existing condition being edited; omitted when creating a new one
     */
    show(callback,fields,condition){
        this.callback=callback;
        this.condition=condition;
        // Left-hand side defaults to the cell's current value.
        this.type='current';
        if(condition){
            this.type=condition.type;
            // Only conditions that follow another one carry a join operator.
            if(condition.join){
                this.joinGroup.show();
            }else{
                this.joinGroup.hide();
            }
        }else{
            // NOTE(review): this.conditions is never assigned in this class's
            // visible code — presumably set by a subclass or caller; verify.
            if(this.conditions.length>0){
                this.joinGroup.show();
            }else{
                this.joinGroup.hide();
            }
        }
        // Rebuild the property selector from the current field list.
        this.propertySelect.empty();
        for(let field of fields){
            this.propertySelect.append(`<option>${field.name}</option>`);
        }
        if(condition){
            if(this.type==='expression'){
                this.leftTypeSelect.val("expression");
                this.exprEditor.val(condition.left);
                this.propertyGroup.hide();
                this.exprGroup.show();
            }else{
                // A non-empty "left" means a named property; empty means the
                // condition applies to the current value.
                if(condition.left && condition.left!==''){
                    this.propertySelect.val(condition.left);
                    this.leftTypeSelect.val("property");
                    this.propertyGroup.show();
                }else{
                    this.leftTypeSelect.val("current");
                    this.propertyGroup.hide();
                }
                this.exprGroup.hide();
            }
            // Accept both field spellings: "operation" and legacy "op".
            this.operatorSelect.val(condition.operation || condition.op);
            this.valueEditor.val(condition.right);
            this.joinSelect.val(condition.join);
        }else{
            this.leftTypeSelect.val("current");
            this.propertyGroup.hide();
            this.exprGroup.hide();
        }
        this.dialog.modal('show');
    }
} | youseries/ureport | ureport2-js/src/dialog/EditPropertyConditionDialog.js | JavaScript | apache-2.0 | 9,326 |
from collections import defaultdict
import codecs
def count(corpus, output_file):
    """Count 3-byte token frequencies in a parsed corpus file.

    Each whitespace-separated word whose byte length is a multiple of 3 is
    split into consecutive 3-byte chunks (a UTF-8 encoded CJK character is
    3 bytes) and each chunk's occurrence count is accumulated.  Words of any
    other byte length are collected into an "other" set.

    The report written to ``output_file`` (UTF-8) contains:
      1. a header line "<distinct chunk count> <other word count>",
      2. one "<chunk> <count>" line per chunk, most frequent first
         (chunks are decoded as GBK; undecodable ones are shown as N/A),
      3. the "other" words, sorted, decoded as UTF-8, one per line.
    """
    debug = False
    dic = defaultdict(int)
    other = set()
    fout = codecs.open(output_file, 'w', 'utf8')
    # Read as bytes so chunking operates on raw byte offsets.
    with open(corpus, 'rb') as fin:
        for line in fin:
            for word in line.split():
                if len(word) % 3 == 0:
                    # BUG FIX: the original sliced word[i:i+3], which yields
                    # overlapping windows; chunk i starts at byte 3*i.
                    for i in range(len(word) // 3):
                        dic[word[3 * i:3 * i + 3]] += 1
                else:
                    other.add(word)
    fout.write('%i %i\n' % (len(dic), len(other)))
    # (count, chunk) pairs sorted descending -> most frequent chunks first.
    record_list = sorted(((cnt, chunk) for chunk, cnt in dic.items()), reverse=True)
    i = 0
    for cnt, chunk in record_list:
        try:
            decoded = chunk.decode('GBK')
        except Exception:
            print(chunk)
            decoded = 'N/A'
        fout.write('%s %i\n' % (decoded, cnt))
        i += 1
        if i > 10 and debug:
            break
    for item in sorted(other):
        fout.write(item.decode('utf8'))
        fout.write('\n')
        i += 1
        if i > 20 and debug:
            break
    fout.close()
# Script entry point: generate token-frequency reports for the parsed
# Chinese and Japanese training corpora.
if __name__ =='__main__':
    count('data/train.zh_parsed', 'output/count.zh')
    count('data/train.ja_parsed', 'output/count.ja')
| jileiwang/CJ-Glo | tools/character_count.py | Python | apache-2.0 | 1,312 |
/*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.materials.git;
import com.googlecode.junit.ext.JunitExtRunner;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.RevisionContext;
import com.thoughtworks.go.domain.materials.TestSubprocessExecutionContext;
import com.thoughtworks.go.domain.materials.git.GitCommand;
import com.thoughtworks.go.domain.materials.git.GitTestRepo;
import com.thoughtworks.go.domain.materials.mercurial.StringRevision;
import com.thoughtworks.go.helper.TestRepo;
import com.thoughtworks.go.util.SystemEnvironment;
import com.thoughtworks.go.util.TestFileUtil;
import org.hamcrest.Matchers;
import org.hamcrest.core.Is;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.thoughtworks.go.domain.materials.git.GitTestRepo.*;
import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link GitMaterial} shallow-clone support: flag defaults across
 * construction paths, modification queries against a shallow clone,
 * on-demand unshallowing, and how the flag surfaces in config, XML and
 * attribute maps.
 */
@RunWith(JunitExtRunner.class)
public class GitMaterialShallowCloneTest {
    // Throwaway upstream git repository, recreated before each test.
    private GitTestRepo repo;
    // Directory the material clones/fetches into.
    private File workingDir;

    @Before
    public void setup() throws Exception {
        repo = new GitTestRepo();
        workingDir = TestFileUtil.createUniqueTempFolder("working");
    }

    @After
    public void teardown() throws Exception {
        TestRepo.internalTearDown();
    }

    // Shallow cloning must be opt-in on every constructor/config path.
    @Test
    public void defaultShallowFlagIsOff() throws Exception {
        assertThat(new GitMaterial(repo.projectRepositoryUrl()).isShallowClone(), is(false));
        assertThat(new GitMaterial(repo.projectRepositoryUrl(), null).isShallowClone(), is(false));
        assertThat(new GitMaterial(repo.projectRepositoryUrl(), true).isShallowClone(), is(true));
        assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl())).isShallowClone(), is(false));
        assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl(), GitMaterialConfig.DEFAULT_BRANCH, true)).isShallowClone(), is(true));
        assertThat(new GitMaterial(new GitMaterialConfig(repo.projectRepositoryUrl(), GitMaterialConfig.DEFAULT_BRANCH, false)).isShallowClone(), is(false));
        TestRepo.internalTearDown();
    }

    // A shallow clone still reports the latest commit, without older history.
    @Test
    public void shouldGetLatestModificationWithShallowClone() throws IOException {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        List<Modification> mods = material.latestModification(workingDir, context());
        assertThat(mods.size(), is(1));
        assertThat(mods.get(0).getComment(), Matchers.is("Added 'run-till-file-exists' ant target"));
        assertThat(localRepoFor(material).isShallow(), is(true));
        assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_0), is(false));
        assertThat(localRepoFor(material).currentRevision(), is(REVISION_4.getRevision()));
    }

    // Asking for history older than the shallow boundary must still work.
    @Test
    public void shouldGetModificationSinceANotInitiallyClonedRevision() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        List<Modification> modifications = material.modificationsSince(workingDir, REVISION_0, context());
        assertThat(modifications.size(), is(4));
        assertThat(modifications.get(0).getRevision(), is(REVISION_4.getRevision()));
        assertThat(modifications.get(0).getComment(), is("Added 'run-till-file-exists' ant target"));
        assertThat(modifications.get(1).getRevision(), is(REVISION_3.getRevision()));
        assertThat(modifications.get(1).getComment(), is("adding build.xml"));
        assertThat(modifications.get(2).getRevision(), is(REVISION_2.getRevision()));
        assertThat(modifications.get(2).getComment(), is("Created second.txt from first.txt"));
        assertThat(modifications.get(3).getRevision(), is(REVISION_1.getRevision()));
        assertThat(modifications.get(3).getComment(), is("Added second line"));
    }

    @Test
    public void shouldBeAbleToUpdateToRevisionNotFetched() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(REVISION_3, REVISION_2, 2), context());
        assertThat(localRepoFor(material).currentRevision(), is(REVISION_3.getRevision()));
        assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_2), is(true));
        assertThat(localRepoFor(material).containsRevisionInBranch(REVISION_3), is(true));
    }

    @Test
    public void configShouldIncludesShallowFlag() {
        GitMaterialConfig shallowConfig = (GitMaterialConfig) new GitMaterial(repo.projectRepositoryUrl(), true).config();
        assertThat(shallowConfig.isShallowClone(), is(true));
        GitMaterialConfig normalConfig = (GitMaterialConfig) new GitMaterial(repo.projectRepositoryUrl(), null).config();
        assertThat(normalConfig.isShallowClone(), is(false));
    }

    @Test
    public void xmlAttributesShouldIncludesShallowFlag() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        assertThat(material.getAttributesForXml().get("shallowClone"), Is.<Object>is(true));
    }

    @Test
    public void attributesShouldIncludeShallowFlag() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        Map gitConfig = (Map) (material.getAttributes(false).get("git-configuration"));
        assertThat(gitConfig.get("shallow-clone"), Is.<Object>is(true));
    }

    // Turning the flag off on a material whose working copy is shallow must
    // unshallow the existing clone rather than keep it truncated.
    @Test
    public void shouldConvertExistingRepoToFullRepoWhenShallowCloneIsOff() {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        material.latestModification(workingDir, context());
        assertThat(localRepoFor(material).isShallow(), is(true));
        material = new GitMaterial(repo.projectRepositoryUrl(), false);
        material.latestModification(workingDir, context());
        assertThat(localRepoFor(material).isShallow(), is(false));
    }

    // withShallowClone returns a copy; the original material is unchanged.
    @Test
    public void withShallowCloneShouldGenerateANewMaterialWithOverriddenShallowConfig() {
        GitMaterial original = new GitMaterial(repo.projectRepositoryUrl(), false);
        assertThat(original.withShallowClone(true).isShallowClone(), is(true));
        assertThat(original.withShallowClone(false).isShallowClone(), is(false));
        assertThat(original.isShallowClone(), is(false));
    }

    @Test
    public void updateToANewRevisionShouldNotResultInUnshallowing() throws IOException {
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(REVISION_4, REVISION_4, 1), context());
        assertThat(localRepoFor(material).isShallow(), is(true));
        List<Modification> modifications = repo.addFileAndPush("newfile", "add new file");
        StringRevision newRevision = new StringRevision(modifications.get(0).getRevision());
        material.updateTo(inMemoryConsumer(), workingDir, new RevisionContext(newRevision, newRevision, 1), context());
        assertThat(new File(workingDir, "newfile").exists(), is(true));
        assertThat(localRepoFor(material).isShallow(), is(true));
    }

    // Server-side behaviour is controlled by GO_SERVER_SHALLOW_CLONE.
    @Test
    public void shouldUnshallowServerSideRepoCompletelyOnRetrievingModificationsSincePreviousRevision() {
        SystemEnvironment mockSystemEnvironment = mock(SystemEnvironment.class);
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        when(mockSystemEnvironment.get(SystemEnvironment.GO_SERVER_SHALLOW_CLONE)).thenReturn(false);
        material.modificationsSince(workingDir, REVISION_4, new TestSubprocessExecutionContext(mockSystemEnvironment, true));
        assertThat(localRepoFor(material).isShallow(), is(false));
    }

    @Test
    public void shouldNotUnshallowOnServerSideIfShallowClonePropertyIsOnAndRepoIsAlreadyShallow() {
        SystemEnvironment mockSystemEnvironment = mock(SystemEnvironment.class);
        GitMaterial material = new GitMaterial(repo.projectRepositoryUrl(), true);
        when(mockSystemEnvironment.get(SystemEnvironment.GO_SERVER_SHALLOW_CLONE)).thenReturn(true);
        material.modificationsSince(workingDir, REVISION_4, new TestSubprocessExecutionContext(mockSystemEnvironment, false));
        assertThat(localRepoFor(material).isShallow(), is(true));
    }

    private TestSubprocessExecutionContext context() {
        return new TestSubprocessExecutionContext();
    }

    // Opens a GitCommand over the local working copy to inspect its state.
    private GitCommand localRepoFor(GitMaterial material) {
        return new GitCommand(material.getFingerprint(), workingDir, GitMaterialConfig.DEFAULT_BRANCH, false, new HashMap<>());
    }
}
| soundcloud/gocd | domain/test/com/thoughtworks/go/config/materials/git/GitMaterialShallowCloneTest.java | Java | apache-2.0 | 9,537 |
package wei_chih.service.handler.wei_chih;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.Socket;
import java.security.KeyPair;
import java.security.PublicKey;
import java.security.SignatureException;
import java.util.concurrent.locks.ReentrantLock;
import java.util.logging.Level;
import java.util.logging.Logger;
import message.Operation;
import message.OperationType;
import service.Key;
import service.KeyManager;
import service.handler.ConnectionHandler;
import wei_chih.service.Config;
import wei_chih.service.SocketServer;
import wei_chih.utility.MerkleTree;
import wei_chih.utility.Utils;
import wei_chih.message.wei_chih.Request;
import wei_chih.message.wei_chih.Acknowledgement;
/**
*
* @author Chienweichih
*/
/**
 * Per-connection handler for the Wei-Chih protocol: verifies a signed client
 * request, applies it to the per-service Merkle tree, and returns a signed
 * acknowledgement (plus file data / audit material where applicable).
 *
 * @author Chienweichih
 */
public class WeiChihHandler extends ConnectionHandler {
    // Serializes access to the shared per-service state arrays below;
    // handlers for different sockets run concurrently.
    private static final ReentrantLock LOCK;
    // One Merkle tree over the data directory per replicated service.
    private static final MerkleTree[] merkleTree;
    // Digest of the last-updated path before the update (used by AUDIT).
    private static final String[] digestBeforeUpdate;
    // Last non-audit operation handled per service.
    private static final Operation[] lastOP;
    // Per-service expected client sequence numbers (replay protection).
    private static final Integer[] sequenceNumbers;

    static {
        merkleTree = new MerkleTree[Config.SERVICE_NUM];
        digestBeforeUpdate = new String[Config.SERVICE_NUM];
        lastOP = new Operation[Config.SERVICE_NUM];
        sequenceNumbers = new Integer[Config.SERVICE_NUM];

        for (int i = 0; i < Config.SERVICE_NUM; ++i) {
            merkleTree[i] = new MerkleTree(new File(SocketServer.dataDirPath));
            digestBeforeUpdate[i] = "";
            lastOP[i] = new Operation(OperationType.DOWNLOAD, "", merkleTree[i].getRootHash());
            sequenceNumbers[i] = 0;
        }

        LOCK = new ReentrantLock();
    }

    public WeiChihHandler(Socket socket, KeyPair keyPair) {
        super(socket, keyPair);
    }

    /**
     * Handles one request/response exchange on this handler's socket.
     */
    @Override
    protected void handle(DataOutputStream out, DataInputStream in) {
        PublicKey clientPubKey = KeyManager.getInstance().getPublicKey(Key.CLIENT);

        // Derive the service index from whichever end of the socket carries
        // a port close to the configured base port.
        int portIndex = 0;
        if (Math.abs(socket.getPort() - Config.SERVICE_PORT[0]) < 10) {
            portIndex = socket.getPort() - Config.SERVICE_PORT[0];
        } else if (Math.abs(socket.getLocalPort() - Config.SERVICE_PORT[0]) < 10) {
            portIndex = socket.getLocalPort() - Config.SERVICE_PORT[0];
        }

        // BUG FIX: track whether LOCK was actually acquired.  The original
        // finally block called LOCK.unlock() unconditionally (guarded only by
        // a useless null check on a static final field), so an IOException
        // thrown while receiving/parsing the request — i.e. before lock() —
        // made finally throw IllegalMonitorStateException and mask the
        // original failure.
        boolean locked = false;
        try {
            Request req = Request.parse(Utils.receive(in));

            LOCK.lock();
            locked = true;

            if (!req.validate(clientPubKey)) {
                throw new SignatureException("REQ validation failure");
            }

            Operation op = req.getOperation();

            switch (op.getType()) {
                case UPLOAD:
                    // Remember the pre-update digest so AUDIT can rebuild the
                    // previous tree, then apply the update.  Falls through to
                    // the shared sequence-number check below.
                    digestBeforeUpdate[portIndex] = merkleTree[portIndex].getDigest(op.getPath());
                    merkleTree[portIndex].update(op.getPath(), op.getMessage());
                case DOWNLOAD:
                    // both upload and download, so no break
                    if (0 != op.getClientID().compareTo(String.valueOf(sequenceNumbers[portIndex]))) {
                        throw new java.security.InvalidParameterException();
                    }
                    sequenceNumbers[portIndex]++;
                default:
            }

            File file = new File(SocketServer.dataDirPath + op.getPath());
            String rootHash = merkleTree[portIndex].getRootHash();
            String fileHash = null;
            if (file.exists()) {
                fileHash = Utils.digest(file, Config.DIGEST_ALGORITHM);
            }

            Acknowledgement ack = new Acknowledgement(rootHash, fileHash, req);
            ack.sign(keyPair);
            Utils.send(out, ack.toString());

            switch (op.getType()) {
                case DOWNLOAD:
                    lastOP[portIndex] = op;
                    // Only the primary service transfers file contents.
                    if (portIndex + Config.SERVICE_PORT[0] == Config.SERVICE_PORT[0]) {
                        Utils.send(out, file);
                    }
                    break;
                case UPLOAD:
                    lastOP[portIndex] = op;
                    if (portIndex + Config.SERVICE_PORT[0] == Config.SERVICE_PORT[0]) {
                        file = new File(Config.DOWNLOADS_DIR_PATH + op.getPath());
                        Utils.receive(in, file);

                        // Reject uploads whose bytes do not match the digest
                        // the client signed in the request.
                        String digest = Utils.digest(file, Config.DIGEST_ALGORITHM);
                        if (0 != op.getMessage().compareTo(digest)) {
                            throw new java.io.IOException();
                        }
                    }
                    break;
                case AUDIT:
                    file = new File(Config.ATTESTATION_DIR_PATH + "/service-provider/voting");

                    switch (lastOP[portIndex].getType()) {
                        case DOWNLOAD:
                            Utils.write(file, rootHash);
                            break;
                        case UPLOAD:
                            // Reconstruct the tree as it was before the last
                            // upload so the auditor can verify the transition.
                            MerkleTree prevMerkleTree = new MerkleTree(merkleTree[portIndex]);
                            prevMerkleTree.update(lastOP[portIndex].getPath(), digestBeforeUpdate[portIndex]);
                            Utils.Serialize(file, prevMerkleTree);
                            break;
                        default:
                            throw new java.lang.Error();
                    }

                    Utils.send(out, file);
                    break;
                default:
            }

            socket.close();
        } catch (IOException | SignatureException ex) {
            Logger.getLogger(WeiChihHandler.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            if (locked) {
                LOCK.unlock();
            }
        }
    }
}
| CloudComLab/Voting-CAP | src/wei_chih/service/handler/wei_chih/WeiChihHandler.java | Java | apache-2.0 | 5,740 |
/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
* Copyright 2012-2019 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "histogram.h"
#include <string>
using namespace cbc;
using std::string;
// Attach this histogram to a libcouchbase instance: enable timing
// collection on the instance and fetch its KV-timings histogram handle.
void Histogram::install(lcb_INSTANCE *inst, FILE *out)
{
    lcb_STATUS rc;
    output = out;
    // Timings must be enabled before the histogram handle can be queried.
    lcb_enable_timings(inst);
    rc = lcb_cntl(inst, LCB_CNTL_GET, LCB_CNTL_KVTIMINGS, &hg);
    lcb_assert(rc == LCB_SUCCESS);
    lcb_assert(hg != NULL);
    // Silence the "unused variable" warning when asserts compile out (NDEBUG).
    (void)rc;
}
// Create a free-standing histogram not tied to any libcouchbase instance.
// Idempotent: a second call leaves the existing histogram untouched.
void Histogram::installStandalone(FILE *out)
{
    if (hg == NULL) {
        hg = lcb_histogram_create();
        output = out;
    }
}
// Print the collected timings to the configured output stream.
// Does nothing when no histogram has been installed.
void Histogram::write()
{
    if (hg != NULL) {
        lcb_histogram_print(hg, output);
    }
}
// Record a single duration sample; a no-op until a histogram is installed.
void Histogram::record(lcb_U64 duration)
{
    if (hg != NULL) {
        lcb_histogram_record(hg, duration);
    }
}
| avsej/libcouchbase | tools/common/histogram.cc | C++ | apache-2.0 | 1,439 |
# frozen_string_literal: true
require_relative "ruby/version"
module XTF
  module Ruby
    # Base error class for this gem; rescue it to catch gem-specific failures.
    class Error < StandardError
    end

    # placeholder
  end
end
| jamieorc/xtf-ruby | lib/xtf/ruby.rb | Ruby | apache-2.0 | 154 |
/* -------------------------------------------------------------------------- */
/* Copyright 2002-2020, OpenNebula Project, OpenNebula Systems */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); you may */
/* not use this file except in compliance with the License. You may obtain */
/* a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, software */
/* distributed under the License is distributed on an "AS IS" BASIS, */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */
/* See the License for the specific language governing permissions and */
/* limitations under the License. */
/* -------------------------------------------------------------------------- */
define(function (require) {
  // Unique identifier for the ACLs tab within Sunstone.
  var TAB_ID = 'acls-tab';
  return TAB_ID;
});
| baby-gnu/one | src/sunstone/public/app/tabs/acls-tab/tabId.js | JavaScript | apache-2.0 | 1,267 |
/* Copyright 2016-2017 Vector Creations Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gomatrixserverlib
import (
"encoding/binary"
"fmt"
"sort"
"unicode/utf8"
"github.com/tidwall/gjson"
)
// CanonicalJSON re-encodes the JSON in a canonical encoding. The encoding is
// the shortest possible encoding using integer values with sorted object keys.
// https://matrix.org/docs/spec/server_server/unstable.html#canonical-json
func CanonicalJSON(input []byte) ([]byte, error) {
	if gjson.Valid(string(input)) {
		return CanonicalJSONAssumeValid(input), nil
	}
	return nil, fmt.Errorf("invalid json")
}
// CanonicalJSONAssumeValid is the same as CanonicalJSON, but assumes the
// input is valid JSON. It first strips whitespace and redundant escapes,
// then sorts all object keys.
func CanonicalJSONAssumeValid(input []byte) []byte {
	compacted := CompactJSON(input, make([]byte, 0, len(input)))
	return SortJSON(compacted, make([]byte, 0, len(compacted)))
}
// SortJSON reencodes the JSON with the object keys sorted lexicographically
// by codepoint. The input must be valid JSON; the result is appended to
// output, which is returned.
func SortJSON(input, output []byte) []byte {
	parsed := gjson.ParseBytes(input)
	return sortJSONValue(parsed, RawJSONFromResult(parsed, input), output)
}
// sortJSONValue takes a gjson.Result and sorts it. inputJSON must be the
// raw JSON bytes that gjson.Result points to.
func sortJSONValue(input gjson.Result, inputJSON, output []byte) []byte {
	switch {
	case input.IsArray():
		return sortJSONArray(input, inputJSON, output)
	case input.IsObject():
		return sortJSONObject(input, inputJSON, output)
	default:
		// Scalars have no nested structure to sort; copy their raw bytes.
		return append(output, inputJSON...)
	}
}
// sortJSONArray takes a gjson.Result and sorts it, assuming it is an array.
// inputJSON must be the raw JSON bytes that gjson.Result points to.
func sortJSONArray(input gjson.Result, inputJSON, output []byte) []byte {
	// delim is '[' for the first element, ',' afterwards; it doubles as a
	// flag telling us whether the array turned out to be empty.
	delim := byte('[')
	input.ForEach(func(_, item gjson.Result) bool {
		output = append(output, delim)
		delim = ','
		output = sortJSONValue(item, RawJSONFromResult(item, inputJSON), output)
		return true // keep iterating
	})
	if delim == '[' {
		// Empty array: the opening bracket was never emitted.
		output = append(output, '[', ']')
	} else {
		output = append(output, ']')
	}
	return output
}
// sortJSONObject takes a gjson.Result and sorts it, assuming it is an object.
// inputJSON must be the raw JSON bytes that gjson.Result points to.
func sortJSONObject(input gjson.Result, inputJSON, output []byte) []byte {
	type member struct {
		parsedKey string       // decoded key, used only for ordering
		rawKey    []byte       // unparsed key JSON, emitted verbatim
		value     gjson.Result
	}
	var members []member
	// Collect every key/value pair so they can be sorted.
	input.ForEach(func(key, value gjson.Result) bool {
		members = append(members, member{
			parsedKey: key.String(),
			rawKey:    RawJSONFromResult(key, inputJSON),
			value:     value,
		})
		return true // keep iterating
	})
	// Order by the *parsed* key, per the canonical-JSON specification.
	sort.Slice(members, func(i, j int) bool {
		return members[i].parsedKey < members[j].parsedKey
	})
	delim := byte('{')
	for _, m := range members {
		output = append(output, delim)
		delim = ','
		// Emit the raw, unparsed JSON key — not the decoded string.
		output = append(output, m.rawKey...)
		output = append(output, ':')
		output = sortJSONValue(m.value, RawJSONFromResult(m.value, inputJSON), output)
	}
	if delim == '{' {
		// Empty object: the opening brace was never emitted.
		output = append(output, '{', '}')
	} else {
		output = append(output, '}')
	}
	return output
}
// CompactJSON makes the encoded JSON as small as possible by removing
// whitespace and unneeded unicode escapes. The compacted bytes are appended
// to output, which is returned; input must be valid JSON.
func CompactJSON(input, output []byte) []byte {
	var i int
	for i < len(input) {
		c := input[i]
		i++
		// The valid whitespace characters are all less than or equal to SPACE 0x20.
		// The valid non-white characters are all greater than SPACE 0x20.
		// So we can check for whitespace by comparing against SPACE 0x20.
		if c <= ' ' {
			// Skip over whitespace.
			continue
		}
		// Add the non-whitespace character to the output.
		output = append(output, c)
		if c == '"' {
			// We are inside a string; copy it through verbatim except for
			// escape sequences, which may be shortened.
			for i < len(input) {
				c = input[i]
				i++
				// Check if this is an escape sequence.
				if c == '\\' {
					escape := input[i]
					i++
					if escape == 'u' {
						// If this is a unicode escape then we need to handle it specially
						output, i = compactUnicodeEscape(input, output, i)
					} else if escape == '/' {
						// JSON does not require escaping '/', but allows encoders to escape it as a special case.
						// Since the escape isn't required we remove it.
						output = append(output, escape)
					} else {
						// All other permitted escapes are single character escapes that are already in their shortest form.
						output = append(output, '\\', escape)
					}
				} else {
					output = append(output, c)
				}
				if c == '"' {
					// Closing quote: the string is finished.
					break
				}
			}
		}
	}
	return output
}
// compactUnicodeEscape unpacks a 4 byte unicode escape starting at index.
// If the escape is a surrogate pair then decode the 6 byte \uXXXX escape
// that follows. Returns the output slice and a new input index.
// If the input is truncated mid-escape, the function bails out by returning
// index == len(input), which terminates the caller's scan loop.
func compactUnicodeEscape(input, output []byte, index int) ([]byte, int) {
	const (
		// ESCAPES[c] is the shortest escape letter for control character c
		// ('u' marks those that still need a full \u00XX escape).
		ESCAPES = "uuuuuuuubtnufruuuuuuuuuuuuuuuuuu"
		HEX     = "0123456789ABCDEF"
	)
	// If there aren't enough bytes to decode the hex escape then return.
	if len(input)-index < 4 {
		return output, len(input)
	}
	// Decode the 4 hex digits.
	c := readHexDigits(input[index:])
	index += 4
	if c < ' ' {
		// If the character is less than SPACE 0x20 then it will need escaping.
		escape := ESCAPES[c]
		output = append(output, '\\', escape)
		if escape == 'u' {
			output = append(output, '0', '0', byte('0'+(c>>4)), HEX[c&0xF])
		}
	} else if c == '\\' || c == '"' {
		// Otherwise the character only needs escaping if it is a QUOTE '"' or BACKSLASH '\\'.
		output = append(output, '\\', byte(c))
	} else if c < 0xD800 || c >= 0xE000 {
		// If the character isn't a surrogate pair then encode it directly as UTF-8.
		var buffer [4]byte
		n := utf8.EncodeRune(buffer[:], rune(c))
		output = append(output, buffer[:n]...)
	} else {
		// Otherwise the escaped character was the first part of a UTF-16 style surrogate pair.
		// The next 6 bytes MUST be a '\uXXXX'.
		// If there aren't enough bytes to decode the hex escape then return.
		if len(input)-index < 6 {
			return output, len(input)
		}
		// Decode the 4 hex digits from the '\uXXXX'.
		surrogate := readHexDigits(input[index+2:])
		index += 6
		// Reconstruct the UCS4 codepoint from the surrogates.
		codepoint := 0x10000 + (((c & 0x3FF) << 10) | (surrogate & 0x3FF))
		// Encode the character as UTF-8.
		var buffer [4]byte
		n := utf8.EncodeRune(buffer[:], rune(codepoint))
		output = append(output, buffer[:n]...)
	}
	return output, index
}
// Read 4 hex digits from the input slice and return their numeric value.
// The caller must supply at least 4 bytes; no validation is performed, so
// non-hex input yields an unspecified value.
// Taken from https://github.com/NegativeMjark/indolentjson-rust/blob/8b959791fe2656a88f189c5d60d153be05fe3deb/src/readhex.rs#L21
func readHexDigits(input []byte) uint32 {
	// Process all 4 ASCII digits in parallel inside one 32-bit word (SWAR).
	hex := binary.BigEndian.Uint32(input)
	// subtract '0'
	hex -= 0x30303030
	// strip the higher bits, maps 'a' => 'A'
	hex &= 0x1F1F1F1F
	mask := hex & 0x10101010
	// subtract 'A' - 10 - '9' - 9 = 7 from the letters.
	hex -= mask >> 1
	hex += mask >> 4
	// collect the nibbles
	hex |= hex >> 4
	hex &= 0xFF00FF
	hex |= hex >> 8
	return hex & 0xFFFF
}
// RawJSONFromResult extracts the raw JSON bytes pointed to by result.
// input must be the json bytes that were used to generate result.
func RawJSONFromResult(result gjson.Result, input []byte) []byte {
	// When result.Index is positive the parsed value lies inside input, so a
	// subslice avoids a copy (see the gjson README); a zero Index means the
	// position is unknown and result.Raw must be copied instead.
	if result.Index > 0 {
		return input[result.Index : result.Index+len(result.Raw)]
	}
	return []byte(result.Raw)
}
| gperdomor/dendrite | vendor/src/github.com/matrix-org/gomatrixserverlib/json.go | GO | apache-2.0 | 8,987 |
package com.oauth.services.security;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.keygen.BytesKeyGenerator;
import org.springframework.security.crypto.keygen.KeyGenerators;
import org.springframework.security.crypto.password.StandardPasswordEncoder;
/**
 * Ad-hoc driver that prints sample outputs of the Spring Security password
 * encoders and of a random salt generator.
 *
 * Created by yichen.wei on 6/24/17.
 */
public class Test {
    public static void main(String[] args) {
        BytesKeyGenerator saltGenerator = KeyGenerators.secureRandom();

        // StandardPasswordEncoder salts internally, so repeated encode()
        // calls on the same input yield different digests.
        StandardPasswordEncoder encode = new StandardPasswordEncoder();
        System.out.println("abcfwef...");
        System.out.println(encode.encode("abcdef"));

        // BUG FIX: generateKey() returns byte[]; calling toString() on an
        // array printed its identity hash ("[B@...") instead of the salt
        // bytes. Arrays.toString renders the actual content.
        String salt = java.util.Arrays.toString(saltGenerator.generateKey());
        System.out.println(salt);
        System.out.println(saltGenerator.getKeyLength());

        BCryptPasswordEncoder bc = new BCryptPasswordEncoder();
        System.out.println(bc.encode("admin"));
    }
}
| kinddevil/course-service | oauth/src/main/java/com/oauth/services/security/Test.java | Java | apache-2.0 | 1,438 |
<?php
/**
* Created by PhpStorm.
* User: Bartosz Bartniczak <kontakt@bartoszbartniczak.pl>
*/
namespace BartoszBartniczak\EventSourcing\Shop\User\Event;
use BartoszBartniczak\EventSourcing\Event\Id;
/**
 * Domain event recorded when a user's account is activated with a token.
 */
class UserAccountHasBeenActivated extends Event
{
    /**
     * The token that was used to activate the account.
     *
     * @var string
     */
    protected $activationToken;

    /**
     * @param Id        $eventId         Unique identifier of this event.
     * @param \DateTime $dateTime        Moment the activation occurred.
     * @param string    $userEmail       E-mail address identifying the user.
     * @param string    $activationToken Token used to activate the account.
     */
    public function __construct(Id $eventId, \DateTime $dateTime, string $userEmail, string $activationToken)
    {
        parent::__construct($eventId, $dateTime, $userEmail);
        $this->activationToken = $activationToken;
    }

    /**
     * @return string The token used to activate the account.
     */
    public function getActivationToken(): string
    {
        return $this->activationToken;
    }
}
/*
* Copyright 2013 Square Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package flowless;
import android.os.Parcelable;
import android.support.annotation.NonNull;
/**
* Used by History to convert your key objects to and from instances of
* {@link android.os.Parcelable}.
*/
public interface KeyParceler {
    /**
     * Converts a key into a {@link Parcelable} so it can be written into
     * saved instance state.
     *
     * @param key the key object to serialize; never null
     * @return a Parcelable representation of the key; never null
     */
    @NonNull
    Parcelable toParcelable(@NonNull Object key);

    /**
     * Restores a key previously serialized by {@link #toParcelable(Object)}.
     *
     * @param parcelable the serialized form of the key; never null
     * @return the reconstructed key object; never null
     */
    @NonNull
    Object toKey(@NonNull Parcelable parcelable);
}
| Zhuinden/flowless | flowless-library/src/main/java/flowless/KeyParceler.java | Java | apache-2.0 | 964 |
/*
* Copyright 2012 Michael Bischoff
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.jpaw.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.Externalizable;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.OutputStream;
import java.nio.charset.Charset;
/**
* Functionality which corresponds to String, but for byte arrays.
* Essential feature is that the class is immutable, so you can use it in messaging without making deep copies.
* Mimicking {@link java.lang.String}, the class contains offset and length fields to allow sharing of the buffer.
* <p>
* This should really exist in Java SE already.
*
* @author Michael Bischoff
*
*/
public final class ByteArray implements Externalizable, Cloneable {
    private static final long serialVersionUID = 2782729564297256974L;
    /** UTF-8 character set; guaranteed to be available on all platforms. */
    public static final Charset CHARSET_UTF8 = Charset.forName("UTF-8");
    /** If a single serialized length byte holds this value, a full four-byte length follows.
     *  Not 0 or 255, because those occur frequently as real lengths. */
    private static final int MAGIC_LENGTH_INDICATING_32_BIT_SIZE = 247;

    private final byte[] buffer;
    private final int offset;
    private final int length;
    private ByteArray extraFieldJustRequiredForDeserialization = null; // transient temporary field, see readExternal/readResolve

    private static final byte[] ZERO_JAVA_BYTE_ARRAY = new byte[0];
    /** Shared singleton for the empty ByteArray; safe because the class is immutable. */
    public static final ByteArray ZERO_BYTE_ARRAY = new ByteArray(ZERO_JAVA_BYTE_ARRAY);

    /** No-arg constructor required for Serializable interface. */
    @Deprecated
    public ByteArray() {
        this(ZERO_JAVA_BYTE_ARRAY);
    }

    /** Constructs a ByteArray from a source byte[], which is defensively copied. */
    public ByteArray(final byte[] source) {
        if (source == null || source.length == 0) {
            buffer = ZERO_JAVA_BYTE_ARRAY;
            offset = 0;
            length = 0;
        } else {
            buffer = source.clone();  // benchmarks have shown that clone() is equally fast as System.arraycopy for all lengths > 0
            offset = 0;
            length = buffer.length;
        }
    }

    // construct a ByteArray from a trusted source byte[]
    // this method is always called with unsafeTrustedReuseOfJavaByteArray = true, the parameter is only required in order to distinguish the constructor
    // from the copying one
    private ByteArray(final byte[] source, final boolean unsafeTrustedReuseOfJavaByteArray) {
        if (source == null || source.length == 0) {
            buffer = ZERO_JAVA_BYTE_ARRAY;
            offset = 0;
            length = 0;
        } else {
            buffer = unsafeTrustedReuseOfJavaByteArray ? source : source.clone();
            offset = 0;
            length = buffer.length;
        }
    }

    /** Constructs a ByteArray from a ByteArrayOutputStream, which has just been constructed by some previous process.
     * @throws IOException */
    public static ByteArray fromByteArrayOutputStream(final ByteArrayOutputStream baos) throws IOException {
        baos.flush();
        return new ByteArray(baos.toByteArray(), true);  // toByteArray() already copies, so the result may be reused
    }

    /** Writes the contents of this ByteArray to an OutputStream. */
    public void toOutputStream(final OutputStream os) throws IOException {
        os.write(buffer, offset, length);
    }

    /** Constructs a ByteArray from the provided DataInput, with a predefined length. */
    public static ByteArray fromDataInput(final DataInput in, final int len) throws IOException {
        if (len <= 0)
            return ZERO_BYTE_ARRAY;
        final byte[] tmp = new byte[len];
        in.readFully(tmp);
        return new ByteArray(tmp, true);
    }

    /** Reads bytes from an input stream, up to maxBytes (or all which exist, if maxBytes = 0). */
    public static ByteArray fromInputStream(final InputStream is, final int maxBytes) throws IOException {
        final ByteBuilder tmp = maxBytes > 0 ? new ByteBuilder(maxBytes, CHARSET_UTF8) : new ByteBuilder();
        tmp.readFromInputStream(is, maxBytes);
        if (tmp.length() == 0)
            return ZERO_BYTE_ARRAY;
        return new ByteArray(tmp.getCurrentBuffer(), 0, tmp.length());
    }

    /** Constructs a ByteArray from the provided ByteBuilder. */
    public static ByteArray fromByteBuilder(final ByteBuilder in) {
        if (in == null || in.length() == 0)
            return ZERO_BYTE_ARRAY;
        return new ByteArray(in.getCurrentBuffer(), 0, in.length());
    }

    /** Constructs a ByteArray from the provided String, using the UTF-8 character set. */
    public static ByteArray fromString(final String in) {
        return fromString(in, CHARSET_UTF8);
    }

    /** Constructs a ByteArray from the provided String, using the specified character set. */
    public static ByteArray fromString(final String in, final Charset cs) {
        if (in == null || in.length() == 0)
            return ZERO_BYTE_ARRAY;
        return new ByteArray(in.getBytes(cs), true); // we know these bytes are never changed, so no extra copy required
    }

    /** Returns the byte array as a string. Unlike toString(), which uses the JVM default character set, this method always uses UTF-8. */
    public String asString() {
        return asString(CHARSET_UTF8);
    }

    /** Returns the byte array as a string, using a specified character set. */
    public String asString(final Charset cs) {
        return new String(buffer, offset, length, cs);
    }

    /** Constructs a ByteArray from a source byte[], with offset and length. source may not be null. */
    public ByteArray(final byte[] source, final int offset, final int length) {
        if (source == null || offset < 0 || length < 0 || offset + length > source.length)
            throw new IllegalArgumentException();
        buffer = new byte[length];
        System.arraycopy(source, offset, buffer, 0, length);
        this.offset = 0;
        this.length = length;
    }

    /** Constructs a ByteArray from another one. Could also just assign it due to immutability.
     * The only benefit of this constructor is that it converts a null parameter into the non-null empty ByteArray. */
    public ByteArray(final ByteArray source) {
        if (source == null) {
            buffer = ZERO_JAVA_BYTE_ARRAY;
            offset = 0;
            length = 0;
        } else {
            buffer = source.buffer;      // no array copy required due to immutability
            offset = source.offset;
            length = source.length;
        }
    }

    /** Constructs a ByteArray from a source ByteArray, with offset and length. source may not be null.
     * Similar to the subArray member method. */
    public ByteArray(final ByteArray source, final int offset, final int length) {
        if (source == null || offset < 0 || length < 0 || offset + length > source.length)
            throw new IllegalArgumentException();
        this.buffer = source.buffer;     // no array copy required due to immutability
        this.offset = source.offset + offset;
        this.length = length;
    }

    /** Returns a ByteArray which contains a subsequence of the bytes of this one. The underlying buffer is shared.
     * Functionality wise this corresponds to String.substring (before Java 6) or ByteBuffer.slice. */
    public ByteArray subArray(final int xoffset, final int xlength) {
        // create a new ByteArray sharing the same buffer
        return new ByteArray(this, xoffset, xlength);
    }

    /** Returns a ByteArray which contains a subsequence of the bytes of this one. The underlying buffer is not shared.
     * Use this variant if the original ByteArray holds a much larger byte[] and can be GCed afterwards. */
    public ByteArray subArrayUnshared(final int xoffset, final int xlength) {
        if (xoffset < 0 || xlength < 0 || xoffset + xlength > this.length)
            throw new IllegalArgumentException();
        final byte[] newBuffer = new byte[xlength];
        // FIX: xoffset is relative to the visible window, so the copy must start at
        // this.offset + xoffset (as in getBytes(int, int)). Previously the copy started
        // at the raw buffer position xoffset, returning wrong bytes for instances which
        // share a buffer with a nonzero offset (e.g. created via subArray()).
        System.arraycopy(buffer, this.offset + xoffset, newBuffer, 0, xlength);
        // create a new ByteArray using the new buffer
        return new ByteArray(newBuffer, true);
    }

    @Override
    public ByteArray clone() {
        return new ByteArray(this);
    }

    /** Returns the number of visible bytes in this ByteArray. */
    public int length() {
        return this.length;
    }

    /** Returns the first index at which byte x occurs, or -1 if it does not occur. */
    public int indexOf(final byte x) {
        int i = 0;
        while (i < length) {
            if (buffer[offset + i] == x)
                return i;
            ++i;
        }
        return -1;
    }

    /** Returns the first index at or after fromIndex at which byte x occurs, or -1 if it does not occur. */
    public int indexOf(final byte x, final int fromIndex) {
        int i = fromIndex >= 0 ? fromIndex : 0;
        while (i < length) {
            if (buffer[offset + i] == x)
                return i;
            ++i;
        }
        return -1;
    }

    /** Returns the last index at which byte x occurs, or -1 if it does not occur. */
    public int lastIndexOf(final byte x) {
        int i = length;
        while (i > 0) {
            if (buffer[offset + --i] == x)
                return i;
        }
        return -1;
    }

    /** Returns the last index at or before fromIndex at which byte x occurs, or -1 if it does not occur. */
    public int lastIndexOf(final byte x, final int fromIndex) {
        int i = fromIndex >= length ? length - 1 : fromIndex;
        while (i >= 0) {
            if (buffer[offset + i] == x)
                return i;
            --i;
        }
        return -1;
    }

    /** Returns the byte at position pos (relative to the visible window).
     * @throws IllegalArgumentException if pos is out of range */
    public byte byteAt(final int pos) {
        if (pos < 0 || pos >= length)
            throw new IllegalArgumentException();
        return buffer[offset + pos];
    }

    /** Provides the contents of this ByteArray as an InputStream. */
    public ByteArrayInputStream asByteArrayInputStream() {
        return new ByteArrayInputStream(buffer, offset, length());
    }

    /** Returns a defensive copy of the contents. */
    public byte[] getBytes() {
        final byte[] result = new byte[length];
        System.arraycopy(buffer, offset, result, 0, length);
        return result;
    }

    /** Returns a defensive copy of part of the contents. Shorthand for subArray(offset, length).getBytes(),
     * which would create a temporary object. */
    public byte[] getBytes(final int xoffset, final int xlength) {
        if (xoffset < 0 || xlength < 0 || xoffset + xlength > this.length)
            throw new IllegalArgumentException();
        final byte[] result = new byte[xlength];
        System.arraycopy(buffer, xoffset + this.offset, result, 0, xlength);
        return result;
    }

    // internal comparison against a raw byte range; callers guarantee valid bounds
    private boolean contentEqualsSub(final byte[] dst, final int dstOffset, final int dstLength) {
        if (length != dstLength)
            return false;
        for (int i = 0; i < dstLength; ++i) {
            if (buffer[offset + i] != dst[dstOffset + i])
                return false;
        }
        return true;
    }

    // following: all arguments must be not null

    /** Compares the visible contents of this ByteArray with another one. */
    public boolean contentEquals(final ByteArray that) {
        return contentEqualsSub(that.buffer, that.offset, that.length);
    }

    /** Compares the visible contents of this ByteArray with a raw byte array. */
    public boolean contentEquals(final byte[] that) {
        return contentEqualsSub(that, 0, that.length);
    }

    /** Compares the visible contents of this ByteArray with a range of a raw byte array.
     * @throws IllegalArgumentException if the given range is invalid */
    public boolean contentEquals(final byte[] that, final int thatOffset, final int thatLength) {
        if (thatOffset < 0 || thatLength < 0 || thatOffset + thatLength > that.length)
            throw new IllegalArgumentException();
        return contentEqualsSub(that, thatOffset, thatLength);
    }

    /** Returns true if the two instances share the same backing buffer (for debugging). */
    public boolean shareBuffer(final ByteArray that) {
        return buffer == that.buffer;
    }

    @Override
    public int hashCode() {
        int hash = 997;
        for (int i = 0; i < length; ++i) {
            hash = 29 * hash + buffer[offset + i];
        }
        return hash;
    }

    /** Two ByteArrays are considered equal if they have the same visible contents. */
    @Override
    public boolean equals(final Object that) {
        if (this == that)
            return true;
        if (that == null || getClass() != that.getClass())
            return false;
        final ByteArray xthat = (ByteArray)that;
        // same as contentEqualsSub(..) now
        if (this.length != xthat.length)
            return false;
        for (int i = 0; i < length; ++i) {
            if (buffer[offset + i] != xthat.buffer[xthat.offset + i])
                return false;
        }
        return true;
    }

    /** Support function to allow dumping contents to DataOutput without the need to expose our internal buffer. */
    public void writeToDataOutput(final DataOutput out) throws IOException {
        out.write(buffer, offset, length);
    }

    /** Returns a hex dump of the visible contents, starting at position startAt, limited to maxlength bytes
     * (no limit if maxlength is 0 or negative). */
    public String hexdump(final int startAt, final int maxlength) {
        if (length <= startAt)
            return "";  // no data to dump
        // FIX: only length - startAt bytes remain after skipping startAt; previously the
        // full length was passed, reading past the visible window when startAt > 0.
        final int avail = length - startAt;
        return ByteUtil.dump(buffer, offset + startAt, (maxlength > 0 && maxlength < avail) ? maxlength : avail);
    }

    @Override
    public void writeExternal(final ObjectOutput out) throws IOException {
        // compact serialized form: 1-byte length for most sizes, escape byte + 4-byte length otherwise
        if (length < 256 && length != MAGIC_LENGTH_INDICATING_32_BIT_SIZE) {
            out.writeByte(length);
        } else {
            out.writeByte(MAGIC_LENGTH_INDICATING_32_BIT_SIZE);
            out.writeInt(length);
        }
        out.write(buffer, offset, length);
    }

    /** Support function to allow an ordinary byte[] to be written in the same serialized form. */
    public static void writeBytes(final ObjectOutput out, final byte[] buffer, final int offset, final int length) throws IOException {
        if (length < 256 && length != MAGIC_LENGTH_INDICATING_32_BIT_SIZE) {
            out.writeByte(length);
        } else {
            out.writeByte(MAGIC_LENGTH_INDICATING_32_BIT_SIZE);
            out.writeInt(length);
        }
        out.write(buffer, offset, length);
    }

    /** Reads a byte[] previously written by writeBytes / writeExternal.
     * @throws IOException if the stream ends prematurely */
    public static byte[] readBytes(final ObjectInput in) throws IOException {
        int newlength = in.readByte();
        if (newlength < 0)
            newlength += 256;  // want full unsigned range
        if (newlength == MAGIC_LENGTH_INDICATING_32_BIT_SIZE)  // magic to indicate four byte length
            newlength = in.readInt();
        if (newlength == 0)
            return ZERO_JAVA_BYTE_ARRAY;
        final byte[] localBuffer = new byte[newlength];
        int done = 0;
        while (done < newlength) {
            final int nRead = in.read(localBuffer, done, newlength - done);  // may return less bytes than requested!
            if (nRead <= 0)
                throw new IOException("deserialization of ByteArray returned " + nRead + " while expecting " + (newlength - done));
            done += nRead;
        }
        return localBuffer;
    }

    /** Factory method to read a ByteArray from an ObjectInput via the readBytes helper function. */
    public static ByteArray read(final ObjectInput in) throws IOException {
        return new ByteArray(readBytes(in), true);
    }

    // a direct implementation of this method would conflict with the immutability / "final" attributes of the field
    // Weird Java language design again. If readExternal() is kind of a constructor, why are assignments to final fields not allowed here?
    // alternatives around are to add artificial fields and use readResolve / proxies or to discard the "final" attributes,
    // or using reflection to set the values (!?). Bleh!
    // We're using kind of Bloch's "proxy" pattern (Essential Java, #78), namely a single-sided variant with just a single additional member field,
    // which lets us preserve the immutability
    // see also http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6379948 for discussion around this
    @Override
    public void readExternal(final ObjectInput in) throws IOException {
        extraFieldJustRequiredForDeserialization = new ByteArray(readBytes(in), true);
    }

    /** Serialization proxy hook: replaces the temporary instance created by readExternal(). */
    public Object readResolve() {
        if (extraFieldJustRequiredForDeserialization == null)
            throw new RuntimeException("readResolve() called on instance not obtained via readExternal()");
        return extraFieldJustRequiredForDeserialization;
    }

    /** Factory method to construct a ByteArray from a prevalidated base64 byte sequence. Returns null if the data is not valid base64. */
    public static ByteArray fromBase64(final byte[] data, final int offset, final int length) {
        if (length == 0)
            return ZERO_BYTE_ARRAY;
        final byte[] tmp = Base64.decode(data, offset, length);
        if (tmp == null)
            return null;
        return new ByteArray(tmp, true);
    }

    /** Appends the base64 encoded form of this ByteArray to the provided ByteBuilder. */
    public void appendBase64(final ByteBuilder b) {
        Base64.encodeToByte(b, buffer, offset, length);
    }

    /** Appends the raw contents of this ByteArray to the provided ByteBuilder. */
    public void appendToRaw(final ByteBuilder b) {
        b.write(buffer, offset, length);
    }

    /** Returns the contents of this ByteArray as a base64 encoded string.
     * @since 1.2.12 */
    public String asBase64() {
        final ByteBuilder tmp = new ByteBuilder(0, null);
        Base64.encodeToByte(tmp, buffer, offset, length);
        return tmp.toString();
    }

    /** Returns the String representation of the visible bytes portion, using the JVM default character set.
     * Prefer asString() for a platform-independent (UTF-8) conversion. */
    @Override
    public String toString() {
        return new String(buffer, offset, length);
    }
}
| jpaw/jpaw | jpaw-util/src/main/java/de/jpaw/util/ByteArray.java | Java | apache-2.0 | 18,751 |
/*
* Copyright (C) 2014 Jörg Prante
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xbib.elasticsearch.plugin.jdbc.feeder;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.metrics.MeterMetric;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.JsonSettingsLoader;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.river.RiverName;
import org.xbib.elasticsearch.plugin.jdbc.RiverRunnable;
import org.xbib.elasticsearch.plugin.jdbc.classloader.uri.URIClassLoader;
import org.xbib.elasticsearch.plugin.jdbc.client.Ingest;
import org.xbib.elasticsearch.plugin.jdbc.client.IngestFactory;
import org.xbib.elasticsearch.plugin.jdbc.client.transport.BulkTransportClient;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronExpression;
import org.xbib.elasticsearch.plugin.jdbc.cron.CronThreadPoolExecutor;
import org.xbib.elasticsearch.plugin.jdbc.state.RiverStatesMetaData;
import org.xbib.elasticsearch.plugin.jdbc.util.RiverServiceLoader;
import org.xbib.elasticsearch.river.jdbc.RiverFlow;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Reader;
import java.io.Writer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.collect.Lists.newLinkedList;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
* Standalone feeder for JDBC
*/
public class JDBCFeeder {

    private final static ESLogger logger = ESLoggerFactory.getLogger("JDBCFeeder");

    /**
     * Register metadata factory in Elasticsearch for being able to decode
     * ClusterStateResponse with RiverStatesMetadata
     */
    static {
        MetaData.registerFactory(RiverStatesMetaData.TYPE, RiverStatesMetaData.FACTORY);
    }

    // source of the JSON feed definition (normally stdin)
    protected Reader reader;

    // sink for regular output (normally stdout)
    protected Writer writer;

    // sink for error output (normally stderr)
    protected PrintStream printStream;

    // factory for bulk ingest clients, built from the parsed settings
    protected IngestFactory ingestFactory;

    /**
     * This ingest is the client for the river flow state operations
     */
    private Ingest ingest;

    // the river flow executing the JDBC definitions
    private RiverFlow riverFlow;

    // the list of "jdbc" definition maps parsed from the input JSON
    private List<Map<String, Object>> definitions;

    // executor running the feeder thread(s); concrete type depends on scheduling mode
    private ThreadPoolExecutor threadPoolExecutor;

    private volatile Thread feederThread;

    private volatile boolean closed;

    /**
     * Constructor for running this from command line
     */
    public JDBCFeeder() {
        // ensure resources are released on Ctrl-C / JVM exit
        Runtime.getRuntime().addShutdownHook(shutdownHook());
    }

    /**
     * Entry point: wires stdin/stdout/stderr and starts the feeder.
     * Blocks until all scheduled runs have completed.
     */
    public void exec() throws Exception {
        readFrom(new InputStreamReader(System.in, "UTF-8"))
                .writeTo(new OutputStreamWriter(System.out, "UTF-8"))
                .errorsTo(System.err)
                .start();
    }

    /**
     * Parses the JSON feed definition from the given reader and creates the
     * river flow. The "jdbc" element may be a single definition object or a
     * list of definitions.
     *
     * @param reader source of the JSON definition
     * @return this, for chaining
     */
    @SuppressWarnings("unchecked")
    public JDBCFeeder readFrom(Reader reader) {
        this.reader = reader;
        try {
            Map<String, Object> map = XContentFactory.xContent(XContentType.JSON).createParser(reader).mapOrderedAndClose();
            Settings settings = settingsBuilder()
                    .put(new JsonSettingsLoader().load(jsonBuilder().map(map).string()))
                    .build();
            this.definitions = newLinkedList();
            Object pipeline = map.get("jdbc");
            if (pipeline instanceof Map) {
                definitions.add((Map<String, Object>) pipeline);
            }
            if (pipeline instanceof List) {
                definitions.addAll((List<Map<String, Object>>) pipeline);
            }
            // before running, create the river flow
            createRiverFlow(map, settings);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
        return this;
    }

    /**
     * Creates the river flow for the strategy named in the spec (default
     * "simple"), builds the ingest factory, and wires a private ingest client
     * into the flow for river state operations.
     *
     * @param spec     the parsed definition map
     * @param settings settings derived from the definition
     * @return the configured river flow
     * @throws IOException if the ingest client cannot be created
     */
    protected RiverFlow createRiverFlow(Map<String, Object> spec, Settings settings) throws IOException {
        String strategy = XContentMapValues.nodeStringValue(spec.get("strategy"), "simple");
        this.riverFlow = RiverServiceLoader.newRiverFlow(strategy);
        logger.debug("strategy {}: river flow class {}, spec = {} settings = {}",
                strategy, riverFlow.getClass().getName(), spec, settings.getAsMap());
        this.ingestFactory = createIngestFactory(settings);
        // out private ingest, needed for having a client in the river flow
        this.ingest = ingestFactory.create();
        riverFlow.setRiverName(new RiverName("jdbc", "feeder"))
                .setSettings(settings)
                .setClient(ingest.client())
                .setIngestFactory(ingestFactory)
                .setMetric(new MeterMetric(Executors.newScheduledThreadPool(1), TimeUnit.SECONDS))
                .setQueue(new ConcurrentLinkedDeque<Map<String, Object>>());
        return riverFlow;
    }

    /**
     * Sets the writer for regular output.
     *
     * @return this, for chaining
     */
    public JDBCFeeder writeTo(Writer writer) {
        this.writer = writer;
        return this;
    }

    /**
     * Sets the print stream for error output.
     *
     * @return this, for chaining
     */
    public JDBCFeeder errorsTo(PrintStream printStream) {
        this.printStream = printStream;
        return this;
    }

    /**
     * Starts the feeder thread according to the configured schedule and
     * blocks until all scheduled futures complete, then shuts the ingest down.
     *
     * @return this, for chaining
     * @throws IOException if no Elasticsearch nodes are connected
     */
    public JDBCFeeder start() throws Exception {
        this.closed = false;
        if (ingest.getConnectedNodes().isEmpty()) {
            throw new IOException("no nodes connected, can't continue");
        }
        this.feederThread = new Thread(new RiverRunnable(riverFlow, definitions));
        List<Future<?>> futures = schedule(feederThread);
        // wait for all threads to finish
        for (Future<?> future : futures) {
            future.get();
        }
        ingest.shutdown();
        return this;
    }

    /**
     * Schedules the feeder thread. Priority: cron expressions from "schedule",
     * then a fixed-rate "interval" in seconds, otherwise a single immediate run.
     * Stores the executor so shutdown() can cancel it later.
     *
     * @param thread the feeder thread to run
     * @return the futures of all scheduled runs
     */
    private List<Future<?>> schedule(Thread thread) {
        Settings settings = riverFlow.getSettings();
        String[] schedule = settings.getAsArray("schedule");
        List<Future<?>> futures = newLinkedList();
        Long seconds = settings.getAsTime("interval", TimeValue.timeValueSeconds(0)).seconds();
        if (schedule != null && schedule.length > 0) {
            CronThreadPoolExecutor cronThreadPoolExecutor =
                    new CronThreadPoolExecutor(settings.getAsInt("threadpoolsize", 1));
            for (String cron : schedule) {
                futures.add(cronThreadPoolExecutor.schedule(thread, new CronExpression(cron)));
            }
            this.threadPoolExecutor = cronThreadPoolExecutor;
            logger.debug("scheduled feeder instance with cron expressions {}", Arrays.asList(schedule));
        } else if (seconds > 0L) {
            ScheduledThreadPoolExecutor scheduledThreadPoolExecutor =
                    new ScheduledThreadPoolExecutor(settings.getAsInt("threadpoolsize", 4));
            futures.add(scheduledThreadPoolExecutor.scheduleAtFixedRate(thread, 0L, seconds, TimeUnit.SECONDS));
            logger.debug("scheduled feeder instance at fixed rate of {} seconds", seconds);
            this.threadPoolExecutor = scheduledThreadPoolExecutor;
        } else {
            this.threadPoolExecutor = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS,
                    new LinkedBlockingQueue<Runnable>());
            futures.add(threadPoolExecutor.submit(thread));
            logger.debug("started feeder instance");
        }
        return futures;
    }

    /**
     * Shut down feeder instance by Ctrl-C
     *
     * @return shutdown thread
     */
    public Thread shutdownHook() {
        return new Thread() {
            public void run() {
                try {
                    shutdown();
                } catch (Exception e) {
                    e.printStackTrace(printStream);
                }
            }
        };
    }

    /**
     * Idempotent shutdown: stops the executor, interrupts the feeder thread,
     * shuts down the ingest client, and closes the I/O streams.
     * Synchronized so concurrent calls (e.g. shutdown hook vs. explicit call)
     * run the sequence only once.
     */
    public synchronized void shutdown() throws Exception {
        if (closed) {
            return;
        }
        closed = true;
        if (threadPoolExecutor != null) {
            threadPoolExecutor.shutdownNow();
            threadPoolExecutor = null;
        }
        if (feederThread != null) {
            feederThread.interrupt();
        }
        if (!ingest.isShutdown()) {
            ingest.shutdown();
        }
        // NOTE(review): reader/writer/printStream may still be null here if shutdown
        // runs before readFrom/writeTo/errorsTo were called — confirm intended usage.
        reader.close();
        writer.close();
        printStream.close();
    }

    /**
     * Builds an {@link IngestFactory} producing {@link BulkTransportClient}
     * instances configured from the given settings (bulk limits, cluster
     * coordinates, timeouts, and a custom class loader for ES_HOME/lib).
     *
     * @param settings the feeder settings
     * @return the ingest factory
     */
    private IngestFactory createIngestFactory(final Settings settings) {
        return new IngestFactory() {
            @Override
            public Ingest create() {
                Integer maxbulkactions = settings.getAsInt("max_bulk_actions", 10000);
                Integer maxconcurrentbulkrequests = settings.getAsInt("max_concurrent_bulk_requests",
                        Runtime.getRuntime().availableProcessors() * 2);
                ByteSizeValue maxvolume = settings.getAsBytesSize("max_bulk_volume", ByteSizeValue.parseBytesSizeValue("10m"));
                TimeValue maxrequestwait = settings.getAsTime("max_request_wait", TimeValue.timeValueSeconds(60));
                TimeValue flushinterval = settings.getAsTime("flush_interval", TimeValue.timeValueSeconds(5));
                File home = new File(settings.get("home", "."));
                BulkTransportClient ingest = new BulkTransportClient();
                Settings clientSettings = ImmutableSettings.settingsBuilder()
                        .put("cluster.name", settings.get("elasticsearch.cluster", "elasticsearch"))
                        .put("host", settings.get("elasticsearch.host", "localhost"))
                        .put("port", settings.getAsInt("elasticsearch.port", 9300))
                        .put("sniff", settings.getAsBoolean("elasticsearch.sniff", false))
                        .put("name", "feeder") // prevents lookup of names.txt, we don't have it, and marks this node as "feeder". See also module load skipping in JDBCRiverPlugin
                        .put("client.transport.ignore_cluster_name", true) // ignore cluster name setting
                        .put("client.transport.ping_timeout", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(10))) // ping timeout
                        .put("client.transport.nodes_sampler_interval", settings.getAsTime("elasticsearch.timeout", TimeValue.timeValueSeconds(5))) // for sniff sampling
                        .put("path.plugins", ".dontexist") // pointing to a non-exiting folder means, this disables loading site plugins
                                // adding our custom class loader is tricky, actions may not be registered to ActionService
                        .classLoader(getClassLoader(getClass().getClassLoader(), home))
                        .build();
                ingest.maxActionsPerBulkRequest(maxbulkactions)
                        .maxConcurrentBulkRequests(maxconcurrentbulkrequests)
                        .maxVolumePerBulkRequest(maxvolume)
                        .maxRequestWait(maxrequestwait)
                        .flushIngestInterval(flushinterval)
                        .newClient(clientSettings);
                return ingest;
            }
        };
    }

    /**
     * We have to add Elasticsearch to our classpath, but exclude all jvm plugins
     * for starting our TransportClient.
     *
     * @param home ES_HOME
     * @return a custom class loader with our dependencies
     */
    private ClassLoader getClassLoader(ClassLoader parent, File home) {
        URIClassLoader classLoader = new URIClassLoader(parent);
        File[] libs = new File(home + "/lib").listFiles();
        if (libs != null) {
            for (File file : libs) {
                if (file.getName().toLowerCase().endsWith(".jar")) {
                    classLoader.addURI(file.toURI());
                }
            }
        }
        return classLoader;
    }

}
| songwie/elasticsearch-river-jdbc | src/main/java/org/xbib/elasticsearch/plugin/jdbc/feeder/JDBCFeeder.java | Java | apache-2.0 | 12,908 |
/*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.service.neomedia.stats;
import org.jitsi.service.neomedia.*;
import java.util.*;
/**
* An extended interface for accessing the statistics of a {@link MediaStream}.
*
* The reason to extend the {@link MediaStreamStats} interface rather than
* adding methods into it is to allow the implementation to reside in a separate
* class. This is desirable in order to:
* 1. Help to keep the old interface for backward compatibility.
* 2. Provide a "clean" place where future code can be added, thus avoiding
* further cluttering of the already overly complicated
* {@link org.jitsi.impl.neomedia.MediaStreamStatsImpl}.
*
* @author Boris Grozev
*/
public interface MediaStreamStats2
    extends MediaStreamStats
{
    /**
     * @return the instance which keeps aggregate statistics for the associated
     * {@link MediaStream} in the receive direction.
     */
    ReceiveTrackStats getReceiveStats();

    /**
     * @return the instance which keeps aggregate statistics for the associated
     * {@link MediaStream} in the send direction.
     */
    SendTrackStats getSendStats();

    /**
     * @param ssrc the SSRC whose receive statistics are requested.
     * @return the instance which keeps statistics for a particular SSRC in the
     * receive direction.
     */
    ReceiveTrackStats getReceiveStats(long ssrc);

    /**
     * @param ssrc the SSRC whose send statistics are requested.
     * @return the instance which keeps statistics for a particular SSRC in the
     * send direction.
     */
    SendTrackStats getSendStats(long ssrc);

    /**
     * @return all per-SSRC statistics for the send direction.
     */
    Collection<? extends SendTrackStats> getAllSendStats();

    /**
     * @return all per-SSRC statistics for the receive direction.
     */
    Collection<? extends ReceiveTrackStats> getAllReceiveStats();

    /**
     * Clears the send statistics kept for the given SSRC.
     * @param ssrc the ssrc to clear.
     */
    void clearSendSsrc(long ssrc);
}
| jitsi/libjitsi | src/main/java/org/jitsi/service/neomedia/stats/MediaStreamStats2.java | Java | apache-2.0 | 2,461 |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticloadbalancing.model;
import java.io.Serializable;
/**
*
*/
// NOTE: generated AWS SDK model class; the lazy-list/defensive-copy idioms
// below follow the SDK's code-generation conventions.
public class RegisterInstancesWithLoadBalancerResult implements Serializable, Cloneable {

    /**
     * The updated list of instances for the load balancer.
     */
    // Lazily created in getInstances(); the auto-construct flag marks the
    // list as SDK-created rather than caller-supplied.
    private com.amazonaws.internal.ListWithAutoConstructFlag<Instance> instances;

    /**
     * The updated list of instances for the load balancer.
     *
     * @return The updated list of instances for the load balancer.
     */
    public java.util.List<Instance> getInstances() {
        // Lazy initialization: callers never observe a null collection.
        if (instances == null) {
            instances = new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>();
            instances.setAutoConstruct(true);
        }
        return instances;
    }

    /**
     * The updated list of instances for the load balancer.
     *
     * @param instances The updated list of instances for the load balancer.
     */
    public void setInstances(java.util.Collection<Instance> instances) {
        if (instances == null) {
            this.instances = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection does
        // not affect this object.
        com.amazonaws.internal.ListWithAutoConstructFlag<Instance> instancesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>(instances.size());
        instancesCopy.addAll(instances);
        this.instances = instancesCopy;
    }

    /**
     * The updated list of instances for the load balancer.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if
     * any). Use {@link #setInstances(java.util.Collection)} or {@link
     * #withInstances(java.util.Collection)} if you want to override the
     * existing values.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param instances The updated list of instances for the load balancer.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public RegisterInstancesWithLoadBalancerResult withInstances(Instance... instances) {
        // NOTE(review): getInstances() never returns null (it lazily
        // initializes), so this presizing branch never runs; kept as-is
        // because this file is SDK-generated.
        if (getInstances() == null) setInstances(new java.util.ArrayList<Instance>(instances.length));
        for (Instance value : instances) {
            getInstances().add(value);
        }
        return this;
    }

    /**
     * The updated list of instances for the load balancer.
     * <p>
     * Returns a reference to this object so that method calls can be chained together.
     *
     * @param instances The updated list of instances for the load balancer.
     *
     * @return A reference to this updated object so that method calls can be chained
     *         together.
     */
    public RegisterInstancesWithLoadBalancerResult withInstances(java.util.Collection<Instance> instances) {
        if (instances == null) {
            this.instances = null;
        } else {
            // Same defensive-copy behavior as setInstances, fluent style.
            com.amazonaws.internal.ListWithAutoConstructFlag<Instance> instancesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<Instance>(instances.size());
            instancesCopy.addAll(instances);
            this.instances = instancesCopy;
        }
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getInstances() != null) sb.append("Instances: " + getInstances() );
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Consistent with equals(): based solely on the instances list.
        hashCode = prime * hashCode + ((getInstances() == null) ? 0 : getInstances().hashCode());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null) return false;
        if (obj instanceof RegisterInstancesWithLoadBalancerResult == false) return false;
        RegisterInstancesWithLoadBalancerResult other = (RegisterInstancesWithLoadBalancerResult)obj;
        // XOR catches the case where exactly one side has a null list.
        if (other.getInstances() == null ^ this.getInstances() == null) return false;
        if (other.getInstances() != null && other.getInstances().equals(this.getInstances()) == false) return false;
        return true;
    }

    @Override
    public RegisterInstancesWithLoadBalancerResult clone() {
        try {
            // Shallow clone is sufficient for this generated model type.
            return (RegisterInstancesWithLoadBalancerResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: the class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                    + "even though we're Cloneable!",
                    e);
        }
    }

}
| trasa/aws-sdk-java | aws-java-sdk-elasticloadbalancing/src/main/java/com/amazonaws/services/elasticloadbalancing/model/RegisterInstancesWithLoadBalancerResult.java | Java | apache-2.0 | 5,518 |
package com.example.godtemper.db;
import java.util.ArrayList;
import java.util.List;
import com.example.godtemper.model.City;
import com.example.godtemper.model.County;
import com.example.godtemper.model.Province;
import android.R.integer;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
/**
 * Singleton helper around the app's SQLite database. Provides persistence
 * and retrieval for Province / City / County records.
 *
 * Fix over the original: every query method now closes its {@link Cursor}
 * in a finally block — the original leaked one cursor per call.
 */
public class GodTemperDB {

    /** Name of the SQLite database file. */
    public static final String DB_NAME = "GodTemper";

    /** Schema version of the database. */
    public static final int VERSION = 1;

    /** Lazily created singleton instance. */
    private static GodTemperDB godTemperDB;

    /** Writable handle to the underlying database. */
    private SQLiteDatabase db;

    /**
     * Opens (and creates, if necessary) the database. Private: use
     * {@link #getInstance(Context)}.
     */
    private GodTemperDB(Context context) {
        GodTemperOpenHelper dbHelper = new GodTemperOpenHelper(context, DB_NAME, null, VERSION);
        db = dbHelper.getWritableDatabase();
    }

    /**
     * Returns the shared GodTemperDB instance, creating it on first use.
     *
     * @param context used to open the database on first call.
     */
    public synchronized static GodTemperDB getInstance(Context context) {
        if (godTemperDB == null) {
            godTemperDB = new GodTemperDB(context);
        }
        return godTemperDB;
    }

    /**
     * Persists a Province instance into the database. No-op for null input.
     */
    public void saveProvince(Province province) {
        if (province != null) {
            ContentValues values = new ContentValues();
            values.put("province_name", province.getProvinceName());
            values.put("province_code", province.getProvinceCode());
            db.insert("Province", null, values);
        }
    }

    /**
     * Reads all provinces stored in the database.
     *
     * @return all provinces; empty list if none are stored.
     */
    public List<Province> loadProvinces() {
        List<Province> list = new ArrayList<Province>();
        Cursor cursor = db.query("Province", null, null, null, null, null, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    Province province = new Province();
                    province.setId(cursor.getInt(cursor.getColumnIndex("id")));
                    province.setProvinceName(cursor.getString(cursor.getColumnIndex("province_name")));
                    province.setProvinceCode(cursor.getString(cursor.getColumnIndex("province_code")));
                    list.add(province);
                } while (cursor.moveToNext());
            }
        } finally {
            // Always release the cursor; the original code leaked it.
            cursor.close();
        }
        return list;
    }

    /**
     * Persists a City instance into the database. No-op for null input.
     */
    public void saveCity(City city) {
        if (city != null) {
            ContentValues values = new ContentValues();
            values.put("city_name", city.getCityName());
            values.put("city_code", city.getCityCode());
            values.put("province_id", city.getProvinceId());
            db.insert("City", null, values);
        }
    }

    /**
     * Reads all cities belonging to the given province.
     *
     * @param provinceId id of the parent province.
     * @return the province's cities; empty list if none are stored.
     */
    public List<City> loadCities(int provinceId) {
        List<City> list = new ArrayList<City>();
        Cursor cursor = db.query("City", null, "province_id = ?",
                new String[]{String.valueOf(provinceId)}, null, null, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    City city = new City();
                    city.setId(cursor.getInt(cursor.getColumnIndex("id")));
                    city.setCityName(cursor.getString(cursor.getColumnIndex("city_name")));
                    city.setCityCode(cursor.getString(cursor.getColumnIndex("city_code")));
                    city.setProvinceId(provinceId);
                    list.add(city);
                } while (cursor.moveToNext());
            }
        } finally {
            // Always release the cursor; the original code leaked it.
            cursor.close();
        }
        return list;
    }

    /**
     * Persists a County instance into the database. No-op for null input.
     */
    public void saveCounty(County county) {
        if (county != null) {
            ContentValues values = new ContentValues();
            values.put("county_name", county.getCountyName());
            values.put("county_code", county.getCountyCode());
            values.put("city_id", county.getCityId());
            db.insert("County", null, values);
        }
    }

    /**
     * Reads all counties belonging to the given city.
     *
     * @param cityId id of the parent city.
     * @return the city's counties; empty list if none are stored.
     */
    public List<County> loadCounties(int cityId) {
        List<County> list = new ArrayList<County>();
        Cursor cursor = db.query("County", null, "city_id = ?",
                new String[]{String.valueOf(cityId)}, null, null, null);
        try {
            if (cursor.moveToFirst()) {
                do {
                    County county = new County();
                    county.setId(cursor.getInt(cursor.getColumnIndex("id")));
                    county.setCountyName(cursor.getString(cursor.getColumnIndex("county_name")));
                    county.setCountyCode(cursor.getString(cursor.getColumnIndex("county_code")));
                    county.setCityId(cityId);
                    list.add(county);
                } while (cursor.moveToNext());
            }
        } finally {
            // Always release the cursor; the original code leaked it.
            cursor.close();
        }
        return list;
    }
}
| GodisGod/godtemper | src/com/example/godtemper/db/GodTemperDB.java | Java | apache-2.0 | 4,037 |
<?php
// ThinkPHP application configuration: debug/trace switches and the
// database connection settings.
return array(
    //'config key' => 'config value'
    /* Debug settings */
    'SHOW_PAGE_TRACE' =>false, // show page trace information
    // 'SHOW_RUN_TIME'=>true, // show run time
    // 'SHOW_ADV_TIME'=>true, // show detailed run times
    // 'SHOW_DB_TIMES'=>true, // show number of database reads and writes
    // 'SHOW_CACHE_TIMES'=>true, // show number of cache operations
    // 'SHOW_USE_MEM'=>true, // show memory usage
    // 'SHOW_LOAD_FILE' =>true, // show number of loaded files
    // 'SHOW_FUN_TIMES'=>true , // show number of function calls
    /* Database settings */
    // NOTE(review): credentials are hard-coded here; consider moving them to
    // an environment-specific config outside version control.
    'DB_TYPE' => 'mysql', // database type
    'DB_HOST' => '127.0.0.1', // server address
    'DB_NAME' => 'ts', // database name
    'DB_USER' => 'root', // user name
    'DB_PWD' => '123456', // password
    'DB_PORT' => '3306', // port
    'DB_PREFIX' => 'treesys_', // table name prefix
    'DB_FIELDTYPE_CHECK' => false, // whether to check field types
    'DB_FIELDS_CACHE' => true, // enable field caching
    'DB_CHARSET' => 'utf8', // database charset, utf8 by default
    'DB_DEPLOY_TYPE' => 0, // deployment: 0 centralized (single server), 1 distributed (master/slave)
    'DB_RW_SEPARATE' => false, // separate reads and writes (master/slave only)
    'DB_MASTER_NUM' => 1, // number of master servers when reads/writes are separated
    'DB_SQL_BUILD_CACHE' => true, // cache built SQL for queries
    'DB_SQL_BUILD_QUEUE' => 'file', // SQL cache queue backend: file, xcache or apc
    'DB_SQL_BUILD_LENGTH' => 20, // length of the SQL cache queue
    'VAR_PAGE' =>'p', // pagination parameter name
);
?>
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "exegesis/x86/cleanup_instruction_set_fix_operands.h"
#include <algorithm>
#include <iterator>
#include <string>
#include <vector>
#include "absl/container/flat_hash_map.h"
#include "absl/container/flat_hash_set.h"
#include "absl/container/node_hash_map.h"
#include "absl/status/status.h"
#include "absl/strings/str_cat.h"
#include "exegesis/base/cleanup_instruction_set.h"
#include "exegesis/proto/instructions.pb.h"
#include "exegesis/util/instruction_syntax.h"
#include "exegesis/x86/cleanup_instruction_set_utils.h"
#include "glog/logging.h"
#include "src/google/protobuf/repeated_field.h"
#include "util/gtl/map_util.h"
namespace exegesis {
namespace x86 {
namespace {

using ::google::protobuf::RepeatedPtrField;

// Mapping from memory operands to their sizes as used in the Intel assembly
// syntax.
const std::pair<const char*, const char*> kOperandToPointerSize[] = {
    {"m8", "BYTE"}, {"m16", "WORD"}, {"m32", "DWORD"}, {"m64", "QWORD"}};

// List of RSI-indexed source arrays.
// These are the explicit operand strings that the transforms below emit for
// string instructions reading through RSI.
const char* kRSIIndexes[] = {"BYTE PTR [RSI]", "WORD PTR [RSI]",
                             "DWORD PTR [RSI]", "QWORD PTR [RSI]"};

// List of RDI-indexed destination arrays.
// Counterpart of kRSIIndexes for operands addressed through RDI.
const char* kRDIIndexes[] = {"BYTE PTR [RDI]", "WORD PTR [RDI]",
                             "DWORD PTR [RDI]", "QWORD PTR [RDI]"};

}  // namespace
// Rewrites the operands of the CMPS and MOVS string instructions from the
// generic m8/m16/m32/m64 form used by the SDM into the explicit
// RSI/RDI-indexed form expected by assemblers, e.g. "BYTE PTR [RSI]".
// Returns the last error encountered; processing continues past errors.
absl::Status FixOperandsOfCmpsAndMovs(InstructionSetProto* instruction_set) {
  CHECK(instruction_set != nullptr);
  const absl::flat_hash_set<std::string> kMnemonics = {"CMPS", "MOVS"};
  // Operand names that are already in their final, RSI/RDI-indexed form.
  // BUG FIX: these two sets were previously constructed from the empty
  // iterator range [std::begin(...), std::begin(...)), which made them
  // always empty, so operands already in the converted form were rejected
  // as errors. They are now built from the full [begin, end) range.
  const absl::flat_hash_set<std::string> kSourceOperands(
      std::begin(kRSIIndexes), std::end(kRSIIndexes));
  const absl::flat_hash_set<std::string> kDestinationOperands(
      std::begin(kRDIIndexes), std::end(kRDIIndexes));
  const absl::flat_hash_map<std::string, std::string> operand_to_pointer_size(
      std::begin(kOperandToPointerSize), std::end(kOperandToPointerSize));
  absl::Status status = absl::OkStatus();
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    InstructionFormat* const vendor_syntax =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction);
    if (!kMnemonics.contains(vendor_syntax->mnemonic())) {
      continue;
    }
    if (vendor_syntax->operands_size() != 2) {
      status = absl::InvalidArgumentError(
          "Unexpected number of operands of a CMPS/MOVS instruction.");
      LOG(ERROR) << status;
      continue;
    }
    // NOTE(review): when operand 0 is already in the converted form,
    // pointer_size stays empty and the rewritten names below would lose
    // their size prefix; this assumes the transform runs at most once per
    // instruction set — confirm against the pipeline.
    std::string pointer_size;
    if (!gtl::FindCopy(operand_to_pointer_size,
                       vendor_syntax->operands(0).name(), &pointer_size) &&
        !kSourceOperands.contains(vendor_syntax->operands(0).name()) &&
        !kDestinationOperands.contains(vendor_syntax->operands(0).name())) {
      status = absl::InvalidArgumentError(
          absl::StrCat("Unexpected operand of a CMPS/MOVS instruction: ",
                       vendor_syntax->operands(0).name()));
      LOG(ERROR) << status;
      continue;
    }
    CHECK_EQ(vendor_syntax->operands_size(), 2);
    // The correct syntax for MOVS is MOVSB BYTE PTR [RDI],BYTE PTR [RSI]
    // (destination is the right operand, as expected in the Intel syntax),
    // while for CMPS LLVM only supports CMPSB BYTE PTR [RSI],BYTE PTR [RDI].
    // The following handles this.
    constexpr const char* const kIndexings[] = {"[RDI]", "[RSI]"};
    const int dest = vendor_syntax->mnemonic() == "MOVS" ? 0 : 1;
    const int src = 1 - dest;
    vendor_syntax->mutable_operands(0)->set_name(
        absl::StrCat(pointer_size, " PTR ", kIndexings[dest]));
    vendor_syntax->mutable_operands(0)->set_usage(
        dest == 0 ? InstructionOperand::USAGE_WRITE
                  : InstructionOperand::USAGE_READ);
    vendor_syntax->mutable_operands(1)->set_name(
        absl::StrCat(pointer_size, " PTR ", kIndexings[src]));
    vendor_syntax->mutable_operands(1)->set_usage(
        InstructionOperand::USAGE_READ);
  }
  return status;
}
REGISTER_INSTRUCTION_SET_TRANSFORM(FixOperandsOfCmpsAndMovs, 2000);
// Rewrites the operands of INS and OUTS: replaces the generic memory operand
// with the explicit RDI-/RSI-indexed form and makes the DX port register
// operand explicit. Returns the last error encountered; processing continues
// past errors.
absl::Status FixOperandsOfInsAndOuts(InstructionSetProto* instruction_set) {
  constexpr char kIns[] = "INS";
  constexpr char kOuts[] = "OUTS";
  const absl::flat_hash_map<std::string, std::string> operand_to_pointer_size(
      std::begin(kOperandToPointerSize), std::end(kOperandToPointerSize));
  absl::Status status = absl::OkStatus();
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    InstructionFormat* const vendor_syntax =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction);
    const bool is_ins = vendor_syntax->mnemonic() == kIns;
    const bool is_outs = vendor_syntax->mnemonic() == kOuts;
    if (!is_ins && !is_outs) {
      continue;
    }
    if (vendor_syntax->operands_size() != 2) {
      status = absl::InvalidArgumentError(
          "Unexpected number of operands of an INS/OUTS instruction.");
      LOG(ERROR) << status;
      continue;
    }
    // The size is taken from whichever of the two operands is the memory
    // operand (the other one is the DX register).
    std::string pointer_size;
    if (!gtl::FindCopy(operand_to_pointer_size,
                       vendor_syntax->operands(0).name(), &pointer_size) &&
        !gtl::FindCopy(operand_to_pointer_size,
                       vendor_syntax->operands(1).name(), &pointer_size)) {
      status = absl::InvalidArgumentError(
          absl::StrCat("Unexpected operands of an INS/OUTS instruction: ",
                       vendor_syntax->operands(0).name(), ", ",
                       vendor_syntax->operands(1).name()));
      LOG(ERROR) << status;
      continue;
    }
    CHECK_EQ(vendor_syntax->operands_size(), 2);
    if (is_ins) {
      // INS: memory destination addressed through RDI, port in DX.
      vendor_syntax->mutable_operands(0)->set_name(
          absl::StrCat(pointer_size, " PTR [RDI]"));
      vendor_syntax->mutable_operands(0)->set_usage(
          InstructionOperand::USAGE_WRITE);
      vendor_syntax->mutable_operands(1)->set_name("DX");
      vendor_syntax->mutable_operands(1)->set_usage(
          InstructionOperand::USAGE_READ);
    } else {
      // OUTS: port in DX, memory source addressed through RSI.
      CHECK(is_outs);
      vendor_syntax->mutable_operands(0)->set_name("DX");
      vendor_syntax->mutable_operands(0)->set_usage(
          InstructionOperand::USAGE_READ);
      vendor_syntax->mutable_operands(1)->set_name(
          absl::StrCat(pointer_size, " PTR [RSI]"));
      vendor_syntax->mutable_operands(1)->set_usage(
          InstructionOperand::USAGE_READ);
    }
  }
  return status;
}
REGISTER_INSTRUCTION_SET_TRANSFORM(FixOperandsOfInsAndOuts, 2000);
// The SDM spells the memory operand of LDDQU (F2 0F F0 /r) as the generic
// "mem"; rename it to "m128" so it matches the naming used by the other
// 128-bit loads. Always succeeds.
absl::Status FixOperandsOfLddqu(InstructionSetProto* instruction_set) {
  constexpr char kLddquEncoding[] = "F2 0F F0 /r";
  constexpr char kGenericMemOperand[] = "mem";
  constexpr char kXmmSizedMemOperand[] = "m128";
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    if (instruction.raw_encoding_specification() == kLddquEncoding) {
      InstructionFormat* const syntax =
          GetOrAddUniqueVendorSyntaxOrDie(&instruction);
      for (InstructionOperand& operand : *syntax->mutable_operands()) {
        if (operand.name() == kGenericMemOperand) {
          operand.set_name(kXmmSizedMemOperand);
        }
      }
    }
  }
  return absl::OkStatus();
}
REGISTER_INSTRUCTION_SET_TRANSFORM(FixOperandsOfLddqu, 2000);
// Rewrites the operands of the LODS, SCAS and STOS string instructions:
// replaces the single generic memory operand with the explicit register
// (AL/AX/EAX/RAX) and RSI-/RDI-indexed memory operands. Returns the last
// error encountered; processing continues past errors.
absl::Status FixOperandsOfLodsScasAndStos(
    InstructionSetProto* instruction_set) {
  // Note that we're matching only the versions with operands. These versions
  // use the mnemonics without the size suffix. By matching exactly these names,
  // we can easily avoid the operand-less versions.
  constexpr char kLods[] = "LODS";
  constexpr char kScas[] = "SCAS";
  constexpr char kStos[] = "STOS";
  const absl::flat_hash_map<std::string, std::string> operand_to_pointer_size(
      std::begin(kOperandToPointerSize), std::end(kOperandToPointerSize));
  // Maps the generic memory operand to the accumulator register of the
  // matching width.
  const absl::flat_hash_map<std::string, std::string> kOperandToRegister = {
      {"m8", "AL"}, {"m16", "AX"}, {"m32", "EAX"}, {"m64", "RAX"}};
  absl::Status status = absl::OkStatus();
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    InstructionFormat* const vendor_syntax =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction);
    const bool is_lods = vendor_syntax->mnemonic() == kLods;
    const bool is_stos = vendor_syntax->mnemonic() == kStos;
    const bool is_scas = vendor_syntax->mnemonic() == kScas;
    if (!is_lods && !is_stos && !is_scas) {
      continue;
    }
    if (vendor_syntax->operands_size() != 1) {
      status = absl::InvalidArgumentError(
          "Unexpected number of operands of a LODS/STOS instruction.");
      LOG(ERROR) << status;
      continue;
    }
    // Both lookups key off the same operand name; either both succeed or
    // both fail.
    std::string register_operand;
    std::string pointer_size;
    if (!gtl::FindCopy(kOperandToRegister, vendor_syntax->operands(0).name(),
                       &register_operand) ||
        !gtl::FindCopy(operand_to_pointer_size,
                       vendor_syntax->operands(0).name(), &pointer_size)) {
      status = absl::InvalidArgumentError(
          absl::StrCat("Unexpected operand of a LODS/STOS instruction: ",
                       vendor_syntax->operands(0).name()));
      LOG(ERROR) << status;
      continue;
    }
    // Rebuild the operand list from scratch with implicit encodings.
    vendor_syntax->clear_operands();
    if (is_stos) {
      // NOTE(review): STOS stores the accumulator to [RDI], so USAGE_READ
      // on this destination operand looks suspicious (USAGE_WRITE would be
      // expected) — confirm against the intended operand semantics.
      auto* const operand = vendor_syntax->add_operands();
      operand->set_name(absl::StrCat(pointer_size, " PTR [RDI]"));
      operand->set_encoding(InstructionOperand::IMPLICIT_ENCODING);
      operand->set_usage(InstructionOperand::USAGE_READ);
    }
    // The accumulator register operand, common to all three mnemonics.
    // NOTE(review): for LODS the register is the destination, so USAGE_READ
    // here also looks suspicious — confirm.
    auto* const operand = vendor_syntax->add_operands();
    operand->set_encoding(InstructionOperand::IMPLICIT_ENCODING);
    operand->set_name(register_operand);
    operand->set_usage(InstructionOperand::USAGE_READ);
    if (is_lods) {
      // LODS reads from [RSI].
      auto* const operand = vendor_syntax->add_operands();
      operand->set_encoding(InstructionOperand::IMPLICIT_ENCODING);
      operand->set_name(absl::StrCat(pointer_size, " PTR [RSI]"));
      operand->set_usage(InstructionOperand::USAGE_READ);
    }
    if (is_scas) {
      // SCAS compares the accumulator against [RDI].
      auto* const operand = vendor_syntax->add_operands();
      operand->set_encoding(InstructionOperand::IMPLICIT_ENCODING);
      operand->set_name(absl::StrCat(pointer_size, " PTR [RDI]"));
      operand->set_usage(InstructionOperand::USAGE_READ);
    }
  }
  return status;
}
REGISTER_INSTRUCTION_SET_TRANSFORM(FixOperandsOfLodsScasAndStos, 2000);
// SGDT and SIDT (encodings 0F 01 /0 and 0F 01 /1) store a pseudo-descriptor;
// replaces their generic "m" operand name with the explicit "m16&64".
// Always succeeds.
absl::Status FixOperandsOfSgdtAndSidt(InstructionSetProto* instruction_set) {
  CHECK(instruction_set != nullptr);
  const absl::flat_hash_set<std::string> kEncodings = {"0F 01 /0", "0F 01 /1"};
  constexpr char kMemoryOperandName[] = "m";
  constexpr char kUpdatedMemoryOperandName[] = "m16&64";
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    // Skip everything that is not SGDT/SIDT.
    if (!kEncodings.contains(instruction.raw_encoding_specification())) {
      continue;
    }
    InstructionFormat* const syntax =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction);
    for (InstructionOperand& operand : *syntax->mutable_operands()) {
      if (operand.name() == kMemoryOperandName) {
        operand.set_name(kUpdatedMemoryOperandName);
      }
    }
  }
  return absl::OkStatus();
}
REGISTER_INSTRUCTION_SET_TRANSFORM(FixOperandsOfSgdtAndSidt, 2000);
// For the VEX-encoded VMOVQ (VEX.128.F3.0F.WIG 7E /r), replaces the second
// operand's name with "xmm2/m64". Returns an error if a matching
// instruction does not have exactly two operands.
absl::Status FixOperandsOfVMovq(InstructionSetProto* instruction_set) {
  CHECK(instruction_set != nullptr);
  constexpr char kVMovQEncoding[] = "VEX.128.F3.0F.WIG 7E /r";
  constexpr char kRegisterOrMemoryOperand[] = "xmm2/m64";
  ::google::protobuf::RepeatedPtrField<InstructionProto>* const instructions =
      instruction_set->mutable_instructions();
  for (InstructionProto& instruction : *instructions) {
    // Only the one VMOVQ form with this exact encoding is affected.
    if (instruction.raw_encoding_specification() != kVMovQEncoding) continue;
    InstructionFormat* const vendor_syntax =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction);
    if (vendor_syntax->operands_size() != 2) {
      // Unlike the other transforms in this file, this one stops at the
      // first error instead of continuing.
      return absl::InvalidArgumentError(
          absl::StrCat("Unexpected number of operands of a VMOVQ instruction: ",
                       instruction.DebugString()));
    }
    vendor_syntax->mutable_operands(1)->set_name(kRegisterOrMemoryOperand);
  }
  return absl::OkStatus();
}
REGISTER_INSTRUCTION_SET_TRANSFORM(FixOperandsOfVMovq, 2000);
// Replaces the generic "reg" operand with a concrete register-size operand
// (r8/r16/r32/r64), depending on the mnemonic. For mnemonics in
// kExpandToAllSizes, also duplicates the instruction to cover both r32 and
// r64 (with a REX.W prefix for the latter). Returns the last error
// encountered; processing continues past errors.
absl::Status FixRegOperands(InstructionSetProto* instruction_set) {
  CHECK(instruction_set != nullptr);
  constexpr char kR8Operand[] = "r8";
  constexpr char kR16Operand[] = "r16";
  constexpr char kR32Operand[] = "r32";
  constexpr char kR64Operand[] = "r64";
  constexpr char kRegOperand[] = "reg";
  // The mnemonics for which we add new entries.
  const absl::flat_hash_set<std::string> kExpandToAllSizes = {"LAR"};
  // The mnemonics for which we just replace reg with r8/r16/r32.
  const absl::flat_hash_set<std::string> kRenameToReg8 = {"VPBROADCASTB"};
  const absl::flat_hash_set<std::string> kRenameToReg16 = {"VPBROADCASTW"};
  const absl::flat_hash_set<std::string> kRenameToReg32 = {
      "EXTRACTPS", "MOVMSKPD", "MOVMSKPS", "PEXTRB", "PEXTRW", "PMOVMSKB",
      "VMOVMSKPD", "VMOVMSKPS", "VPEXTRB", "VPEXTRW", "VPMOVMSKB"};
  // We can't safely add new entries to 'instructions' while we iterate over it.
  // Instead, we collect the instructions in a separate vector and add it to the
  // proto at the end.
  std::vector<InstructionProto> new_instruction_protos;
  ::google::protobuf::RepeatedPtrField<InstructionProto>* const instructions =
      instruction_set->mutable_instructions();
  absl::Status status = absl::OkStatus();
  for (InstructionProto& instruction : *instructions) {
    InstructionFormat* const vendor_syntax =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction);
    const std::string& mnemonic = vendor_syntax->mnemonic();
    for (auto& operand : *vendor_syntax->mutable_operands()) {
      if (operand.name() == kRegOperand) {
        if (kExpandToAllSizes.contains(mnemonic)) {
          // This is a bit hacky. To avoid complicated matching of registers, we
          // just override the existing entry in the instruction set proto, add
          // the modified proto to new_instruction_protos except for the last
          // modification which we keep in the instruction set proto.
          //
          // This is safe as long as there is only one reg operand per entry
          // (which is true in the current version of the data).
          operand.set_name(kR32Operand);
          new_instruction_protos.push_back(instruction);
          operand.set_name(kR64Operand);
          // The 64-bit variant needs the REX.W prefix in its encoding.
          instruction.set_raw_encoding_specification(
              "REX.W + " + instruction.raw_encoding_specification());
        } else if (kRenameToReg8.contains(mnemonic)) {
          operand.set_name(kR8Operand);
        } else if (kRenameToReg16.contains(mnemonic)) {
          operand.set_name(kR16Operand);
        } else if (kRenameToReg32.contains(mnemonic)) {
          operand.set_name(kR32Operand);
        } else {
          // A "reg" operand on a mnemonic not covered by any of the tables
          // above is an error in the data.
          status = absl::InvalidArgumentError(
              absl::StrCat("Unexpected instruction mnemonic: ", mnemonic));
          LOG(ERROR) << status;
          continue;
        }
      }
    }
  }
  // Append the r32 duplicates collected during the iteration.
  std::copy(new_instruction_protos.begin(), new_instruction_protos.end(),
            ::google::protobuf::RepeatedPtrFieldBackInserter(instructions));
  return status;
}
REGISTER_INSTRUCTION_SET_TRANSFORM(FixRegOperands, 2000);
// Canonicalizes operand names that the SDM spells in more than one way, so
// that later transforms only ever see a single spelling per operand type.
// Always succeeds.
absl::Status RenameOperands(InstructionSetProto* instruction_set) {
  CHECK(instruction_set != nullptr);
  const absl::flat_hash_map<std::string, std::string> kOperandRenaming = {
      // Synonyms (different names used for the same type in different parts of
      // the manual).
      {"m80dec", "m80bcd"},
      {"r8/m8", "r/m8"},
      {"r16/m16", "r/m16"},
      {"r32/m32", "r/m32"},
      {"r64/m64", "r/m64"},
      {"ST", "ST(0)"},
      // Variants that depend on the mode of the CPU. The 32- and 64-bit modes
      // always use the larger of the two values.
      {"m14/28byte", "m28byte"},
      {"m94/108byte", "m108byte"}};
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    InstructionFormat* const syntax =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction);
    for (InstructionOperand& operand : *syntax->mutable_operands()) {
      const auto replacement = kOperandRenaming.find(operand.name());
      if (replacement != kOperandRenaming.end()) {
        operand.set_name(replacement->second);
      }
    }
  }
  return absl::OkStatus();
}
REGISTER_INSTRUCTION_SET_TRANSFORM(RenameOperands, 2000);
// Removes the implicit ST(0) operand from the x87 instructions listed in
// kUpdatedInstructionEncodings. ST(0) is not encoded in these instructions,
// so it must not appear in the operand list. Always succeeds.
absl::Status RemoveImplicitST0Operand(InstructionSetProto* instruction_set) {
  CHECK(instruction_set != nullptr);
  static constexpr char kImplicitST0Operand[] = "ST(0)";
  // Raw encodings of the affected x87 floating-point instructions.
  const absl::flat_hash_set<std::string> kUpdatedInstructionEncodings = {
      "D8 C0+i", "D8 C8+i", "D8 E0+i", "D8 E8+i", "D8 F0+i", "D8 F8+i",
      "DB E8+i", "DB F0+i", "DE C0+i", "DE C8+i", "DE E0+i", "DE E8+i",
      "DE F0+i", "DE F8+i", "DF E8+i", "DF F0+i",
  };
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    if (!kUpdatedInstructionEncodings.contains(
            instruction.raw_encoding_specification())) {
      continue;
    }
    // Erase-remove idiom on the repeated operand field.
    RepeatedPtrField<InstructionOperand>* const operands =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction)->mutable_operands();
    operands->erase(std::remove_if(operands->begin(), operands->end(),
                                   [](const InstructionOperand& operand) {
                                     return operand.name() ==
                                            kImplicitST0Operand;
                                   }),
                    operands->end());
  }
  return absl::OkStatus();
}
REGISTER_INSTRUCTION_SET_TRANSFORM(RemoveImplicitST0Operand, 2000);
// Removes operands written in angle brackets (e.g. "<XMM0>"). These denote
// implicit, non-encoded registers and must not appear in the operand list.
// Always succeeds.
absl::Status RemoveImplicitOperands(InstructionSetProto* instruction_set) {
  CHECK(instruction_set != nullptr);
  // The full set of angle-bracketed operand names found in the data.
  const absl::flat_hash_set<absl::string_view> kImplicitXmmOperands = {
      "<EAX>", "<XMM0>", "<XMM0-2>", "<XMM0-6>", "<XMM0-7>", "<XMM4-6>"};
  for (InstructionProto& instruction :
       *instruction_set->mutable_instructions()) {
    // Erase-remove idiom on the repeated operand field.
    RepeatedPtrField<InstructionOperand>* const operands =
        GetOrAddUniqueVendorSyntaxOrDie(&instruction)->mutable_operands();
    operands->erase(
        std::remove_if(
            operands->begin(), operands->end(),
            [&kImplicitXmmOperands](const InstructionOperand& operand) {
              return kImplicitXmmOperands.contains(operand.name());
            }),
        operands->end());
  }
  return absl::OkStatus();
}
REGISTER_INSTRUCTION_SET_TRANSFORM(RemoveImplicitOperands, 2000);
} // namespace x86
} // namespace exegesis
| google/EXEgesis | exegesis/x86/cleanup_instruction_set_fix_operands.cc | C++ | apache-2.0 | 19,133 |
require 'spec_helper'
# Unit tests for Rtime::Executor: timing of external shell commands
# (#execute) and of in-process Ruby blocks (#measure).
describe Rtime::Executor do
  # Fresh executor for every example.
  let :executor do
    Rtime::Executor.new
  end

  describe 'execute' do
    it 'needs arguments to execute' do
      expect { executor.execute }.to raise_error ArgumentError
    end

    it 'times the executed process' do
      # Timing is delegated to the executor's timer object.
      expect(executor.timer).to receive(:time).and_call_original
      executor.execute 'true'
    end

    it 'returns a filled Rtime::Result' do
      result = executor.execute('true', name: 'the name')
      expect(result).to be_a Rtime::Result
      expect(result.start).to be_a Time
      expect(result.pid).to respond_to :to_int
      expect(result.duration).to be_a Float
      expect(result.exitstatus).to eq 0
      expect(result.name).to eq 'the name'
    end
  end

  describe 'measure' do
    it 'needs a name argument to execute' do
      expect { executor.measure do end }.to raise_error ArgumentError
    end

    it 'needs a block to call' do
      expect { executor.measure('foo') }.to raise_error ArgumentError
    end

    it 'measures times of a block' do
      executed = false
      result = executor.measure('the name') do
        executed = true
      end
      expect(executed).to eq true
      expect(result).to be_a Rtime::Result
      expect(result.start).to be_a Time
      # measure runs in-process, so the result carries our own pid ($$).
      expect(result.pid).to eq $$
      expect(result.duration).to be_a Float
      expect(result.exitstatus).to eq 0
      expect(result.name).to eq 'the name'
    end
  end
end
| flori/rtime | spec/executor_spec.rb | Ruby | apache-2.0 | 1,451 |
package org.axway.grapes.server.webapp.resources;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.WebResource;
import com.yammer.dropwizard.auth.basic.BasicAuthProvider;
import com.yammer.dropwizard.testing.ResourceTest;
import org.axway.grapes.commons.api.ServerAPI;
import org.axway.grapes.server.GrapesTestUtils;
import org.axway.grapes.server.config.GrapesServerConfig;
import org.axway.grapes.server.core.options.FiltersHolder;
import org.axway.grapes.server.db.RepositoryHandler;
import org.axway.grapes.server.db.datamodel.DbCredential;
import org.axway.grapes.server.db.datamodel.DbSearch;
import org.axway.grapes.server.webapp.auth.GrapesAuthenticator;
import org.eclipse.jetty.http.HttpStatus;
import org.junit.Test;
import javax.ws.rs.core.MediaType;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Resource tests for {@code WebSearchResource}: verifies the JSON payload
 * returned by the search endpoint for full, empty, modules-only and
 * artifacts-only search results, using a mocked repository handler.
 */
public class WebSearchResourceTest extends ResourceTest {

    private RepositoryHandler repositoryHandler;

    /**
     * Wires a WebSearchResource backed by a mocked repository handler into
     * the Dropwizard test harness, together with a basic-auth provider.
     */
    @Override
    protected void setUpResources() throws Exception {
        repositoryHandler = GrapesTestUtils.getRepoHandlerMock();
        final GrapesServerConfig config = mock(GrapesServerConfig.class);
        final WebSearchResource resource = new WebSearchResource(repositoryHandler, config);
        addProvider(new BasicAuthProvider<DbCredential>(new GrapesAuthenticator(repositoryHandler), "test auth"));
        addResource(resource);
    }

    @Test
    public void getSearchResult() throws Exception {
        // Result containing both module ids and artifact ids.
        List<String> moduleIds = new ArrayList<>();
        moduleIds.add("testSearch_id_1");
        moduleIds.add("testSearch_id_2");
        List<String> artifactIds = new ArrayList<>();
        artifactIds.add("testSearch_artifact_id_1");
        artifactIds.add("testSearch_artifact_id_2");
        DbSearch search = new DbSearch();
        search.setModules(moduleIds);
        search.setArtifacts(artifactIds);
        when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
        final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
        final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertNotNull(response);
        assertEquals(HttpStatus.OK_200, response.getStatus());
        final String results = response.getEntity(new GenericType<String>() {
        });
        // Both lists serialized in full.
        assertEquals("{\"modules\":[\"testSearch_id_1\",\"testSearch_id_2\"],\"artifacts\":[\"testSearch_artifact_id_1\",\"testSearch_artifact_id_2\"]}", results);
    }

    @Test
    public void getNullSearchResult() {
        // Empty DbSearch: both fields stay null and serialize as null.
        DbSearch search = new DbSearch();
        when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
        final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
        final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertNotNull(response);
        assertEquals(HttpStatus.OK_200, response.getStatus());
        final String results = response.getEntity(new GenericType<String>() {
        });
        assertEquals("{\"modules\":null,\"artifacts\":null}", results);
    }

    @Test
    public void getModulesSearchResult() {
        // Only module ids populated; artifacts stay null.
        DbSearch search = new DbSearch();
        List<String> moduleIds = new ArrayList<>();
        moduleIds.add("testSearch_id_1");
        moduleIds.add("testSearch_id_2");
        search.setModules(moduleIds);
        when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
        final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
        final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertNotNull(response);
        assertEquals(HttpStatus.OK_200, response.getStatus());
        final String results = response.getEntity(new GenericType<String>() {
        });
        assertEquals("{\"modules\":[\"testSearch_id_1\",\"testSearch_id_2\"],\"artifacts\":null}", results);
    }

    @Test
    public void getArtifactsSearchResult() {
        // Only artifact ids populated; modules stay null.
        DbSearch search = new DbSearch();
        List<String> artifactIds = new ArrayList<>();
        artifactIds.add("testSearch_artifact_id_1");
        artifactIds.add("testSearch_artifact_id_2");
        search.setArtifacts(artifactIds);
        when(repositoryHandler.getSearchResult(eq("testSearch"), (FiltersHolder) anyObject())).thenReturn(search);
        final WebResource resource = client().resource("/" + ServerAPI.SEARCH_RESOURCE + "/testSearch");
        final ClientResponse response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertNotNull(response);
        assertEquals(HttpStatus.OK_200, response.getStatus());
        final String results = response.getEntity(new GenericType<String>() {
        });
        assertEquals("{\"modules\":null,\"artifacts\":[\"testSearch_artifact_id_1\",\"testSearch_artifact_id_2\"]}", results);
    }
}
// Code generated by MockGen. DO NOT EDIT.
// Source: github.com/cri-o/cri-o/internal/lib/sandbox (interfaces: NamespaceIface)

// Package sandboxmock is a generated GoMock package.
//
// NOTE(review): this file is produced by mockgen from the NamespaceIface
// interface. Do not hand-edit it; regenerate it instead so the mock stays
// in sync with the real interface definition.
package sandboxmock

import (
	sandbox "github.com/cri-o/cri-o/internal/lib/sandbox"
	gomock "github.com/golang/mock/gomock"
	reflect "reflect"
)

// MockNamespaceIface is a mock of NamespaceIface interface
type MockNamespaceIface struct {
	ctrl     *gomock.Controller
	recorder *MockNamespaceIfaceMockRecorder
}

// MockNamespaceIfaceMockRecorder is the mock recorder for MockNamespaceIface
type MockNamespaceIfaceMockRecorder struct {
	mock *MockNamespaceIface
}

// NewMockNamespaceIface creates a new mock instance
func NewMockNamespaceIface(ctrl *gomock.Controller) *MockNamespaceIface {
	mock := &MockNamespaceIface{ctrl: ctrl}
	mock.recorder = &MockNamespaceIfaceMockRecorder{mock}
	return mock
}

// EXPECT returns an object that allows the caller to indicate expected use
func (m *MockNamespaceIface) EXPECT() *MockNamespaceIfaceMockRecorder {
	return m.recorder
}

// Close mocks base method
func (m *MockNamespaceIface) Close() error {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "Close")
	ret0, _ := ret[0].(error)
	return ret0
}

// Close indicates an expected call of Close
func (mr *MockNamespaceIfaceMockRecorder) Close() *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockNamespaceIface)(nil).Close))
}

// Get mocks base method
func (m *MockNamespaceIface) Get() *sandbox.Namespace {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "Get")
	ret0, _ := ret[0].(*sandbox.Namespace)
	return ret0
}

// Get indicates an expected call of Get
func (mr *MockNamespaceIfaceMockRecorder) Get() *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockNamespaceIface)(nil).Get))
}

// Initialize mocks base method
func (m *MockNamespaceIface) Initialize() sandbox.NamespaceIface {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "Initialize")
	ret0, _ := ret[0].(sandbox.NamespaceIface)
	return ret0
}

// Initialize indicates an expected call of Initialize
func (mr *MockNamespaceIfaceMockRecorder) Initialize() *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Initialize", reflect.TypeOf((*MockNamespaceIface)(nil).Initialize))
}

// Initialized mocks base method
func (m *MockNamespaceIface) Initialized() bool {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "Initialized")
	ret0, _ := ret[0].(bool)
	return ret0
}

// Initialized indicates an expected call of Initialized
func (mr *MockNamespaceIfaceMockRecorder) Initialized() *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Initialized", reflect.TypeOf((*MockNamespaceIface)(nil).Initialized))
}

// Path mocks base method
func (m *MockNamespaceIface) Path() string {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "Path")
	ret0, _ := ret[0].(string)
	return ret0
}

// Path indicates an expected call of Path
func (mr *MockNamespaceIfaceMockRecorder) Path() *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Path", reflect.TypeOf((*MockNamespaceIface)(nil).Path))
}

// Remove mocks base method
func (m *MockNamespaceIface) Remove() error {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "Remove")
	ret0, _ := ret[0].(error)
	return ret0
}

// Remove indicates an expected call of Remove
func (mr *MockNamespaceIfaceMockRecorder) Remove() *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Remove", reflect.TypeOf((*MockNamespaceIface)(nil).Remove))
}

// Type mocks base method
func (m *MockNamespaceIface) Type() sandbox.NSType {
	m.ctrl.T.Helper()
	ret := m.ctrl.Call(m, "Type")
	ret0, _ := ret[0].(sandbox.NSType)
	return ret0
}

// Type indicates an expected call of Type
func (mr *MockNamespaceIfaceMockRecorder) Type() *gomock.Call {
	mr.mock.ctrl.T.Helper()
	return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Type", reflect.TypeOf((*MockNamespaceIface)(nil).Type))
}
| mikebrow/cri-o | test/mocks/sandbox/sandbox.go | GO | apache-2.0 | 4,124 |
#!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# This example gets all account budget proposals. To add an account budget
# proposal, run AddAccountBudgetProposal.rb.
require 'optparse'
require 'google/ads/google_ads'
# Retrieves all account budget proposals for the given customer and prints a
# one-line summary of each.
#
# @param customer_id [String] the Google Ads customer ID (digits only; any
#   dashes must already be stripped by the caller).
def get_account_budget_proposals(customer_id)
  # GoogleAdsClient will read a config file from
  # ENV['HOME']/google_ads_config.rb when called without parameters
  client = Google::Ads::GoogleAds::GoogleAdsClient.new

  ga_service = client.service.google_ads

  search_query = <<~QUERY
    SELECT
      account_budget_proposal.id,
      account_budget_proposal.account_budget,
      account_budget_proposal.billing_setup,
      account_budget_proposal.status,
      account_budget_proposal.proposed_name,
      account_budget_proposal.proposed_notes,
      account_budget_proposal.proposed_purchase_order_number,
      account_budget_proposal.proposal_type,
      account_budget_proposal.approval_date_time,
      account_budget_proposal.creation_date_time
    FROM account_budget_proposal
  QUERY

  # PAGE_SIZE is defined in the script entry point below.
  response = ga_service.search(
    customer_id: customer_id,
    query: search_query,
    page_size: PAGE_SIZE,
  )

  response.each do |row|
    proposal = row.account_budget_proposal
    # BUGFIX: the arguments must be passed in the same order as the
    # placeholders in the format string (ID, status, account_budget,
    # billing_setup, ...). Previously status/account_budget/billing_setup
    # were printed under the wrong labels.
    puts sprintf('Account budget proposal with ID %s, status %s, '\
        'account_budget %s, billing_setup %s, proposed_name %s, '\
        'proposed_notes %s, proposed_po_number %s, proposal_type %s, '\
        'approval_date_time %s, creation_date_time %s',
        proposal.id,
        proposal.status,
        proposal.account_budget,
        proposal.billing_setup,
        proposal.proposed_name,
        proposal.proposed_notes,
        proposal.proposed_purchase_order_number,
        proposal.proposal_type,
        proposal.approval_date_time,
        proposal.creation_date_time
    )
  end
end
# Script entry point: parses the command line, then lists account budget
# proposals, translating any API failure into readable stderr output.
if __FILE__ == $0
  PAGE_SIZE = 1000

  options = {}
  # The following parameter(s) should be provided to run the example. You can
  # either specify these by changing the INSERT_XXX_ID_HERE values below, or on
  # the command line.
  #
  # Parameters passed on the command line will override any parameters set in
  # code.
  #
  # Running the example with -h will print the command line usage.
  options[:customer_id] = 'INSERT_CUSTOMER_ID_HERE'
  # NOTE: removed a dead `options[:ad_group_id] = nil` assignment left over
  # from another example; this script accepts no ad group argument.

  OptionParser.new do |opts|
    opts.banner = sprintf('Usage: %s [options]', File.basename(__FILE__))

    opts.separator ''
    opts.separator 'Options:'

    opts.on('-C', '--customer-id CUSTOMER-ID', String, 'Customer ID') do |v|
      options[:customer_id] = v
    end

    opts.separator ''
    opts.separator 'Help:'

    opts.on_tail('-h', '--help', 'Show this message') do
      puts opts
      exit
    end
  end.parse!

  begin
    # Strip dashes so e.g. "123-456-7890" is accepted as a customer ID.
    get_account_budget_proposals(options.fetch(:customer_id).tr("-", ""))
  rescue Google::Ads::GoogleAds::Errors::GoogleAdsError => e
    e.failure.errors.each do |error|
      STDERR.printf("Error with message: %s\n", error.message)
      if error.location
        error.location.field_path_elements.each do |field_path_element|
          STDERR.printf("\tOn field: %s\n", field_path_element.field_name)
        end
      end
      error.error_code.to_h.each do |k, v|
        next if v == :UNSPECIFIED
        STDERR.printf("\tType: %s\n\tCode: %s\n", k, v)
      end
    end
    raise
  end
end
// Copyright 2017 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Factory for creating new frontend instances of Interaction
* domain objects.
*/
oppia.factory('InteractionObjectFactory', [
  'AnswerGroupObjectFactory', 'HintObjectFactory', 'OutcomeObjectFactory',
  'SolutionObjectFactory',
  function(
      AnswerGroupObjectFactory, HintObjectFactory, OutcomeObjectFactory,
      SolutionObjectFactory) {
    // Converts a list of answer-group backend dicts into domain objects.
    var buildAnswerGroups = function(answerGroupDicts) {
      return answerGroupDicts.map(function(answerGroupDict) {
        return AnswerGroupObjectFactory.createFromBackendDict(answerGroupDict);
      });
    };

    // Converts a list of hint backend dicts into domain objects.
    var buildHints = function(hintDicts) {
      return hintDicts.map(function(hintDict) {
        return HintObjectFactory.createFromBackendDict(hintDict);
      });
    };

    // Converts a solution backend dict into a domain object.
    var buildSolution = function(solutionDict) {
      return SolutionObjectFactory.createFromBackendDict(solutionDict);
    };

    // Frontend domain object representing a state's interaction, holding its
    // answer groups, customization args, default outcome, hints and solution.
    var Interaction = function(
        answerGroups, confirmedUnclassifiedAnswers, customizationArgs,
        defaultOutcome, hints, id, solution) {
      this.answerGroups = answerGroups;
      this.confirmedUnclassifiedAnswers = confirmedUnclassifiedAnswers;
      this.customizationArgs = customizationArgs;
      this.defaultOutcome = defaultOutcome;
      this.hints = hints;
      this.id = id;
      this.solution = solution;
    };

    // Serializes this interaction back into the backend dict format; absent
    // default outcome and solution are emitted as null.
    Interaction.prototype.toBackendDict = function() {
      return {
        answer_groups: this.answerGroups.map(function(answerGroup) {
          return answerGroup.toBackendDict();
        }),
        confirmed_unclassified_answers: this.confirmedUnclassifiedAnswers,
        customization_args: this.customizationArgs,
        default_outcome: (
          this.defaultOutcome ? this.defaultOutcome.toBackendDict() : null),
        hints: this.hints.map(function(hint) {
          return hint.toBackendDict();
        }),
        id: this.id,
        solution: this.solution ? this.solution.toBackendDict() : null
      };
    };

    // Deserializes a backend interaction dict into an Interaction instance.
    Interaction.createFromBackendDict = function(interactionDict) {
      var defaultOutcome = interactionDict.default_outcome ?
        OutcomeObjectFactory.createFromBackendDict(
          interactionDict.default_outcome) :
        null;
      var solution = interactionDict.solution ?
        buildSolution(interactionDict.solution) : null;
      return new Interaction(
        buildAnswerGroups(interactionDict.answer_groups),
        interactionDict.confirmed_unclassified_answers,
        interactionDict.customization_args,
        defaultOutcome,
        buildHints(interactionDict.hints),
        interactionDict.id,
        solution);
    };

    return Interaction;
  }
]);
| AllanYangZhou/oppia | core/templates/dev/head/domain/exploration/InteractionObjectFactory.js | JavaScript | apache-2.0 | 3,481 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.documentation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.MachineLearningGetResultsIT;
import org.elasticsearch.client.MachineLearningIT;
import org.elasticsearch.client.MlTestStateCleaner;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.PageParams;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.ml.CloseJobRequest;
import org.elasticsearch.client.ml.CloseJobResponse;
import org.elasticsearch.client.ml.DeleteCalendarEventRequest;
import org.elasticsearch.client.ml.DeleteCalendarJobRequest;
import org.elasticsearch.client.ml.DeleteCalendarRequest;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.DeleteDatafeedRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataRequest;
import org.elasticsearch.client.ml.DeleteExpiredDataResponse;
import org.elasticsearch.client.ml.DeleteFilterRequest;
import org.elasticsearch.client.ml.DeleteForecastRequest;
import org.elasticsearch.client.ml.DeleteJobRequest;
import org.elasticsearch.client.ml.DeleteJobResponse;
import org.elasticsearch.client.ml.DeleteModelSnapshotRequest;
import org.elasticsearch.client.ml.DeleteTrainedModelRequest;
import org.elasticsearch.client.ml.EstimateModelMemoryRequest;
import org.elasticsearch.client.ml.EstimateModelMemoryResponse;
import org.elasticsearch.client.ml.EvaluateDataFrameRequest;
import org.elasticsearch.client.ml.EvaluateDataFrameResponse;
import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.FindFileStructureRequest;
import org.elasticsearch.client.ml.FindFileStructureResponse;
import org.elasticsearch.client.ml.FlushJobRequest;
import org.elasticsearch.client.ml.FlushJobResponse;
import org.elasticsearch.client.ml.ForecastJobRequest;
import org.elasticsearch.client.ml.ForecastJobResponse;
import org.elasticsearch.client.ml.GetBucketsRequest;
import org.elasticsearch.client.ml.GetBucketsResponse;
import org.elasticsearch.client.ml.GetCalendarEventsRequest;
import org.elasticsearch.client.ml.GetCalendarEventsResponse;
import org.elasticsearch.client.ml.GetCalendarsRequest;
import org.elasticsearch.client.ml.GetCalendarsResponse;
import org.elasticsearch.client.ml.GetCategoriesRequest;
import org.elasticsearch.client.ml.GetCategoriesResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest;
import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse;
import org.elasticsearch.client.ml.GetDatafeedRequest;
import org.elasticsearch.client.ml.GetDatafeedResponse;
import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
import org.elasticsearch.client.ml.GetFiltersRequest;
import org.elasticsearch.client.ml.GetFiltersResponse;
import org.elasticsearch.client.ml.GetInfluencersRequest;
import org.elasticsearch.client.ml.GetInfluencersResponse;
import org.elasticsearch.client.ml.GetJobRequest;
import org.elasticsearch.client.ml.GetJobResponse;
import org.elasticsearch.client.ml.GetJobStatsRequest;
import org.elasticsearch.client.ml.GetJobStatsResponse;
import org.elasticsearch.client.ml.GetModelSnapshotsRequest;
import org.elasticsearch.client.ml.GetModelSnapshotsResponse;
import org.elasticsearch.client.ml.GetOverallBucketsRequest;
import org.elasticsearch.client.ml.GetOverallBucketsResponse;
import org.elasticsearch.client.ml.GetRecordsRequest;
import org.elasticsearch.client.ml.GetRecordsResponse;
import org.elasticsearch.client.ml.GetTrainedModelsRequest;
import org.elasticsearch.client.ml.GetTrainedModelsResponse;
import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest;
import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse;
import org.elasticsearch.client.ml.MlInfoRequest;
import org.elasticsearch.client.ml.MlInfoResponse;
import org.elasticsearch.client.ml.OpenJobRequest;
import org.elasticsearch.client.ml.OpenJobResponse;
import org.elasticsearch.client.ml.PostCalendarEventRequest;
import org.elasticsearch.client.ml.PostCalendarEventResponse;
import org.elasticsearch.client.ml.PostDataRequest;
import org.elasticsearch.client.ml.PostDataResponse;
import org.elasticsearch.client.ml.PreviewDatafeedRequest;
import org.elasticsearch.client.ml.PreviewDatafeedResponse;
import org.elasticsearch.client.ml.PutCalendarJobRequest;
import org.elasticsearch.client.ml.PutCalendarRequest;
import org.elasticsearch.client.ml.PutCalendarResponse;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.PutDatafeedRequest;
import org.elasticsearch.client.ml.PutDatafeedResponse;
import org.elasticsearch.client.ml.PutFilterRequest;
import org.elasticsearch.client.ml.PutFilterResponse;
import org.elasticsearch.client.ml.PutJobRequest;
import org.elasticsearch.client.ml.PutJobResponse;
import org.elasticsearch.client.ml.PutTrainedModelRequest;
import org.elasticsearch.client.ml.PutTrainedModelResponse;
import org.elasticsearch.client.ml.RevertModelSnapshotRequest;
import org.elasticsearch.client.ml.RevertModelSnapshotResponse;
import org.elasticsearch.client.ml.SetUpgradeModeRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StartDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.StartDatafeedRequest;
import org.elasticsearch.client.ml.StartDatafeedResponse;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse;
import org.elasticsearch.client.ml.StopDatafeedRequest;
import org.elasticsearch.client.ml.StopDatafeedResponse;
import org.elasticsearch.client.ml.UpdateDatafeedRequest;
import org.elasticsearch.client.ml.UpdateFilterRequest;
import org.elasticsearch.client.ml.UpdateJobRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;
import org.elasticsearch.client.ml.UpdateModelSnapshotResponse;
import org.elasticsearch.client.ml.calendars.Calendar;
import org.elasticsearch.client.ml.calendars.ScheduledEvent;
import org.elasticsearch.client.ml.calendars.ScheduledEventTests;
import org.elasticsearch.client.ml.datafeed.ChunkingConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
import org.elasticsearch.client.ml.datafeed.DatafeedStats;
import org.elasticsearch.client.ml.datafeed.DatafeedUpdate;
import org.elasticsearch.client.ml.datafeed.DelayedDataCheckConfig;
import org.elasticsearch.client.ml.dataframe.Classification;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsSource;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsState;
import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats;
import org.elasticsearch.client.ml.dataframe.OutlierDetection;
import org.elasticsearch.client.ml.dataframe.QueryConfig;
import org.elasticsearch.client.ml.dataframe.Regression;
import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.AccuracyMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.ActualClass;
import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric.PredictedClass;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.AucRocMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.BinarySoftClassification;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.ConfusionMatrixMetric.ConfusionMatrix;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.PrecisionMetric;
import org.elasticsearch.client.ml.dataframe.evaluation.softclassification.RecallMetric;
import org.elasticsearch.client.ml.dataframe.explain.FieldSelection;
import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation;
import org.elasticsearch.client.ml.filestructurefinder.FileStructure;
import org.elasticsearch.client.ml.inference.InferenceToXContentCompressor;
import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider;
import org.elasticsearch.client.ml.inference.TrainedModelConfig;
import org.elasticsearch.client.ml.inference.TrainedModelDefinition;
import org.elasticsearch.client.ml.inference.TrainedModelDefinitionTests;
import org.elasticsearch.client.ml.inference.TrainedModelInput;
import org.elasticsearch.client.ml.inference.TrainedModelStats;
import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig;
import org.elasticsearch.client.ml.inference.trainedmodel.TargetType;
import org.elasticsearch.client.ml.job.config.AnalysisConfig;
import org.elasticsearch.client.ml.job.config.AnalysisLimits;
import org.elasticsearch.client.ml.job.config.DataDescription;
import org.elasticsearch.client.ml.job.config.DetectionRule;
import org.elasticsearch.client.ml.job.config.Detector;
import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.client.ml.job.config.ModelPlotConfig;
import org.elasticsearch.client.ml.job.config.Operator;
import org.elasticsearch.client.ml.job.config.RuleCondition;
import org.elasticsearch.client.ml.job.process.DataCounts;
import org.elasticsearch.client.ml.job.process.ModelSnapshot;
import org.elasticsearch.client.ml.job.results.AnomalyRecord;
import org.elasticsearch.client.ml.job.results.Bucket;
import org.elasticsearch.client.ml.job.results.CategoryDefinition;
import org.elasticsearch.client.ml.job.results.Influencer;
import org.elasticsearch.client.ml.job.results.OverallBucket;
import org.elasticsearch.client.ml.job.stats.JobStats;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.tasks.TaskId;
import org.junit.After;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.core.Is.is;
public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
/**
 * Removes any machine-learning state (jobs, datafeeds, etc.) left behind by
 * the previous test so that the test cases stay independent of each other.
 */
@After
public void cleanUp() throws IOException {
    new MlTestStateCleaner(logger, highLevelClient().machineLearning()).clearMlMetadata();
}
/**
 * Demonstrates creating an anomaly-detection job, both synchronously and
 * asynchronously. The {@code tag::}/{@code end::} markers delimit snippets
 * extracted verbatim into the high-level client documentation, so the code
 * between them must not be altered without updating the docs build.
 */
public void testCreateJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    // tag::put-job-detector
    Detector.Builder detectorBuilder = new Detector.Builder()
        .setFunction("sum") // <1>
        .setFieldName("total") // <2>
        .setDetectorDescription("Sum of total"); // <3>
    // end::put-job-detector
    // tag::put-job-analysis-config
    List<Detector> detectors = Collections.singletonList(detectorBuilder.build()); // <1>
    AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(detectors) // <2>
        .setBucketSpan(TimeValue.timeValueMinutes(10)); // <3>
    // end::put-job-analysis-config
    // tag::put-job-data-description
    DataDescription.Builder dataDescriptionBuilder = new DataDescription.Builder()
        .setTimeField("timestamp"); // <1>
    // end::put-job-data-description
    // Synchronous variant.
    {
        String id = "job_1";
        // tag::put-job-config
        Job.Builder jobBuilder = new Job.Builder(id) // <1>
            .setAnalysisConfig(analysisConfigBuilder) // <2>
            .setDataDescription(dataDescriptionBuilder) // <3>
            .setDescription("Total sum of requests"); // <4>
        // end::put-job-config
        // tag::put-job-request
        PutJobRequest request = new PutJobRequest(jobBuilder.build()); // <1>
        // end::put-job-request
        // tag::put-job-execute
        PutJobResponse response = client.machineLearning().putJob(request, RequestOptions.DEFAULT);
        // end::put-job-execute
        // tag::put-job-response
        Date createTime = response.getResponse().getCreateTime(); // <1>
        // end::put-job-response
        // A positive creation time confirms the job was actually created.
        assertThat(createTime.getTime(), greaterThan(0L));
    }
    // Asynchronous variant.
    {
        String id = "job_2";
        Job.Builder jobBuilder = new Job.Builder(id)
            .setAnalysisConfig(analysisConfigBuilder)
            .setDataDescription(dataDescriptionBuilder)
            .setDescription("Total sum of requests");
        PutJobRequest request = new PutJobRequest(jobBuilder.build());
        // tag::put-job-execute-listener
        ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
            @Override
            public void onResponse(PutJobResponse response) {
                // <1>
            }
            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::put-job-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::put-job-execute-async
        client.machineLearning().putJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::put-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}
/**
 * Demonstrates fetching jobs by id and by wildcard expression, both
 * synchronously and asynchronously. The {@code tag::}/{@code end::} markers
 * delimit snippets extracted into the high-level client documentation.
 */
public void testGetJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    // Create two jobs up front so the wildcard request matches both.
    Job job = MachineLearningIT.buildJob("get-machine-learning-job1");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job2");
    client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
    // Synchronous variant.
    {
        // tag::get-job-request
        GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*"); // <1>
        request.setAllowNoJobs(true); // <2>
        // end::get-job-request
        // tag::get-job-execute
        GetJobResponse response = client.machineLearning().getJob(request, RequestOptions.DEFAULT);
        // end::get-job-execute
        // tag::get-job-response
        long numberOfJobs = response.count(); // <1>
        List<Job> jobs = response.jobs(); // <2>
        // end::get-job-response
        // Both the explicit id and the wildcard should resolve to the two
        // jobs created above (duplicates are collapsed by the server).
        assertEquals(2, response.count());
        assertThat(response.jobs(), hasSize(2));
        assertThat(response.jobs().stream().map(Job::getId).collect(Collectors.toList()),
            containsInAnyOrder(job.getId(), secondJob.getId()));
    }
    // Asynchronous variant.
    {
        GetJobRequest request = new GetJobRequest("get-machine-learning-job1", "get-machine-learning-job*");
        // tag::get-job-execute-listener
        ActionListener<GetJobResponse> listener = new ActionListener<GetJobResponse>() {
            @Override
            public void onResponse(GetJobResponse response) {
                // <1>
            }
            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::get-job-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::get-job-execute-async
        client.machineLearning().getJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::get-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}
/**
 * Demonstrates deleting a job, both synchronously and asynchronously. The
 * {@code tag::}/{@code end::} markers delimit snippets extracted into the
 * high-level client documentation.
 */
public void testDeleteJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    // Create one job per variant so each delete targets an existing job.
    String jobId = "my-first-machine-learning-job";
    Job job = MachineLearningIT.buildJob(jobId);
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    Job secondJob = MachineLearningIT.buildJob("my-second-machine-learning-job");
    client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
    // Synchronous variant.
    {
        //tag::delete-job-request
        DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-first-machine-learning-job"); // <1>
        //end::delete-job-request
        //tag::delete-job-request-force
        deleteJobRequest.setForce(false); // <1>
        //end::delete-job-request-force
        //tag::delete-job-request-wait-for-completion
        deleteJobRequest.setWaitForCompletion(true); // <1>
        //end::delete-job-request-wait-for-completion
        //tag::delete-job-execute
        DeleteJobResponse deleteJobResponse = client.machineLearning().deleteJob(deleteJobRequest, RequestOptions.DEFAULT);
        //end::delete-job-execute
        //tag::delete-job-response
        Boolean isAcknowledged = deleteJobResponse.getAcknowledged(); // <1>
        TaskId task = deleteJobResponse.getTask(); // <2>
        //end::delete-job-response
        // With waitForCompletion=true the response is an acknowledgement and
        // carries no task id.
        assertTrue(isAcknowledged);
        assertNull(task);
    }
    // Asynchronous variant.
    {
        //tag::delete-job-execute-listener
        ActionListener<DeleteJobResponse> listener = new ActionListener<DeleteJobResponse>() {
            @Override
            public void onResponse(DeleteJobResponse deleteJobResponse) {
                // <1>
            }
            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::delete-job-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        DeleteJobRequest deleteJobRequest = new DeleteJobRequest("my-second-machine-learning-job");
        // tag::delete-job-execute-async
        client.machineLearning().deleteJobAsync(deleteJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}
/**
 * Demonstrates opening a job, both synchronously and asynchronously. The
 * {@code tag::}/{@code end::} markers delimit snippets extracted into the
 * high-level client documentation.
 */
public void testOpenJob() throws Exception {
    RestHighLevelClient client = highLevelClient();
    // Create one job per variant so each open targets an existing job.
    Job job = MachineLearningIT.buildJob("opening-my-first-machine-learning-job");
    client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    Job secondJob = MachineLearningIT.buildJob("opening-my-second-machine-learning-job");
    client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
    // Synchronous variant.
    {
        // tag::open-job-request
        OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-first-machine-learning-job"); // <1>
        openJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
        // end::open-job-request
        // tag::open-job-execute
        OpenJobResponse openJobResponse = client.machineLearning().openJob(openJobRequest, RequestOptions.DEFAULT);
        // end::open-job-execute
        // tag::open-job-response
        boolean isOpened = openJobResponse.isOpened(); // <1>
        String node = openJobResponse.getNode(); // <2>
        // end::open-job-response
        // A non-null node confirms the job was assigned somewhere.
        assertThat(node, notNullValue());
    }
    // Asynchronous variant.
    {
        // tag::open-job-execute-listener
        ActionListener<OpenJobResponse> listener = new ActionListener<OpenJobResponse>() {
            @Override
            public void onResponse(OpenJobResponse openJobResponse) {
                // <1>
            }
            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::open-job-execute-listener
        OpenJobRequest openJobRequest = new OpenJobRequest("opening-my-second-machine-learning-job");
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::open-job-execute-async
        client.machineLearning().openJobAsync(openJobRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::open-job-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}
    /**
     * Documentation-snippet test for the ML close-job API. Tagged regions are
     * extracted into the reference docs; do not reformat their contents.
     * Each scope creates and opens its own job so the close call succeeds.
     */
    public void testCloseJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            Job job = MachineLearningIT.buildJob("closing-my-first-machine-learning-job");
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
            // tag::close-job-request
            CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-first-machine-learning-job", "otherjobs*"); // <1>
            closeJobRequest.setForce(false); // <2>
            closeJobRequest.setAllowNoJobs(true); // <3>
            closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10)); // <4>
            // end::close-job-request
            // tag::close-job-execute
            CloseJobResponse closeJobResponse = client.machineLearning().closeJob(closeJobRequest, RequestOptions.DEFAULT);
            // end::close-job-execute
            // tag::close-job-response
            boolean isClosed = closeJobResponse.isClosed(); // <1>
            // end::close-job-response
        }
        {
            Job job = MachineLearningIT.buildJob("closing-my-second-machine-learning-job");
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
            // tag::close-job-execute-listener
            ActionListener<CloseJobResponse> listener = new ActionListener<CloseJobResponse>() {
                @Override
                public void onResponse(CloseJobResponse closeJobResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::close-job-execute-listener
            CloseJobRequest closeJobRequest = new CloseJobRequest("closing-my-second-machine-learning-job");
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::close-job-execute-async
            client.machineLearning().closeJobAsync(closeJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::close-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML update-job API. Tagged regions are
     * extracted into the reference docs; do not reformat their contents.
     * Builds a job whose detector uses {@code mlcategory} partitioning so the
     * categorization-related update options in the snippet are valid.
     */
    public void testUpdateJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-update-job";
        Job tempJob = MachineLearningIT.buildJob(jobId);
        Job job = new Job.Builder(tempJob)
            .setAnalysisConfig(new AnalysisConfig.Builder(tempJob.getAnalysisConfig())
                .setCategorizationFieldName("categorization-field")
                .setDetector(0,
                    new Detector.Builder().setFieldName("total")
                        .setFunction("sum")
                        .setPartitionFieldName("mlcategory")
                        .setDetectorDescription(randomAlphaOfLength(10))
                        .build()))
            .build();
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        {
            List<DetectionRule> detectionRules = Arrays.asList(
                new DetectionRule.Builder(Arrays.asList(RuleCondition.createTime(Operator.GT, 100L))).build());
            Map<String, Object> customSettings = new HashMap<>();
            customSettings.put("custom-setting-1", "custom-value");
            // tag::update-job-detector-options
            JobUpdate.DetectorUpdate detectorUpdate = new JobUpdate.DetectorUpdate(0, // <1>
                "detector description", // <2>
                detectionRules); // <3>
            // end::update-job-detector-options
            // tag::update-job-options
            JobUpdate update = new JobUpdate.Builder(jobId) // <1>
                .setDescription("My description") // <2>
                .setAnalysisLimits(new AnalysisLimits(1000L, null)) // <3>
                .setBackgroundPersistInterval(TimeValue.timeValueHours(3)) // <4>
                .setCategorizationFilters(Arrays.asList("categorization-filter")) // <5>
                .setDetectorUpdates(Arrays.asList(detectorUpdate)) // <6>
                .setGroups(Arrays.asList("job-group-1")) // <7>
                .setResultsRetentionDays(10L) // <8>
                .setModelPlotConfig(new ModelPlotConfig(true, null, true)) // <9>
                .setModelSnapshotRetentionDays(7L) // <10>
                .setCustomSettings(customSettings) // <11>
                .setRenormalizationWindowDays(3L) // <12>
                .build();
            // end::update-job-options
            // tag::update-job-request
            UpdateJobRequest updateJobRequest = new UpdateJobRequest(update); // <1>
            // end::update-job-request
            // tag::update-job-execute
            PutJobResponse updateJobResponse = client.machineLearning().updateJob(updateJobRequest, RequestOptions.DEFAULT);
            // end::update-job-execute
            // tag::update-job-response
            Job updatedJob = updateJobResponse.getResponse(); // <1>
            // end::update-job-response
            assertEquals(update.getDescription(), updatedJob.getDescription());
        }
        {
            // tag::update-job-execute-listener
            ActionListener<PutJobResponse> listener = new ActionListener<PutJobResponse>() {
                @Override
                public void onResponse(PutJobResponse updateJobResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::update-job-execute-listener
            UpdateJobRequest updateJobRequest = new UpdateJobRequest(new JobUpdate.Builder(jobId).build());
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::update-job-execute-async
            client.machineLearning().updateJobAsync(updateJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::update-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML put-datafeed API. Tagged regions are
     * extracted into the reference docs; do not reformat their contents. Note
     * several snippet settings (aggregations, delayed-data-check config) are
     * deliberately reset to null right after being demonstrated so the request
     * does not trip server-side validation when actually executed.
     */
    public void testPutDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // We need to create a job for the datafeed request to be valid
            String jobId = "put-datafeed-job-1";
            Job job = MachineLearningIT.buildJob(jobId);
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            String id = "datafeed-1";
            // tag::put-datafeed-config
            DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder(id, jobId) // <1>
                .setIndices("index_1", "index_2"); // <2>
            // end::put-datafeed-config
            AggregatorFactories.Builder aggs = AggregatorFactories.builder();
            // tag::put-datafeed-config-set-aggregations
            datafeedBuilder.setAggregations(aggs); // <1>
            // end::put-datafeed-config-set-aggregations
            // Clearing aggregation to avoid complex validation rules
            datafeedBuilder.setAggregations((String) null);
            // tag::put-datafeed-config-set-chunking-config
            datafeedBuilder.setChunkingConfig(ChunkingConfig.newAuto()); // <1>
            // end::put-datafeed-config-set-chunking-config
            // tag::put-datafeed-config-set-frequency
            datafeedBuilder.setFrequency(TimeValue.timeValueSeconds(30)); // <1>
            // end::put-datafeed-config-set-frequency
            // tag::put-datafeed-config-set-query
            datafeedBuilder.setQuery(QueryBuilders.matchAllQuery()); // <1>
            // end::put-datafeed-config-set-query
            // tag::put-datafeed-config-set-query-delay
            datafeedBuilder.setQueryDelay(TimeValue.timeValueMinutes(1)); // <1>
            // end::put-datafeed-config-set-query-delay
            // tag::put-datafeed-config-set-delayed-data-check-config
            datafeedBuilder.setDelayedDataCheckConfig(DelayedDataCheckConfig
                .enabledDelayedDataCheckConfig(TimeValue.timeValueHours(1))); // <1>
            // end::put-datafeed-config-set-delayed-data-check-config
            // no need to accidentally trip internal validations due to job bucket size
            datafeedBuilder.setDelayedDataCheckConfig(null);
            List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
            // tag::put-datafeed-config-set-script-fields
            datafeedBuilder.setScriptFields(scriptFields); // <1>
            // end::put-datafeed-config-set-script-fields
            // tag::put-datafeed-config-set-scroll-size
            datafeedBuilder.setScrollSize(1000); // <1>
            // end::put-datafeed-config-set-scroll-size
            // tag::put-datafeed-request
            PutDatafeedRequest request = new PutDatafeedRequest(datafeedBuilder.build()); // <1>
            // end::put-datafeed-request
            // tag::put-datafeed-execute
            PutDatafeedResponse response = client.machineLearning().putDatafeed(request, RequestOptions.DEFAULT);
            // end::put-datafeed-execute
            // tag::put-datafeed-response
            DatafeedConfig datafeed = response.getResponse(); // <1>
            // end::put-datafeed-response
            assertThat(datafeed.getId(), equalTo("datafeed-1"));
        }
        {
            // We need to create a job for the datafeed request to be valid
            String jobId = "put-datafeed-job-2";
            Job job = MachineLearningIT.buildJob(jobId);
            client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
            String id = "datafeed-2";
            DatafeedConfig datafeed = new DatafeedConfig.Builder(id, jobId).setIndices("index_1", "index_2").build();
            PutDatafeedRequest request = new PutDatafeedRequest(datafeed);
            // tag::put-datafeed-execute-listener
            ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
                @Override
                public void onResponse(PutDatafeedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::put-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-datafeed-execute-async
            client.machineLearning().putDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML update-datafeed API. Tagged regions
     * are extracted into the reference docs; do not reformat their contents.
     * The aggregation set inside the snippet is cleared again before execution
     * to avoid complex server-side validation rules.
     */
    public void testUpdateDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("update-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            AggregatorFactories.Builder aggs = AggregatorFactories.builder();
            List<SearchSourceBuilder.ScriptField> scriptFields = Collections.emptyList();
            // tag::update-datafeed-config
            DatafeedUpdate.Builder datafeedUpdateBuilder = new DatafeedUpdate.Builder(datafeedId) // <1>
                .setAggregations(aggs) // <2>
                .setIndices("index_1", "index_2") // <3>
                .setChunkingConfig(ChunkingConfig.newAuto()) // <4>
                .setFrequency(TimeValue.timeValueSeconds(30)) // <5>
                .setQuery(QueryBuilders.matchAllQuery()) // <6>
                .setQueryDelay(TimeValue.timeValueMinutes(1)) // <7>
                .setScriptFields(scriptFields) // <8>
                .setScrollSize(1000); // <9>
            // end::update-datafeed-config
            // Clearing aggregation to avoid complex validation rules
            datafeedUpdateBuilder.setAggregations((String) null);
            // tag::update-datafeed-request
            UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdateBuilder.build()); // <1>
            // end::update-datafeed-request
            // tag::update-datafeed-execute
            PutDatafeedResponse response = client.machineLearning().updateDatafeed(request, RequestOptions.DEFAULT);
            // end::update-datafeed-execute
            // tag::update-datafeed-response
            DatafeedConfig updatedDatafeed = response.getResponse(); // <1>
            // end::update-datafeed-response
            assertThat(updatedDatafeed.getId(), equalTo(datafeedId));
        }
        {
            DatafeedUpdate datafeedUpdate = new DatafeedUpdate.Builder(datafeedId).setIndices("index_1", "index_2").build();
            UpdateDatafeedRequest request = new UpdateDatafeedRequest(datafeedUpdate);
            // tag::update-datafeed-execute-listener
            ActionListener<PutDatafeedResponse> listener = new ActionListener<PutDatafeedResponse>() {
                @Override
                public void onResponse(PutDatafeedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::update-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::update-datafeed-execute-async
            client.machineLearning().updateDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::update-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML get-datafeed API. Tagged regions are
     * extracted into the reference docs; do not reformat their contents.
     * A single job + datafeed is created so the get call returns exactly one config.
     */
    public void testGetDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("get-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId()).setIndices("foo").build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::get-datafeed-request
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId); // <1>
            request.setAllowNoDatafeeds(true); // <2>
            // end::get-datafeed-request
            // tag::get-datafeed-execute
            GetDatafeedResponse response = client.machineLearning().getDatafeed(request, RequestOptions.DEFAULT);
            // end::get-datafeed-execute
            // tag::get-datafeed-response
            long numberOfDatafeeds = response.count(); // <1>
            List<DatafeedConfig> datafeeds = response.datafeeds(); // <2>
            // end::get-datafeed-response
            assertEquals(1, numberOfDatafeeds);
            assertEquals(1, datafeeds.size());
        }
        {
            GetDatafeedRequest request = new GetDatafeedRequest(datafeedId);
            // tag::get-datafeed-execute-listener
            ActionListener<GetDatafeedResponse> listener = new ActionListener<GetDatafeedResponse>() {
                @Override
                public void onResponse(GetDatafeedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-datafeed-execute-async
            client.machineLearning().getDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML delete-datafeed API. Tagged regions
     * are extracted into the reference docs; do not reformat their contents.
     * The datafeed is re-created between the two scopes because the first scope
     * actually deletes it.
     */
    public void testDeleteDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-delete-datafeed-job";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = "test-delete-datafeed";
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, jobId).setIndices("foo").build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::delete-datafeed-request
            DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
            deleteDatafeedRequest.setForce(false); // <1>
            // end::delete-datafeed-request
            // tag::delete-datafeed-execute
            AcknowledgedResponse deleteDatafeedResponse = client.machineLearning().deleteDatafeed(
                deleteDatafeedRequest, RequestOptions.DEFAULT);
            // end::delete-datafeed-execute
            // tag::delete-datafeed-response
            boolean isAcknowledged = deleteDatafeedResponse.isAcknowledged(); // <1>
            // end::delete-datafeed-response
        }
        // Recreate datafeed to allow second deletion
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::delete-datafeed-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            DeleteDatafeedRequest deleteDatafeedRequest = new DeleteDatafeedRequest(datafeedId);
            // tag::delete-datafeed-execute-async
            client.machineLearning().deleteDatafeedAsync(deleteDatafeedRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML preview-datafeed API. Tagged regions
     * are extracted into the reference docs; do not reformat their contents.
     * The backing index is freshly created and empty, hence the assertion that
     * the semi-parsed preview is empty.
     */
    public void testPreviewDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("preview-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        String indexName = "preview_data_2";
        createIndex(indexName);
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
            .setIndices(indexName)
            .build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        {
            // tag::preview-datafeed-request
            PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId); // <1>
            // end::preview-datafeed-request
            // tag::preview-datafeed-execute
            PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(request, RequestOptions.DEFAULT);
            // end::preview-datafeed-execute
            // tag::preview-datafeed-response
            BytesReference rawPreview = response.getPreview(); // <1>
            List<Map<String, Object>> semiParsedPreview = response.getDataList(); // <2>
            // end::preview-datafeed-response
            assertTrue(semiParsedPreview.isEmpty());
        }
        {
            PreviewDatafeedRequest request = new PreviewDatafeedRequest(datafeedId);
            // tag::preview-datafeed-execute-listener
            ActionListener<PreviewDatafeedResponse> listener = new ActionListener<PreviewDatafeedResponse>() {
                @Override
                public void onResponse(PreviewDatafeedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::preview-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::preview-datafeed-execute-async
            client.machineLearning().previewDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::preview-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML start-datafeed API. Tagged regions
     * are extracted into the reference docs; do not reformat their contents.
     * The job must be opened before its datafeed can be started.
     */
    public void testStartDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("start-datafeed-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        String datafeedId = job.getId() + "-feed";
        String indexName = "start_data_2";
        createIndex(indexName);
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId, job.getId())
            .setIndices(indexName)
            .build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
        {
            // tag::start-datafeed-request
            StartDatafeedRequest request = new StartDatafeedRequest(datafeedId); // <1>
            // end::start-datafeed-request
            // tag::start-datafeed-request-options
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            request.setStart("2018-08-20T00:00:00Z"); // <2>
            request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
            // end::start-datafeed-request-options
            // tag::start-datafeed-execute
            StartDatafeedResponse response = client.machineLearning().startDatafeed(request, RequestOptions.DEFAULT);
            // end::start-datafeed-execute
            // tag::start-datafeed-response
            boolean started = response.isStarted(); // <1>
            String node = response.getNode(); // <2>
            // end::start-datafeed-response
            assertTrue(started);
            assertThat(node, notNullValue());
        }
        {
            StartDatafeedRequest request = new StartDatafeedRequest(datafeedId);
            // tag::start-datafeed-execute-listener
            ActionListener<StartDatafeedResponse> listener = new ActionListener<StartDatafeedResponse>() {
                @Override
                public void onResponse(StartDatafeedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::start-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::start-datafeed-execute-async
            client.machineLearning().startDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::start-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML stop-datafeed API. Tagged regions
     * are extracted into the reference docs; do not reformat their contents.
     * The request built in the snippet names non-existent datafeeds, so it is
     * immediately replaced with a stop-all request before execution.
     */
    public void testStopDatafeed() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // tag::stop-datafeed-request
            StopDatafeedRequest request = new StopDatafeedRequest("datafeed_id1", "datafeed_id*"); // <1>
            // end::stop-datafeed-request
            // Swap in a stop-all request so the call succeeds regardless of which datafeeds exist.
            request = StopDatafeedRequest.stopAllDatafeedsRequest();
            // tag::stop-datafeed-request-options
            request.setAllowNoDatafeeds(true); // <1>
            request.setForce(true); // <2>
            request.setTimeout(TimeValue.timeValueMinutes(10)); // <3>
            // end::stop-datafeed-request-options
            // tag::stop-datafeed-execute
            StopDatafeedResponse response = client.machineLearning().stopDatafeed(request, RequestOptions.DEFAULT);
            // end::stop-datafeed-execute
            // tag::stop-datafeed-response
            boolean stopped = response.isStopped(); // <1>
            // end::stop-datafeed-response
            assertTrue(stopped);
        }
        {
            StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest();
            // tag::stop-datafeed-execute-listener
            ActionListener<StopDatafeedResponse> listener = new ActionListener<StopDatafeedResponse>() {
                @Override
                public void onResponse(StopDatafeedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::stop-datafeed-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::stop-datafeed-execute-async
            client.machineLearning().stopDatafeedAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::stop-datafeed-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML get-datafeed-stats API. Tagged
     * regions are extracted into the reference docs; do not reformat their
     * contents. Two jobs with matching datafeeds are created so the wildcard
     * request in the snippet matches exactly two stats entries.
     */
    public void testGetDatafeedStats() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats1");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        Job secondJob = MachineLearningIT.buildJob("get-machine-learning-datafeed-stats2");
        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
        String datafeedId1 = job.getId() + "-feed";
        String indexName = "datafeed_stats_data_2";
        createIndex(indexName);
        DatafeedConfig datafeed = DatafeedConfig.builder(datafeedId1, job.getId())
            .setIndices(indexName)
            .build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);
        String datafeedId2 = secondJob.getId() + "-feed";
        DatafeedConfig secondDatafeed = DatafeedConfig.builder(datafeedId2, secondJob.getId())
            .setIndices(indexName)
            .build();
        client.machineLearning().putDatafeed(new PutDatafeedRequest(secondDatafeed), RequestOptions.DEFAULT);
        {
            //tag::get-datafeed-stats-request
            GetDatafeedStatsRequest request =
                new GetDatafeedStatsRequest("get-machine-learning-datafeed-stats1-feed", "get-machine-learning-datafeed*"); // <1>
            request.setAllowNoDatafeeds(true); // <2>
            //end::get-datafeed-stats-request
            //tag::get-datafeed-stats-execute
            GetDatafeedStatsResponse response = client.machineLearning().getDatafeedStats(request, RequestOptions.DEFAULT);
            //end::get-datafeed-stats-execute
            //tag::get-datafeed-stats-response
            long numberOfDatafeedStats = response.count(); // <1>
            List<DatafeedStats> datafeedStats = response.datafeedStats(); // <2>
            //end::get-datafeed-stats-response
            assertEquals(2, response.count());
            assertThat(response.datafeedStats(), hasSize(2));
            assertThat(response.datafeedStats().stream().map(DatafeedStats::getDatafeedId).collect(Collectors.toList()),
                containsInAnyOrder(datafeed.getId(), secondDatafeed.getId()));
        }
        {
            GetDatafeedStatsRequest request = new GetDatafeedStatsRequest("*");
            // tag::get-datafeed-stats-execute-listener
            ActionListener<GetDatafeedStatsResponse> listener = new ActionListener<GetDatafeedStatsResponse>() {
                @Override
                public void onResponse(GetDatafeedStatsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-datafeed-stats-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-datafeed-stats-execute-async
            client.machineLearning().getDatafeedStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-datafeed-stats-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML get-buckets API. Tagged regions are
     * extracted into the reference docs; do not reformat their contents.
     * A single bucket result document is indexed directly into the shared ML
     * results index so that the final get-buckets call returns exactly one bucket;
     * several snippet-only options (timestamp, page params) are reset to null
     * afterwards because they would otherwise exclude that bucket.
     */
    public void testGetBuckets() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-get-buckets";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        // Let us index a bucket
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-get-buckets\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
            "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 80.0}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);
        {
            // tag::get-buckets-request
            GetBucketsRequest request = new GetBucketsRequest(jobId); // <1>
            // end::get-buckets-request
            // tag::get-buckets-timestamp
            request.setTimestamp("2018-08-17T00:00:00Z"); // <1>
            // end::get-buckets-timestamp
            // Set timestamp to null as it is incompatible with other args
            request.setTimestamp(null);
            // tag::get-buckets-anomaly-score
            request.setAnomalyScore(75.0); // <1>
            // end::get-buckets-anomaly-score
            // tag::get-buckets-desc
            request.setDescending(true); // <1>
            // end::get-buckets-desc
            // tag::get-buckets-end
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            // end::get-buckets-end
            // tag::get-buckets-exclude-interim
            request.setExcludeInterim(true); // <1>
            // end::get-buckets-exclude-interim
            // tag::get-buckets-expand
            request.setExpand(true); // <1>
            // end::get-buckets-expand
            // tag::get-buckets-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-buckets-page
            // Set page params back to null so the response contains the bucket we indexed
            request.setPageParams(null);
            // tag::get-buckets-sort
            request.setSort("anomaly_score"); // <1>
            // end::get-buckets-sort
            // tag::get-buckets-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-buckets-start
            // tag::get-buckets-execute
            GetBucketsResponse response = client.machineLearning().getBuckets(request, RequestOptions.DEFAULT);
            // end::get-buckets-execute
            // tag::get-buckets-response
            long count = response.count(); // <1>
            List<Bucket> buckets = response.buckets(); // <2>
            // end::get-buckets-response
            assertEquals(1, buckets.size());
        }
        {
            GetBucketsRequest request = new GetBucketsRequest(jobId);
            // tag::get-buckets-execute-listener
            ActionListener<GetBucketsResponse> listener =
                new ActionListener<GetBucketsResponse>() {
                    @Override
                    public void onResponse(GetBucketsResponse getBucketsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-buckets-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-buckets-execute-async
            client.machineLearning().getBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-buckets-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
public void testFlushJob() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("flushing-my-first-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
Job secondJob = MachineLearningIT.buildJob("flushing-my-second-machine-learning-job");
client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(secondJob.getId()), RequestOptions.DEFAULT);
{
// tag::flush-job-request
FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-first-machine-learning-job"); // <1>
// end::flush-job-request
// tag::flush-job-request-options
flushJobRequest.setCalcInterim(true); // <1>
flushJobRequest.setAdvanceTime("2018-08-31T16:35:07+00:00"); // <2>
flushJobRequest.setStart("2018-08-31T16:35:17+00:00"); // <3>
flushJobRequest.setEnd("2018-08-31T16:35:27+00:00"); // <4>
flushJobRequest.setSkipTime("2018-08-31T16:35:00+00:00"); // <5>
// end::flush-job-request-options
// tag::flush-job-execute
FlushJobResponse flushJobResponse = client.machineLearning().flushJob(flushJobRequest, RequestOptions.DEFAULT);
// end::flush-job-execute
// tag::flush-job-response
boolean isFlushed = flushJobResponse.isFlushed(); // <1>
Date lastFinalizedBucketEnd = flushJobResponse.getLastFinalizedBucketEnd(); // <2>
// end::flush-job-response
}
{
// tag::flush-job-execute-listener
ActionListener<FlushJobResponse> listener = new ActionListener<FlushJobResponse>() {
@Override
public void onResponse(FlushJobResponse FlushJobResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::flush-job-execute-listener
FlushJobRequest flushJobRequest = new FlushJobRequest("flushing-my-second-machine-learning-job");
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::flush-job-execute-async
client.machineLearning().flushJobAsync(flushJobRequest, RequestOptions.DEFAULT, listener); // <1>
// end::flush-job-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testDeleteForecast() throws Exception {
RestHighLevelClient client = highLevelClient();
Job job = MachineLearningIT.buildJob("deleting-forecast-for-job");
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
for(int i = 0; i < 30; i++) {
Map<String, Object> hashMap = new HashMap<>();
hashMap.put("total", randomInt(1000));
hashMap.put("timestamp", (i+1)*1000);
builder.addDoc(hashMap);
}
PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);
ForecastJobResponse forecastJobResponse = client.machineLearning().
forecastJob(new ForecastJobRequest(job.getId()), RequestOptions.DEFAULT);
String forecastId = forecastJobResponse.getForecastId();
GetRequest request = new GetRequest(".ml-anomalies-" + job.getId());
request.id(job.getId() + "_model_forecast_request_stats_" + forecastId);
assertBusy(() -> {
GetResponse getResponse = highLevelClient().get(request, RequestOptions.DEFAULT);
assertTrue(getResponse.isExists());
assertTrue(getResponse.getSourceAsString().contains("finished"));
}, 30, TimeUnit.SECONDS);
{
// tag::delete-forecast-request
DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("deleting-forecast-for-job"); // <1>
// end::delete-forecast-request
// tag::delete-forecast-request-options
deleteForecastRequest.setForecastIds(forecastId); // <1>
deleteForecastRequest.timeout("30s"); // <2>
deleteForecastRequest.setAllowNoForecasts(true); // <3>
// end::delete-forecast-request-options
// tag::delete-forecast-execute
AcknowledgedResponse deleteForecastResponse = client.machineLearning().deleteForecast(deleteForecastRequest,
RequestOptions.DEFAULT);
// end::delete-forecast-execute
// tag::delete-forecast-response
boolean isAcknowledged = deleteForecastResponse.isAcknowledged(); // <1>
// end::delete-forecast-response
}
{
// tag::delete-forecast-execute-listener
ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
@Override
public void onResponse(AcknowledgedResponse DeleteForecastResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::delete-forecast-execute-listener
DeleteForecastRequest deleteForecastRequest = DeleteForecastRequest.deleteAllForecasts(job.getId());
deleteForecastRequest.setAllowNoForecasts(true);
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::delete-forecast-execute-async
client.machineLearning().deleteForecastAsync(deleteForecastRequest, RequestOptions.DEFAULT, listener); // <1>
// end::delete-forecast-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
    /**
     * Documentation IT for the ML get-job-stats API. The {@code tag::}/{@code end::}
     * comment pairs delimit regions extracted verbatim into the published client
     * docs, so code between them should not be reformatted casually.
     */
    public void testGetJobStats() throws Exception {
        RestHighLevelClient client = highLevelClient();
        // Two jobs matching the wildcard below, so the snippet's stats request returns both.
        Job job = MachineLearningIT.buildJob("get-machine-learning-job-stats1");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        Job secondJob = MachineLearningIT.buildJob("get-machine-learning-job-stats2");
        client.machineLearning().putJob(new PutJobRequest(secondJob), RequestOptions.DEFAULT);
        {
            // tag::get-job-stats-request
            GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*"); // <1>
            request.setAllowNoJobs(true); // <2>
            // end::get-job-stats-request
            // tag::get-job-stats-execute
            GetJobStatsResponse response = client.machineLearning().getJobStats(request, RequestOptions.DEFAULT);
            // end::get-job-stats-execute
            // tag::get-job-stats-response
            long numberOfJobStats = response.count(); // <1>
            List<JobStats> jobStats = response.jobStats(); // <2>
            // end::get-job-stats-response
            // Both jobs created above must be reported by the wildcard request.
            assertEquals(2, response.count());
            assertThat(response.jobStats(), hasSize(2));
            assertThat(response.jobStats().stream().map(JobStats::getJobId).collect(Collectors.toList()),
                containsInAnyOrder(job.getId(), secondJob.getId()));
        }
        {
            GetJobStatsRequest request = new GetJobStatsRequest("get-machine-learning-job-stats1", "get-machine-learning-job-*");
            // tag::get-job-stats-execute-listener
            ActionListener<GetJobStatsResponse> listener = new ActionListener<GetJobStatsResponse>() {
                @Override
                public void onResponse(GetJobStatsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-job-stats-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-job-stats-execute-async
            client.machineLearning().getJobStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-job-stats-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML forecast-job API. Opens a job, posts 30 synthetic
     * data points and flushes before forecasting, since a forecast needs an open job
     * with data. The {@code tag::}/{@code end::} pairs mark doc-snippet regions.
     */
    public void testForecastJob() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("forecasting-my-first-machine-learning-job");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
        // Feed 30 one-second-spaced documents so the model has something to forecast from.
        PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
        for(int i = 0; i < 30; i++) {
            Map<String, Object> hashMap = new HashMap<>();
            hashMap.put("total", randomInt(1000));
            hashMap.put("timestamp", (i+1)*1000);
            builder.addDoc(hashMap);
        }
        PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
        client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
        client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);
        {
            // tag::forecast-job-request
            ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); // <1>
            // end::forecast-job-request
            // tag::forecast-job-request-options
            forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); // <1>
            forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); // <2>
            forecastJobRequest.setMaxModelMemory(new ByteSizeValue(30, ByteSizeUnit.MB)); // <3>
            // end::forecast-job-request-options
            // tag::forecast-job-execute
            ForecastJobResponse forecastJobResponse = client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT);
            // end::forecast-job-execute
            // tag::forecast-job-response
            boolean isAcknowledged = forecastJobResponse.isAcknowledged(); // <1>
            String forecastId = forecastJobResponse.getForecastId(); // <2>
            // end::forecast-job-response
            assertTrue(isAcknowledged);
            assertNotNull(forecastId);
        }
        {
            // tag::forecast-job-execute-listener
            ActionListener<ForecastJobResponse> listener = new ActionListener<ForecastJobResponse>() {
                @Override
                public void onResponse(ForecastJobResponse forecastJobResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::forecast-job-execute-listener
            ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job");
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::forecast-job-execute-async
            client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::forecast-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML get-overall-buckets API. Creates two jobs and
     * indexes one bucket result per job directly into the shared results index,
     * then exercises the tagged doc snippets against them.
     */
    public void testGetOverallBuckets() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId1 = "test-get-overall-buckets-1";
        String jobId2 = "test-get-overall-buckets-2";
        Job job1 = MachineLearningGetResultsIT.buildJob(jobId1);
        Job job2 = MachineLearningGetResultsIT.buildJob(jobId2);
        client.machineLearning().putJob(new PutJobRequest(job1), RequestOptions.DEFAULT);
        client.machineLearning().putJob(new PutJobRequest(job2), RequestOptions.DEFAULT);
        // Let us index some buckets
        BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        {
            // Bucket with anomaly_score 60.0 for the first job.
            IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
            indexRequest.source("{\"job_id\":\"test-get-overall-buckets-1\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
                "\"bucket_span\": 600,\"is_interim\": false, \"anomaly_score\": 60.0}", XContentType.JSON);
            bulkRequest.add(indexRequest);
        }
        {
            // Bucket with anomaly_score 100.0 for the second job.
            IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
            indexRequest.source("{\"job_id\":\"test-get-overall-buckets-2\", \"result_type\":\"bucket\", \"timestamp\": 1533081600000," +
                "\"bucket_span\": 3600,\"is_interim\": false, \"anomaly_score\": 100.0}", XContentType.JSON);
            bulkRequest.add(indexRequest);
        }
        client.bulk(bulkRequest, RequestOptions.DEFAULT);
        {
            // tag::get-overall-buckets-request
            GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2); // <1>
            // end::get-overall-buckets-request
            // tag::get-overall-buckets-bucket-span
            request.setBucketSpan(TimeValue.timeValueHours(24)); // <1>
            // end::get-overall-buckets-bucket-span
            // tag::get-overall-buckets-end
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            // end::get-overall-buckets-end
            // tag::get-overall-buckets-exclude-interim
            request.setExcludeInterim(true); // <1>
            // end::get-overall-buckets-exclude-interim
            // tag::get-overall-buckets-overall-score
            request.setOverallScore(75.0); // <1>
            // end::get-overall-buckets-overall-score
            // tag::get-overall-buckets-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-overall-buckets-start
            // tag::get-overall-buckets-top-n
            request.setTopN(2); // <1>
            // end::get-overall-buckets-top-n
            // tag::get-overall-buckets-execute
            GetOverallBucketsResponse response = client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
            // end::get-overall-buckets-execute
            // tag::get-overall-buckets-response
            long count = response.count(); // <1>
            List<OverallBucket> overallBuckets = response.overallBuckets(); // <2>
            // end::get-overall-buckets-response
            assertEquals(1, overallBuckets.size());
            // 80.0 is the mean of the two indexed anomaly scores (60.0 and 100.0).
            assertThat(overallBuckets.get(0).getOverallScore(), is(closeTo(80.0, 0.001)));
        }
        {
            GetOverallBucketsRequest request = new GetOverallBucketsRequest(jobId1, jobId2);
            // tag::get-overall-buckets-execute-listener
            ActionListener<GetOverallBucketsResponse> listener =
                new ActionListener<GetOverallBucketsResponse>() {
                    @Override
                    public void onResponse(GetOverallBucketsResponse getOverallBucketsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-overall-buckets-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-overall-buckets-execute-async
            client.machineLearning().getOverallBucketsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-overall-buckets-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML get-records API. Indexes a single anomaly record
     * directly into the shared results index, then runs the tagged doc snippets.
     */
    public void testGetRecords() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-get-records";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        // Let us index a record
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-get-records\", \"result_type\":\"record\", \"timestamp\": 1533081600000," +
            "\"bucket_span\": 600,\"is_interim\": false, \"record_score\": 80.0}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);
        {
            // tag::get-records-request
            GetRecordsRequest request = new GetRecordsRequest(jobId); // <1>
            // end::get-records-request
            // tag::get-records-desc
            request.setDescending(true); // <1>
            // end::get-records-desc
            // tag::get-records-end
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            // end::get-records-end
            // tag::get-records-exclude-interim
            request.setExcludeInterim(true); // <1>
            // end::get-records-exclude-interim
            // tag::get-records-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-records-page
            // Set page params back to null so the response contains the record we indexed
            request.setPageParams(null);
            // tag::get-records-record-score
            request.setRecordScore(75.0); // <1>
            // end::get-records-record-score
            // tag::get-records-sort
            request.setSort("probability"); // <1>
            // end::get-records-sort
            // tag::get-records-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-records-start
            // tag::get-records-execute
            GetRecordsResponse response = client.machineLearning().getRecords(request, RequestOptions.DEFAULT);
            // end::get-records-execute
            // tag::get-records-response
            long count = response.count(); // <1>
            List<AnomalyRecord> records = response.records(); // <2>
            // end::get-records-response
            // The single indexed record (score 80.0) passes the 75.0 score filter.
            assertEquals(1, records.size());
        }
        {
            GetRecordsRequest request = new GetRecordsRequest(jobId);
            // tag::get-records-execute-listener
            ActionListener<GetRecordsResponse> listener =
                new ActionListener<GetRecordsResponse>() {
                    @Override
                    public void onResponse(GetRecordsResponse getRecordsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-records-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-records-execute-async
            client.machineLearning().getRecordsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-records-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML post-data API. Opens a job and posts two
     * documents (one from a map, one from raw JSON) via the tagged snippets.
     */
    public void testPostData() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Job job = MachineLearningIT.buildJob("test-post-data");
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT);
        {
            // tag::post-data-request
            PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); // <1>
            Map<String, Object> mapData = new HashMap<>();
            mapData.put("total", 109);
            jsonBuilder.addDoc(mapData); // <2>
            jsonBuilder.addDoc("{\"total\":1000}"); // <3>
            PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <4>
            // end::post-data-request
            // tag::post-data-request-options
            postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); // <1>
            postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); // <2>
            // end::post-data-request-options
            // Clear the reset window again so the posted docs are not filtered out.
            postDataRequest.setResetEnd(null);
            postDataRequest.setResetStart(null);
            // tag::post-data-execute
            PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
            // end::post-data-execute
            // tag::post-data-response
            DataCounts dataCounts = postDataResponse.getDataCounts(); // <1>
            // end::post-data-response
            // Exactly the two documents added to the builder above.
            assertEquals(2, dataCounts.getInputRecordCount());
        }
        {
            // tag::post-data-execute-listener
            ActionListener<PostDataResponse> listener = new ActionListener<PostDataResponse>() {
                @Override
                public void onResponse(PostDataResponse postDataResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::post-data-execute-listener
            PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
            Map<String, Object> mapData = new HashMap<>();
            mapData.put("total", 109);
            jsonBuilder.addDoc(mapData);
            PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); // <1>
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::post-data-execute-async
            client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::post-data-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML find-file-structure API. Writes a two-line
     * NDJSON sample to a temp file and runs the tagged snippets over its bytes.
     */
    public void testFindFileStructure() throws Exception {
        RestHighLevelClient client = highLevelClient();
        Path anInterestingFile = createTempFile();
        // Two NDJSON log lines; the response is asserted to have analyzed exactly two lines.
        String contents = "{\"logger\":\"controller\",\"timestamp\":1478261151445,\"level\":\"INFO\"," +
            "\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 1\",\"class\":\"ml\"," +
            "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n" +
            "{\"logger\":\"controller\",\"timestamp\":1478261151445," +
            "\"level\":\"INFO\",\"pid\":42,\"thread\":\"0x7fff7d2a8000\",\"message\":\"message 2\",\"class\":\"ml\"," +
            "\"method\":\"core::SomeNoiseMaker\",\"file\":\"Noisemaker.cc\",\"line\":333}\n";
        Files.write(anInterestingFile, Collections.singleton(contents), StandardCharsets.UTF_8);
        {
            // tag::find-file-structure-request
            FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest(); // <1>
            findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile)); // <2>
            // end::find-file-structure-request
            // tag::find-file-structure-request-options
            findFileStructureRequest.setLinesToSample(500); // <1>
            findFileStructureRequest.setExplain(true); // <2>
            // end::find-file-structure-request-options
            // tag::find-file-structure-execute
            FindFileStructureResponse findFileStructureResponse =
                client.machineLearning().findFileStructure(findFileStructureRequest, RequestOptions.DEFAULT);
            // end::find-file-structure-execute
            // tag::find-file-structure-response
            FileStructure structure = findFileStructureResponse.getFileStructure(); // <1>
            // end::find-file-structure-response
            assertEquals(2, structure.getNumLinesAnalyzed());
        }
        {
            // tag::find-file-structure-execute-listener
            ActionListener<FindFileStructureResponse> listener = new ActionListener<FindFileStructureResponse>() {
                @Override
                public void onResponse(FindFileStructureResponse findFileStructureResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::find-file-structure-execute-listener
            FindFileStructureRequest findFileStructureRequest = new FindFileStructureRequest();
            findFileStructureRequest.setSample(Files.readAllBytes(anInterestingFile));
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::find-file-structure-execute-async
            client.machineLearning().findFileStructureAsync(findFileStructureRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::find-file-structure-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML get-influencers API. Indexes one influencer
     * result document directly, then runs the tagged doc snippets against it.
     */
    public void testGetInfluencers() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-get-influencers";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        // Let us index a record
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-get-influencers\", \"result_type\":\"influencer\", \"timestamp\": 1533081600000," +
            "\"bucket_span\": 600,\"is_interim\": false, \"influencer_score\": 80.0, \"influencer_field_name\": \"my_influencer\"," +
            "\"influencer_field_value\":\"foo\"}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);
        {
            // tag::get-influencers-request
            GetInfluencersRequest request = new GetInfluencersRequest(jobId); // <1>
            // end::get-influencers-request
            // tag::get-influencers-desc
            request.setDescending(true); // <1>
            // end::get-influencers-desc
            // tag::get-influencers-end
            request.setEnd("2018-08-21T00:00:00Z"); // <1>
            // end::get-influencers-end
            // tag::get-influencers-exclude-interim
            request.setExcludeInterim(true); // <1>
            // end::get-influencers-exclude-interim
            // tag::get-influencers-influencer-score
            request.setInfluencerScore(75.0); // <1>
            // end::get-influencers-influencer-score
            // tag::get-influencers-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-influencers-page
            // Set page params back to null so the response contains the influencer we indexed
            request.setPageParams(null);
            // tag::get-influencers-sort
            request.setSort("probability"); // <1>
            // end::get-influencers-sort
            // tag::get-influencers-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-influencers-start
            // tag::get-influencers-execute
            GetInfluencersResponse response = client.machineLearning().getInfluencers(request, RequestOptions.DEFAULT);
            // end::get-influencers-execute
            // tag::get-influencers-response
            long count = response.count(); // <1>
            List<Influencer> influencers = response.influencers(); // <2>
            // end::get-influencers-response
            // The indexed influencer (score 80.0) passes the 75.0 score filter.
            assertEquals(1, influencers.size());
        }
        {
            GetInfluencersRequest request = new GetInfluencersRequest(jobId);
            // tag::get-influencers-execute-listener
            ActionListener<GetInfluencersResponse> listener =
                new ActionListener<GetInfluencersResponse>() {
                    @Override
                    public void onResponse(GetInfluencersResponse getInfluencersResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-influencers-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-influencers-execute-async
            client.machineLearning().getInfluencersAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-influencers-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
public void testGetCategories() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-get-categories";
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a category
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\": \"test-get-categories\", \"category_id\": 1, \"terms\": \"AAL\"," +
" \"regex\": \".*?AAL.*\", \"max_matching_length\": 3, \"examples\": [\"AAL\"]}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::get-categories-request
GetCategoriesRequest request = new GetCategoriesRequest(jobId); // <1>
// end::get-categories-request
// tag::get-categories-category-id
request.setCategoryId(1L); // <1>
// end::get-categories-category-id
// tag::get-categories-page
request.setPageParams(new PageParams(100, 200)); // <1>
// end::get-categories-page
// Set page params back to null so the response contains the category we indexed
request.setPageParams(null);
// tag::get-categories-execute
GetCategoriesResponse response = client.machineLearning().getCategories(request, RequestOptions.DEFAULT);
// end::get-categories-execute
// tag::get-categories-response
long count = response.count(); // <1>
List<CategoryDefinition> categories = response.categories(); // <2>
// end::get-categories-response
assertEquals(1, categories.size());
}
{
GetCategoriesRequest request = new GetCategoriesRequest(jobId);
// tag::get-categories-execute-listener
ActionListener<GetCategoriesResponse> listener =
new ActionListener<GetCategoriesResponse>() {
@Override
public void onResponse(GetCategoriesResponse getcategoriesResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::get-categories-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::get-categories-execute-async
client.machineLearning().getCategoriesAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::get-categories-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
    /**
     * Documentation IT for the ML delete-expired-data API: runs the tagged doc
     * snippets synchronously and asynchronously.
     */
    public void testDeleteExpiredData() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-delete-expired-data";
        // NOTE(review): the built Job is discarded and never put via putJob, unlike
        // sibling tests — presumably the call is unnecessary here; confirm and remove.
        MachineLearningIT.buildJob(jobId);
        {
            // tag::delete-expired-data-request
            DeleteExpiredDataRequest request = new DeleteExpiredDataRequest( // <1>
                null, // <2>
                1000.0f, // <3>
                TimeValue.timeValueHours(12) // <4>
            );
            // end::delete-expired-data-request
            // tag::delete-expired-data-execute
            DeleteExpiredDataResponse response = client.machineLearning().deleteExpiredData(request, RequestOptions.DEFAULT);
            // end::delete-expired-data-execute
            // tag::delete-expired-data-response
            boolean deleted = response.getDeleted(); // <1>
            // end::delete-expired-data-response
            assertTrue(deleted);
        }
        {
            // tag::delete-expired-data-execute-listener
            ActionListener<DeleteExpiredDataResponse> listener = new ActionListener<DeleteExpiredDataResponse>() {
                @Override
                public void onResponse(DeleteExpiredDataResponse deleteExpiredDataResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-expired-data-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            DeleteExpiredDataRequest deleteExpiredDataRequest = new DeleteExpiredDataRequest();
            // tag::delete-expired-data-execute-async
            client.machineLearning().deleteExpiredDataAsync(deleteExpiredDataRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-expired-data-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML delete-model-snapshot API. Indexes a model
     * snapshot document, deletes it via the tagged sync snippet, re-indexes it,
     * then deletes it again via the async snippet.
     */
    public void testDeleteModelSnapshot() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-delete-model-snapshot";
        String snapshotId = "1541587919";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        // Let us index a snapshot
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"" + jobId + "\", \"timestamp\":1541587919000, " +
            "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
            "\"snapshot_id\":\"" + snapshotId + "\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
            "\"job_id\":\"" + jobId + "\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
            "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
            "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
            "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
            "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
        {
            client.index(indexRequest, RequestOptions.DEFAULT);
            // tag::delete-model-snapshot-request
            DeleteModelSnapshotRequest request = new DeleteModelSnapshotRequest(jobId, snapshotId); // <1>
            // end::delete-model-snapshot-request
            // tag::delete-model-snapshot-execute
            AcknowledgedResponse response = client.machineLearning().deleteModelSnapshot(request, RequestOptions.DEFAULT);
            // end::delete-model-snapshot-execute
            // tag::delete-model-snapshot-response
            boolean isAcknowledged = response.isAcknowledged(); // <1>
            // end::delete-model-snapshot-response
            assertTrue(isAcknowledged);
        }
        {
            // Re-index the snapshot: the sync snippet above already deleted it.
            client.index(indexRequest, RequestOptions.DEFAULT);
            // tag::delete-model-snapshot-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-model-snapshot-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            DeleteModelSnapshotRequest deleteModelSnapshotRequest = new DeleteModelSnapshotRequest(jobId, "1541587919");
            // tag::delete-model-snapshot-execute-async
            client.machineLearning().deleteModelSnapshotAsync(deleteModelSnapshotRequest, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-model-snapshot-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML get-model-snapshots API. Indexes one model
     * snapshot document and then runs the tagged doc snippets against it.
     */
    public void testGetModelSnapshots() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-get-model-snapshots";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        // Let us index a snapshot
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared");
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-get-model-snapshots\", \"timestamp\":1541587919000, " +
            "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
            "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
            "\"job_id\":\"test-get-model-snapshots\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
            "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
            "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
            "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
            "\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);
        {
            // tag::get-model-snapshots-request
            GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId); // <1>
            // end::get-model-snapshots-request
            // tag::get-model-snapshots-snapshot-id
            request.setSnapshotId("1541587919"); // <1>
            // end::get-model-snapshots-snapshot-id
            // Set snapshot id to null as it is incompatible with other args
            request.setSnapshotId(null);
            // tag::get-model-snapshots-desc
            request.setDesc(true); // <1>
            // end::get-model-snapshots-desc
            // tag::get-model-snapshots-end
            request.setEnd("2018-11-07T21:00:00Z"); // <1>
            // end::get-model-snapshots-end
            // tag::get-model-snapshots-page
            request.setPageParams(new PageParams(100, 200)); // <1>
            // end::get-model-snapshots-page
            // Set page params back to null so the response contains the snapshot we indexed
            request.setPageParams(null);
            // tag::get-model-snapshots-sort
            request.setSort("latest_result_time_stamp"); // <1>
            // end::get-model-snapshots-sort
            // tag::get-model-snapshots-start
            request.setStart("2018-11-07T00:00:00Z"); // <1>
            // end::get-model-snapshots-start
            // tag::get-model-snapshots-execute
            GetModelSnapshotsResponse response = client.machineLearning().getModelSnapshots(request, RequestOptions.DEFAULT);
            // end::get-model-snapshots-execute
            // tag::get-model-snapshots-response
            long count = response.count(); // <1>
            List<ModelSnapshot> modelSnapshots = response.snapshots(); // <2>
            // end::get-model-snapshots-response
            assertEquals(1, modelSnapshots.size());
        }
        {
            GetModelSnapshotsRequest request = new GetModelSnapshotsRequest(jobId);
            // tag::get-model-snapshots-execute-listener
            ActionListener<GetModelSnapshotsResponse> listener =
                new ActionListener<GetModelSnapshotsResponse>() {
                    @Override
                    public void onResponse(GetModelSnapshotsResponse getModelSnapshotsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-model-snapshots-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-model-snapshots-execute-async
            client.machineLearning().getModelSnapshotsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-model-snapshots-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation IT for the ML revert-model-snapshot API. Indexes a model
     * snapshot document under the id the revert API expects
     * ({@code <jobId>_model_snapshot_<snapshotId>}), then runs the tagged snippets.
     */
    public void testRevertModelSnapshot() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String jobId = "test-revert-model-snapshot";
        String snapshotId = "1541587919";
        Job job = MachineLearningIT.buildJob(jobId);
        client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
        // Let us index a snapshot
        String documentId = jobId + "_model_snapshot_" + snapshotId;
        IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
        indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        indexRequest.source("{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
            "\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
            "\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
            "\"job_id\":\"test-revert-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
            "\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
            "\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
            "\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
            "\"latest_result_time_stamp\":1519930800000, \"retain\":false, " +
            "\"quantiles\":{\"job_id\":\"test-revert-model-snapshot\", \"timestamp\":1541587919000, " +
            "\"quantile_state\":\"state\"}}", XContentType.JSON);
        client.index(indexRequest, RequestOptions.DEFAULT);
        {
            // tag::revert-model-snapshot-request
            RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId); // <1>
            // end::revert-model-snapshot-request
            // tag::revert-model-snapshot-delete-intervening-results
            request.setDeleteInterveningResults(true); // <1>
            // end::revert-model-snapshot-delete-intervening-results
            // tag::revert-model-snapshot-execute
            RevertModelSnapshotResponse response = client.machineLearning().revertModelSnapshot(request, RequestOptions.DEFAULT);
            // end::revert-model-snapshot-execute
            // tag::revert-model-snapshot-response
            ModelSnapshot modelSnapshot = response.getModel(); // <1>
            // end::revert-model-snapshot-response
            // The reverted-to snapshot must match the document indexed above.
            assertEquals(snapshotId, modelSnapshot.getSnapshotId());
            assertEquals("State persisted due to job close at 2018-11-07T10:51:59+0000", modelSnapshot.getDescription());
            assertEquals(51722, modelSnapshot.getModelSizeStats().getModelBytes());
        }
        {
            RevertModelSnapshotRequest request = new RevertModelSnapshotRequest(jobId, snapshotId);
            // tag::revert-model-snapshot-execute-listener
            ActionListener<RevertModelSnapshotResponse> listener =
                new ActionListener<RevertModelSnapshotResponse>() {
                    @Override
                    public void onResponse(RevertModelSnapshotResponse revertModelSnapshotResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::revert-model-snapshot-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::revert-model-snapshot-execute-async
            client.machineLearning().revertModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::revert-model-snapshot-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
public void testUpdateModelSnapshot() throws IOException, InterruptedException {
RestHighLevelClient client = highLevelClient();
String jobId = "test-update-model-snapshot";
String snapshotId = "1541587919";
String documentId = jobId + "_model_snapshot_" + snapshotId;
Job job = MachineLearningIT.buildJob(jobId);
client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
// Let us index a snapshot
IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared").id(documentId);
indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
indexRequest.source("{\"job_id\":\"test-update-model-snapshot\", \"timestamp\":1541587919000, " +
"\"description\":\"State persisted due to job close at 2018-11-07T10:51:59+0000\", " +
"\"snapshot_id\":\"1541587919\", \"snapshot_doc_count\":1, \"model_size_stats\":{" +
"\"job_id\":\"test-update-model-snapshot\", \"result_type\":\"model_size_stats\",\"model_bytes\":51722, " +
"\"total_by_field_count\":3, \"total_over_field_count\":0, \"total_partition_field_count\":2," +
"\"bucket_allocation_failures_count\":0, \"memory_status\":\"ok\", \"log_time\":1541587919000, " +
"\"timestamp\":1519930800000}, \"latest_record_time_stamp\":1519931700000," +
"\"latest_result_time_stamp\":1519930800000, \"retain\":false}", XContentType.JSON);
client.index(indexRequest, RequestOptions.DEFAULT);
{
// tag::update-model-snapshot-request
UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId); // <1>
// end::update-model-snapshot-request
// tag::update-model-snapshot-description
request.setDescription("My Snapshot"); // <1>
// end::update-model-snapshot-description
// tag::update-model-snapshot-retain
request.setRetain(true); // <1>
// end::update-model-snapshot-retain
// tag::update-model-snapshot-execute
UpdateModelSnapshotResponse response = client.machineLearning().updateModelSnapshot(request, RequestOptions.DEFAULT);
// end::update-model-snapshot-execute
// tag::update-model-snapshot-response
boolean acknowledged = response.getAcknowledged(); // <1>
ModelSnapshot modelSnapshot = response.getModel(); // <2>
// end::update-model-snapshot-response
assertTrue(acknowledged);
assertEquals("My Snapshot", modelSnapshot.getDescription()); }
{
UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest(jobId, snapshotId);
// tag::update-model-snapshot-execute-listener
ActionListener<UpdateModelSnapshotResponse> listener =
new ActionListener<UpdateModelSnapshotResponse>() {
@Override
public void onResponse(UpdateModelSnapshotResponse updateModelSnapshotResponse) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::update-model-snapshot-execute-listener
// Replace the empty listener by a blocking listener in test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::update-model-snapshot-execute-async
client.machineLearning().updateModelSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::update-model-snapshot-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
    /**
     * Documentation integration test for the ML put-calendar API.
     * <p>
     * The {@code tag::}/{@code end::} comment markers delimit snippets extracted
     * verbatim into the reference documentation; the code and {@code // <n>}
     * callouts between them must be left exactly as written.
     */
    public void testPutCalendar() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // tag::put-calendar-request
        Calendar calendar = new Calendar("public_holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
        PutCalendarRequest request = new PutCalendarRequest(calendar); // <1>
        // end::put-calendar-request
        // tag::put-calendar-execute
        PutCalendarResponse response = client.machineLearning().putCalendar(request, RequestOptions.DEFAULT);
        // end::put-calendar-execute
        // tag::put-calendar-response
        Calendar newCalendar = response.getCalendar(); // <1>
        // end::put-calendar-response
        // The response echoes back the calendar that was stored.
        assertThat(newCalendar.getId(), equalTo("public_holidays"));
        // Asynchronous variant; the latch makes the test block until the callback fires.
        // tag::put-calendar-execute-listener
        ActionListener<PutCalendarResponse> listener = new ActionListener<PutCalendarResponse>() {
            @Override
            public void onResponse(PutCalendarResponse response) {
                // <1>
            }
            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::put-calendar-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::put-calendar-execute-async
        client.machineLearning().putCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::put-calendar-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    /**
     * Documentation integration test for the ML put-calendar-job API
     * (adding jobs/groups to an existing calendar).
     * <p>
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testPutCalendarJob() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // Create the calendar the snippets operate on; it already contains "job_1".
        Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::put-calendar-job-request
            PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", // <1>
                "job_2", "job_group_1"); // <2>
            // end::put-calendar-job-request
            // tag::put-calendar-job-execute
            PutCalendarResponse response = client.machineLearning().putCalendarJob(request, RequestOptions.DEFAULT);
            // end::put-calendar-job-execute
            // tag::put-calendar-job-response
            Calendar updatedCalendar = response.getCalendar(); // <1>
            // end::put-calendar-job-response
            // Existing job plus the two newly-added entries.
            assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_1", "job_2", "job_group_1"));
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", "job_4");
            // tag::put-calendar-job-execute-listener
            ActionListener<PutCalendarResponse> listener =
                new ActionListener<PutCalendarResponse>() {
                    @Override
                    public void onResponse(PutCalendarResponse putCalendarsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::put-calendar-job-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-calendar-job-execute-async
            client.machineLearning().putCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-calendar-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the ML delete-calendar-job API
     * (removing jobs/groups from an existing calendar).
     * <p>
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testDeleteCalendarJob() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // Seed a calendar with three entries so each sub-block below can remove some.
        Calendar calendar = new Calendar("holidays",
            Arrays.asList("job_1", "job_group_1", "job_2"),
            "A calendar for public holidays");
        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::delete-calendar-job-request
            DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", // <1>
                "job_1", "job_group_1"); // <2>
            // end::delete-calendar-job-request
            // tag::delete-calendar-job-execute
            PutCalendarResponse response = client.machineLearning().deleteCalendarJob(request, RequestOptions.DEFAULT);
            // end::delete-calendar-job-execute
            // tag::delete-calendar-job-response
            Calendar updatedCalendar = response.getCalendar(); // <1>
            // end::delete-calendar-job-response
            // Only "job_2" should remain after the two removals above.
            assertThat(updatedCalendar.getJobIds(), containsInAnyOrder("job_2"));
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            DeleteCalendarJobRequest request = new DeleteCalendarJobRequest("holidays", "job_2");
            // tag::delete-calendar-job-execute-listener
            ActionListener<PutCalendarResponse> listener =
                new ActionListener<PutCalendarResponse>() {
                    @Override
                    public void onResponse(PutCalendarResponse deleteCalendarsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::delete-calendar-job-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::delete-calendar-job-execute-async
            client.machineLearning().deleteCalendarJobAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-calendar-job-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the ML get-calendars API.
     * <p>
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testGetCalendar() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // Create one calendar so the GET below has something to return.
        Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::get-calendars-request
            GetCalendarsRequest request = new GetCalendarsRequest(); // <1>
            // end::get-calendars-request
            // tag::get-calendars-id
            request.setCalendarId("holidays"); // <1>
            // end::get-calendars-id
            // tag::get-calendars-page
            request.setPageParams(new PageParams(10, 20)); // <1>
            // end::get-calendars-page
            // reset page params
            request.setPageParams(null);
            // tag::get-calendars-execute
            GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
            // end::get-calendars-execute
            // tag::get-calendars-response
            long count = response.count(); // <1>
            List<Calendar> calendars = response.calendars(); // <2>
            // end::get-calendars-response
            assertEquals(1, calendars.size());
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            GetCalendarsRequest request = new GetCalendarsRequest("holidays");
            // tag::get-calendars-execute-listener
            ActionListener<GetCalendarsResponse> listener =
                new ActionListener<GetCalendarsResponse>() {
                    @Override
                    public void onResponse(GetCalendarsResponse getCalendarsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-calendars-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-calendars-execute-async
            client.machineLearning().getCalendarsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-calendars-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the ML delete-calendar API.
     * <p>
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testDeleteCalendar() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // Create the calendar the snippet deletes.
        Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
        PutCalendarRequest putCalendarRequest = new PutCalendarRequest(calendar);
        client.machineLearning().putCalendar(putCalendarRequest, RequestOptions.DEFAULT);
        // tag::delete-calendar-request
        DeleteCalendarRequest request = new DeleteCalendarRequest("holidays"); // <1>
        // end::delete-calendar-request
        // tag::delete-calendar-execute
        AcknowledgedResponse response = client.machineLearning().deleteCalendar(request, RequestOptions.DEFAULT);
        // end::delete-calendar-execute
        // tag::delete-calendar-response
        boolean isAcknowledged = response.isAcknowledged(); // <1>
        // end::delete-calendar-response
        assertTrue(isAcknowledged);
        // Asynchronous variant; the latch makes the test block until the callback fires.
        // NOTE(review): the async call deletes the same (now absent) calendar — the
        // listener intentionally ignores failures, so this still completes.
        // tag::delete-calendar-execute-listener
        ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
            @Override
            public void onResponse(AcknowledgedResponse response) {
                // <1>
            }
            @Override
            public void onFailure(Exception e) {
                // <2>
            }
        };
        // end::delete-calendar-execute-listener
        // Replace the empty listener by a blocking listener in test
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);
        // tag::delete-calendar-execute-async
        client.machineLearning().deleteCalendarAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-calendar-execute-async
        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
    /**
     * Documentation integration test for the ML get-calendar-events API.
     * <p>
     * Seeds one calendar with a single scheduled event, then demonstrates the
     * request parameters (paging, start/end, job id) and both execution styles.
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testGetCalendarEvent() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
        // Post exactly one event so the count assertion below is deterministic.
        List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));
        client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::get-calendar-events-request
            GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays"); // <1>
            // end::get-calendar-events-request
            // tag::get-calendar-events-page
            request.setPageParams(new PageParams(10, 20)); // <1>
            // end::get-calendar-events-page
            // tag::get-calendar-events-start
            request.setStart("2018-08-01T00:00:00Z"); // <1>
            // end::get-calendar-events-start
            // tag::get-calendar-events-end
            request.setEnd("2018-08-02T00:00:00Z"); // <1>
            // end::get-calendar-events-end
            // tag::get-calendar-events-jobid
            request.setJobId("job_1"); // <1>
            // end::get-calendar-events-jobid
            // reset params
            request.setPageParams(null);
            request.setJobId(null);
            request.setStart(null);
            request.setEnd(null);
            // tag::get-calendar-events-execute
            GetCalendarEventsResponse response = client.machineLearning().getCalendarEvents(request, RequestOptions.DEFAULT);
            // end::get-calendar-events-execute
            // tag::get-calendar-events-response
            long count = response.count(); // <1>
            List<ScheduledEvent> scheduledEvents = response.events(); // <2>
            // end::get-calendar-events-response
            assertEquals(1, scheduledEvents.size());
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            GetCalendarEventsRequest request = new GetCalendarEventsRequest("holidays");
            // tag::get-calendar-events-execute-listener
            ActionListener<GetCalendarEventsResponse> listener =
                new ActionListener<GetCalendarEventsResponse>() {
                    @Override
                    public void onResponse(GetCalendarEventsResponse getCalendarsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-calendar-events-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-calendar-events-execute-async
            client.machineLearning().getCalendarEventsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-calendar-events-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the ML post-calendar-event API.
     * <p>
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testPostCalendarEvent() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        // Create the calendar the events are posted to.
        Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance(calendar.getId(), null));
            // tag::post-calendar-event-request
            PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", // <1>
                events); // <2>
            // end::post-calendar-event-request
            // tag::post-calendar-event-execute
            PostCalendarEventResponse response = client.machineLearning().postCalendarEvent(request, RequestOptions.DEFAULT);
            // end::post-calendar-event-execute
            // tag::post-calendar-event-response
            List<ScheduledEvent> scheduledEvents = response.getScheduledEvents(); // <1>
            // end::post-calendar-event-response
            assertEquals(1, scheduledEvents.size());
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            List<ScheduledEvent> events = Collections.singletonList(ScheduledEventTests.testInstance());
            PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", events); // <1>
            // tag::post-calendar-event-execute-listener
            ActionListener<PostCalendarEventResponse> listener =
                new ActionListener<PostCalendarEventResponse>() {
                    @Override
                    public void onResponse(PostCalendarEventResponse postCalendarsResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::post-calendar-event-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::post-calendar-event-execute-async
            client.machineLearning().postCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::post-calendar-event-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the ML delete-calendar-event API.
     * <p>
     * Posts two events so the sync and async variants can each delete one.
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testDeleteCalendarEvent() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        Calendar calendar = new Calendar("holidays",
            Arrays.asList("job_1", "job_group_1", "job_2"),
            "A calendar for public holidays");
        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
        List<ScheduledEvent> events = Arrays.asList(ScheduledEventTests.testInstance(calendar.getId(), null),
            ScheduledEventTests.testInstance(calendar.getId(), null));
        client.machineLearning().postCalendarEvent(new PostCalendarEventRequest("holidays", events), RequestOptions.DEFAULT);
        // Fetch the server-assigned event ids; they are needed for deletion below.
        GetCalendarEventsResponse getCalendarEventsResponse =
            client.machineLearning().getCalendarEvents(new GetCalendarEventsRequest("holidays"), RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::delete-calendar-event-request
            DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays", // <1>
                "EventId"); // <2>
            // end::delete-calendar-event-request
            // Swap the placeholder id from the snippet for a real one before executing.
            request = new DeleteCalendarEventRequest("holidays", getCalendarEventsResponse.events().get(0).getEventId());
            // tag::delete-calendar-event-execute
            AcknowledgedResponse response = client.machineLearning().deleteCalendarEvent(request, RequestOptions.DEFAULT);
            // end::delete-calendar-event-execute
            // tag::delete-calendar-event-response
            boolean acknowledged = response.isAcknowledged(); // <1>
            // end::delete-calendar-event-response
            assertThat(acknowledged, is(true));
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            DeleteCalendarEventRequest request = new DeleteCalendarEventRequest("holidays",
                getCalendarEventsResponse.events().get(1).getEventId());
            // tag::delete-calendar-event-execute-listener
            ActionListener<AcknowledgedResponse> listener =
                new ActionListener<AcknowledgedResponse>() {
                    @Override
                    public void onResponse(AcknowledgedResponse deleteCalendarEventResponse) {
                        // <1>
                    }
                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::delete-calendar-event-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::delete-calendar-event-execute-async
            client.machineLearning().deleteCalendarEventAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-calendar-event-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the get-data-frame-analytics API.
     * <p>
     * Registers the shared {@code DF_ANALYTICS_CONFIG} first so the GET has a
     * config to return. The {@code tag::}/{@code end::} markers delimit docs
     * snippets; do not reformat the code between them.
     */
    public void testGetDataFrameAnalytics() throws Exception {
        // The analytics source index must exist before the config can be created.
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
        RestHighLevelClient client = highLevelClient();
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::get-data-frame-analytics-request
            GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config"); // <1>
            // end::get-data-frame-analytics-request
            // tag::get-data-frame-analytics-execute
            GetDataFrameAnalyticsResponse response = client.machineLearning().getDataFrameAnalytics(request, RequestOptions.DEFAULT);
            // end::get-data-frame-analytics-execute
            // tag::get-data-frame-analytics-response
            List<DataFrameAnalyticsConfig> configs = response.getAnalytics();
            // end::get-data-frame-analytics-response
            assertThat(configs, hasSize(1));
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            GetDataFrameAnalyticsRequest request = new GetDataFrameAnalyticsRequest("my-analytics-config");
            // tag::get-data-frame-analytics-execute-listener
            ActionListener<GetDataFrameAnalyticsResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(GetDataFrameAnalyticsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-data-frame-analytics-execute-async
            client.machineLearning().getDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the get-data-frame-analytics-stats API.
     * <p>
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testGetDataFrameAnalyticsStats() throws Exception {
        // The analytics source index must exist before the config can be created.
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0])
        RestHighLevelClient client = highLevelClient();
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::get-data-frame-analytics-stats-request
            GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config"); // <1>
            // end::get-data-frame-analytics-stats-request
            // tag::get-data-frame-analytics-stats-execute
            GetDataFrameAnalyticsStatsResponse response =
                client.machineLearning().getDataFrameAnalyticsStats(request, RequestOptions.DEFAULT);
            // end::get-data-frame-analytics-stats-execute
            // tag::get-data-frame-analytics-stats-response
            List<DataFrameAnalyticsStats> stats = response.getAnalyticsStats();
            // end::get-data-frame-analytics-stats-response
            assertThat(stats, hasSize(1));
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analytics-config");
            // tag::get-data-frame-analytics-stats-execute-listener
            ActionListener<GetDataFrameAnalyticsStatsResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(GetDataFrameAnalyticsStatsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-data-frame-analytics-stats-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-data-frame-analytics-stats-execute-async
            client.machineLearning().getDataFrameAnalyticsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-data-frame-analytics-stats-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the put-data-frame-analytics API.
     * <p>
     * Builds every configuration object the docs describe (source, dest, the
     * three analysis types, analyzed fields) even though only the outlier
     * detection analysis is actually submitted — the other builders exist solely
     * so their snippets can be extracted into the documentation. The
     * {@code tag::}/{@code end::} markers delimit those snippets; do not
     * reformat the code between them.
     */
    public void testPutDataFrameAnalytics() throws Exception {
        // The analytics source index must exist before the config can be created.
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
        RestHighLevelClient client = highLevelClient();
        // Synchronous variant used for the docs snippets.
        {
            // tag::put-data-frame-analytics-query-config
            QueryConfig queryConfig = new QueryConfig(new MatchAllQueryBuilder());
            // end::put-data-frame-analytics-query-config
            // tag::put-data-frame-analytics-source-config
            DataFrameAnalyticsSource sourceConfig = DataFrameAnalyticsSource.builder() // <1>
                .setIndex("put-test-source-index") // <2>
                .setQueryConfig(queryConfig) // <3>
                .setSourceFiltering(new FetchSourceContext(true,
                    new String[] { "included_field_1", "included_field_2" },
                    new String[] { "excluded_field" })) // <4>
                .build();
            // end::put-data-frame-analytics-source-config
            // tag::put-data-frame-analytics-dest-config
            DataFrameAnalyticsDest destConfig = DataFrameAnalyticsDest.builder() // <1>
                .setIndex("put-test-dest-index") // <2>
                .build();
            // end::put-data-frame-analytics-dest-config
            // tag::put-data-frame-analytics-outlier-detection-default
            DataFrameAnalysis outlierDetection = OutlierDetection.createDefault(); // <1>
            // end::put-data-frame-analytics-outlier-detection-default
            // The customized/classification/regression builders below are unused by
            // the request; they exist only to feed the corresponding docs snippets.
            // tag::put-data-frame-analytics-outlier-detection-customized
            DataFrameAnalysis outlierDetectionCustomized = OutlierDetection.builder() // <1>
                .setMethod(OutlierDetection.Method.DISTANCE_KNN) // <2>
                .setNNeighbors(5) // <3>
                .setFeatureInfluenceThreshold(0.1) // <4>
                .setComputeFeatureInfluence(true) // <5>
                .setOutlierFraction(0.05) // <6>
                .setStandardizationEnabled(true) // <7>
                .build();
            // end::put-data-frame-analytics-outlier-detection-customized
            // tag::put-data-frame-analytics-classification
            DataFrameAnalysis classification = Classification.builder("my_dependent_variable") // <1>
                .setLambda(1.0) // <2>
                .setGamma(5.5) // <3>
                .setEta(5.5) // <4>
                .setMaxTrees(50) // <5>
                .setFeatureBagFraction(0.4) // <6>
                .setNumTopFeatureImportanceValues(3) // <7>
                .setPredictionFieldName("my_prediction_field_name") // <8>
                .setTrainingPercent(50.0) // <9>
                .setRandomizeSeed(1234L) // <10>
                .setClassAssignmentObjective(Classification.ClassAssignmentObjective.MAXIMIZE_ACCURACY) // <11>
                .setNumTopClasses(1) // <12>
                .build();
            // end::put-data-frame-analytics-classification
            // tag::put-data-frame-analytics-regression
            DataFrameAnalysis regression = org.elasticsearch.client.ml.dataframe.Regression.builder("my_dependent_variable") // <1>
                .setLambda(1.0) // <2>
                .setGamma(5.5) // <3>
                .setEta(5.5) // <4>
                .setMaxTrees(50) // <5>
                .setFeatureBagFraction(0.4) // <6>
                .setNumTopFeatureImportanceValues(3) // <7>
                .setPredictionFieldName("my_prediction_field_name") // <8>
                .setTrainingPercent(50.0) // <9>
                .setRandomizeSeed(1234L) // <10>
                .setLossFunction(Regression.LossFunction.MSE) // <11>
                .setLossFunctionParameter(1.0) // <12>
                .build();
            // end::put-data-frame-analytics-regression
            // tag::put-data-frame-analytics-analyzed-fields
            FetchSourceContext analyzedFields =
                new FetchSourceContext(
                    true,
                    new String[] { "included_field_1", "included_field_2" },
                    new String[] { "excluded_field" });
            // end::put-data-frame-analytics-analyzed-fields
            // tag::put-data-frame-analytics-config
            DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
                .setId("my-analytics-config") // <1>
                .setSource(sourceConfig) // <2>
                .setDest(destConfig) // <3>
                .setAnalysis(outlierDetection) // <4>
                .setAnalyzedFields(analyzedFields) // <5>
                .setModelMemoryLimit(new ByteSizeValue(5, ByteSizeUnit.MB)) // <6>
                .setDescription("this is an example description") // <7>
                .build();
            // end::put-data-frame-analytics-config
            // tag::put-data-frame-analytics-request
            PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(config); // <1>
            // end::put-data-frame-analytics-request
            // tag::put-data-frame-analytics-execute
            PutDataFrameAnalyticsResponse response = client.machineLearning().putDataFrameAnalytics(request, RequestOptions.DEFAULT);
            // end::put-data-frame-analytics-execute
            // tag::put-data-frame-analytics-response
            DataFrameAnalyticsConfig createdConfig = response.getConfig();
            // end::put-data-frame-analytics-response
            assertThat(createdConfig.getId(), equalTo("my-analytics-config"));
        }
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG);
            // tag::put-data-frame-analytics-execute-listener
            ActionListener<PutDataFrameAnalyticsResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(PutDataFrameAnalyticsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::put-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-data-frame-analytics-execute-async
            client.machineLearning().putDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the delete-data-frame-analytics API.
     * <p>
     * The config is re-created between the sync and async variants because the
     * first variant deletes it. The {@code tag::}/{@code end::} markers delimit
     * docs snippets; do not reformat the code between them.
     */
    public void testDeleteDataFrameAnalytics() throws Exception {
        // The analytics source index must exist before the config can be created.
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
        RestHighLevelClient client = highLevelClient();
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::delete-data-frame-analytics-request
            DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config"); // <1>
            // end::delete-data-frame-analytics-request
            //tag::delete-data-frame-analytics-request-options
            request.setForce(false); // <1>
            request.setTimeout(TimeValue.timeValueMinutes(1)); // <2>
            //end::delete-data-frame-analytics-request-options
            // tag::delete-data-frame-analytics-execute
            AcknowledgedResponse response = client.machineLearning().deleteDataFrameAnalytics(request, RequestOptions.DEFAULT);
            // end::delete-data-frame-analytics-execute
            // tag::delete-data-frame-analytics-response
            boolean acknowledged = response.isAcknowledged();
            // end::delete-data-frame-analytics-response
            assertThat(acknowledged, is(true));
        }
        // Re-create the config so the async variant has something to delete.
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config");
            // tag::delete-data-frame-analytics-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::delete-data-frame-analytics-execute-async
            client.machineLearning().deleteDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation integration test for the start-data-frame-analytics API.
     * <p>
     * Indexes one source document so the analytics job has data to process, then
     * starts it synchronously and asynchronously, waiting for the job to reach
     * STOPPED between runs so the second start does not collide with the first.
     * The {@code tag::}/{@code end::} markers delimit docs snippets; do not
     * reformat the code between them.
     */
    public void testStartDataFrameAnalytics() throws Exception {
        createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
        highLevelClient().index(
            new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000)
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
        RestHighLevelClient client = highLevelClient();
        client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
        // Synchronous variant used for the docs snippets.
        {
            // tag::start-data-frame-analytics-request
            StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config"); // <1>
            // end::start-data-frame-analytics-request
            // tag::start-data-frame-analytics-execute
            StartDataFrameAnalyticsResponse response = client.machineLearning().startDataFrameAnalytics(request, RequestOptions.DEFAULT);
            // end::start-data-frame-analytics-execute
            // tag::start-data-frame-analytics-response
            boolean acknowledged = response.isAcknowledged();
            String node = response.getNode(); // <1>
            // end::start-data-frame-analytics-response
            assertThat(acknowledged, is(true));
            assertThat(node, notNullValue());
        }
        // Wait for the run started above to finish before starting again.
        assertBusy(
            () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
            30, TimeUnit.SECONDS);
        // Asynchronous variant; the latch makes the test block until the callback fires.
        {
            StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analytics-config");
            // tag::start-data-frame-analytics-execute-listener
            ActionListener<StartDataFrameAnalyticsResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(StartDataFrameAnalyticsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::start-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::start-data-frame-analytics-execute-async
            client.machineLearning().startDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::start-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
        // Let the second run reach STOPPED so later tests see a quiescent cluster.
        assertBusy(
            () -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
            30, TimeUnit.SECONDS);
    }
public void testStopDataFrameAnalytics() throws Exception {
createIndex(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]);
highLevelClient().index(
new IndexRequest(DF_ANALYTICS_CONFIG.getSource().getIndex()[0]).source(XContentType.JSON, "total", 10000)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
RestHighLevelClient client = highLevelClient();
client.machineLearning().putDataFrameAnalytics(new PutDataFrameAnalyticsRequest(DF_ANALYTICS_CONFIG), RequestOptions.DEFAULT);
{
// tag::stop-data-frame-analytics-request
StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config"); // <1>
request.setForce(false); // <2>
// end::stop-data-frame-analytics-request
// tag::stop-data-frame-analytics-execute
StopDataFrameAnalyticsResponse response = client.machineLearning().stopDataFrameAnalytics(request, RequestOptions.DEFAULT);
// end::stop-data-frame-analytics-execute
// tag::stop-data-frame-analytics-response
boolean acknowledged = response.isStopped();
// end::stop-data-frame-analytics-response
assertThat(acknowledged, is(true));
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
{
StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics-config");
// tag::stop-data-frame-analytics-execute-listener
ActionListener<StopDataFrameAnalyticsResponse> listener = new ActionListener<>() {
@Override
public void onResponse(StopDataFrameAnalyticsResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::stop-data-frame-analytics-execute-listener
// Replace the empty listener by a blocking listener in test
CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::stop-data-frame-analytics-execute-async
client.machineLearning().stopDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
// end::stop-data-frame-analytics-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
assertBusy(
() -> assertThat(getAnalyticsState(DF_ANALYTICS_CONFIG.getId()), equalTo(DataFrameAnalyticsState.STOPPED)),
30, TimeUnit.SECONDS);
}
    /**
     * Documentation test for the evaluate data frame API using binary soft classification.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation; the assertions afterwards verify the documented example works.
     */
    public void testEvaluateDataFrame() throws Exception {
        String indexName = "evaluate-test-index";
        CreateIndexRequest createIndexRequest =
            new CreateIndexRequest(indexName)
                .mapping(XContentFactory.jsonBuilder().startObject()
                    .startObject("properties")
                        .startObject("label")
                            .field("type", "keyword")
                        .endObject()
                        .startObject("p")
                            .field("type", "double")
                        .endObject()
                    .endObject()
                .endObject());
        // Docs #0-#4 have label=false, #5-#9 have label=true; "p" is the predicted probability.
        // The expected precision / confusion-matrix assertions below follow from these values.
        BulkRequest bulkRequest =
            new BulkRequest(indexName)
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.1)) // #0
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.2)) // #1
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.3)) // #2
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.4)) // #3
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", false, "p", 0.7)) // #4
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.2)) // #5
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.3)) // #6
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.4)) // #7
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.8)) // #8
                .add(new IndexRequest().source(XContentType.JSON, "dataset", "blue", "label", true, "p", 0.9)); // #9
        RestHighLevelClient client = highLevelClient();
        client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
        client.bulk(bulkRequest, RequestOptions.DEFAULT);
        {
            // tag::evaluate-data-frame-evaluation-softclassification
            Evaluation evaluation =
                new BinarySoftClassification( // <1>
                    "label", // <2>
                    "p", // <3>
                    // Evaluation metrics // <4>
                    PrecisionMetric.at(0.4, 0.5, 0.6), // <5>
                    RecallMetric.at(0.5, 0.7), // <6>
                    ConfusionMatrixMetric.at(0.5), // <7>
                    AucRocMetric.withCurve()); // <8>
            // end::evaluate-data-frame-evaluation-softclassification
            // tag::evaluate-data-frame-request
            EvaluateDataFrameRequest request =
                new EvaluateDataFrameRequest( // <1>
                    indexName, // <2>
                    new QueryConfig(QueryBuilders.termQuery("dataset", "blue")), // <3>
                    evaluation); // <4>
            // end::evaluate-data-frame-request
            // tag::evaluate-data-frame-execute
            EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
            // end::evaluate-data-frame-execute
            // tag::evaluate-data-frame-response
            List<EvaluationMetric.Result> metrics = response.getMetrics(); // <1>
            // end::evaluate-data-frame-response
            // tag::evaluate-data-frame-results-softclassification
            PrecisionMetric.Result precisionResult = response.getMetricByName(PrecisionMetric.NAME); // <1>
            double precision = precisionResult.getScoreByThreshold("0.4"); // <2>
            ConfusionMatrixMetric.Result confusionMatrixResult = response.getMetricByName(ConfusionMatrixMetric.NAME); // <3>
            ConfusionMatrix confusionMatrix = confusionMatrixResult.getScoreByThreshold("0.5"); // <4>
            // end::evaluate-data-frame-results-softclassification
            assertThat(
                metrics.stream().map(EvaluationMetric.Result::getMetricName).collect(Collectors.toList()),
                containsInAnyOrder(PrecisionMetric.NAME, RecallMetric.NAME, ConfusionMatrixMetric.NAME, AucRocMetric.NAME));
            assertThat(precision, closeTo(0.6, 1e-9));
            assertThat(confusionMatrix.getTruePositives(), equalTo(2L)); // docs #8 and #9
            assertThat(confusionMatrix.getFalsePositives(), equalTo(1L)); // doc #4
            assertThat(confusionMatrix.getTrueNegatives(), equalTo(4L)); // docs #0, #1, #2 and #3
            assertThat(confusionMatrix.getFalseNegatives(), equalTo(3L)); // docs #5, #6 and #7
        }
        // Same evaluation executed through the async variant of the API.
        {
            EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(
                indexName,
                new QueryConfig(QueryBuilders.termQuery("dataset", "blue")),
                new BinarySoftClassification(
                    "label",
                    "p",
                    PrecisionMetric.at(0.4, 0.5, 0.6),
                    RecallMetric.at(0.5, 0.7),
                    ConfusionMatrixMetric.at(0.5),
                    AucRocMetric.withCurve()));
            // tag::evaluate-data-frame-execute-listener
            ActionListener<EvaluateDataFrameResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(EvaluateDataFrameResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::evaluate-data-frame-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::evaluate-data-frame-execute-async
            client.machineLearning().evaluateDataFrameAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::evaluate-data-frame-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the evaluate data frame API using multiclass classification.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation; the assertions verify the documented example against known data.
     */
    public void testEvaluateDataFrame_Classification() throws Exception {
        String indexName = "evaluate-classification-test-index";
        CreateIndexRequest createIndexRequest =
            new CreateIndexRequest(indexName)
                .mapping(XContentFactory.jsonBuilder().startObject()
                    .startObject("properties")
                        .startObject("actual_class")
                            .field("type", "keyword")
                        .endObject()
                        .startObject("predicted_class")
                            .field("type", "keyword")
                        .endObject()
                    .endObject()
                .endObject());
        // Ten documents with known actual/predicted class pairs; the accuracy, precision,
        // recall and confusion-matrix assertions below are derived from these rows.
        BulkRequest bulkRequest =
            new BulkRequest(indexName)
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #0
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #1
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "cat")) // #2
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "dog")) // #3
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "cat", "predicted_class", "fox")) // #4
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "cat")) // #5
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #6
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #7
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "dog", "predicted_class", "dog")) // #8
                .add(new IndexRequest().source(XContentType.JSON, "actual_class", "ant", "predicted_class", "cat")); // #9
        RestHighLevelClient client = highLevelClient();
        client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
        client.bulk(bulkRequest, RequestOptions.DEFAULT);
        {
            // tag::evaluate-data-frame-evaluation-classification
            Evaluation evaluation =
                new org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification( // <1>
                    "actual_class", // <2>
                    "predicted_class", // <3>
                    // Evaluation metrics // <4>
                    new AccuracyMetric(), // <5>
                    new org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric(), // <6>
                    new org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric(), // <7>
                    new MulticlassConfusionMatrixMetric(3)); // <8>
            // end::evaluate-data-frame-evaluation-classification
            EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation);
            EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
            // tag::evaluate-data-frame-results-classification
            AccuracyMetric.Result accuracyResult = response.getMetricByName(AccuracyMetric.NAME); // <1>
            double accuracy = accuracyResult.getOverallAccuracy(); // <2>
            org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.Result precisionResult =
                response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME); // <3>
            double precision = precisionResult.getAvgPrecision(); // <4>
            org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.Result recallResult =
                response.getMetricByName(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME); // <5>
            double recall = recallResult.getAvgRecall(); // <6>
            MulticlassConfusionMatrixMetric.Result multiclassConfusionMatrix =
                response.getMetricByName(MulticlassConfusionMatrixMetric.NAME); // <7>
            List<ActualClass> confusionMatrix = multiclassConfusionMatrix.getConfusionMatrix(); // <8>
            long otherClassesCount = multiclassConfusionMatrix.getOtherActualClassCount(); // <9>
            // end::evaluate-data-frame-results-classification
            assertThat(accuracyResult.getMetricName(), equalTo(AccuracyMetric.NAME));
            assertThat(accuracy, equalTo(0.6));
            assertThat(
                precisionResult.getMetricName(),
                equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric.NAME));
            assertThat(precision, equalTo(0.675));
            assertThat(
                recallResult.getMetricName(),
                equalTo(org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric.NAME));
            assertThat(recall, equalTo(0.45));
            assertThat(multiclassConfusionMatrix.getMetricName(), equalTo(MulticlassConfusionMatrixMetric.NAME));
            // Expected per-class rows ("ant", "cat", "dog") follow directly from docs #0-#9 above;
            // doc #4 ("fox" prediction) falls outside the top-3 classes tracked by the matrix.
            assertThat(
                confusionMatrix,
                equalTo(
                    List.of(
                        new ActualClass(
                            "ant",
                            1L,
                            List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 0L)),
                            0L),
                        new ActualClass(
                            "cat",
                            5L,
                            List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 3L), new PredictedClass("dog", 1L)),
                            1L),
                        new ActualClass(
                            "dog",
                            4L,
                            List.of(new PredictedClass("ant", 0L), new PredictedClass("cat", 1L), new PredictedClass("dog", 3L)),
                            0L))));
            assertThat(otherClassesCount, equalTo(0L));
        }
    }
public void testEvaluateDataFrame_Regression() throws Exception {
String indexName = "evaluate-classification-test-index";
CreateIndexRequest createIndexRequest =
new CreateIndexRequest(indexName)
.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("actual_value")
.field("type", "double")
.endObject()
.startObject("predicted_value")
.field("type", "double")
.endObject()
.endObject()
.endObject());
BulkRequest bulkRequest =
new BulkRequest(indexName)
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 1.0)) // #0
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.0, "predicted_value", 0.9)) // #1
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.0, "predicted_value", 2.0)) // #2
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.5, "predicted_value", 1.4)) // #3
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.2, "predicted_value", 1.3)) // #4
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 1.7, "predicted_value", 2.0)) // #5
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.1, "predicted_value", 2.1)) // #6
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.7)) // #7
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 0.8, "predicted_value", 1.0)) // #8
.add(new IndexRequest().source(XContentType.JSON, "actual_value", 2.5, "predicted_value", 2.4)); // #9
RestHighLevelClient client = highLevelClient();
client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
client.bulk(bulkRequest, RequestOptions.DEFAULT);
{
// tag::evaluate-data-frame-evaluation-regression
Evaluation evaluation =
new org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression( // <1>
"actual_value", // <2>
"predicted_value", // <3>
// Evaluation metrics // <4>
new MeanSquaredErrorMetric(), // <5>
new RSquaredMetric()); // <6>
// end::evaluate-data-frame-evaluation-regression
EvaluateDataFrameRequest request = new EvaluateDataFrameRequest(indexName, null, evaluation);
EvaluateDataFrameResponse response = client.machineLearning().evaluateDataFrame(request, RequestOptions.DEFAULT);
// tag::evaluate-data-frame-results-regression
MeanSquaredErrorMetric.Result meanSquaredErrorResult = response.getMetricByName(MeanSquaredErrorMetric.NAME); // <1>
double meanSquaredError = meanSquaredErrorResult.getError(); // <2>
RSquaredMetric.Result rSquaredResult = response.getMetricByName(RSquaredMetric.NAME); // <3>
double rSquared = rSquaredResult.getValue(); // <4>
// end::evaluate-data-frame-results-regression
assertThat(meanSquaredError, closeTo(0.021, 1e-3));
assertThat(rSquared, closeTo(0.941, 1e-3));
}
}
    /**
     * Documentation test for the explain data frame analytics API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testExplainDataFrameAnalytics() throws Exception {
        createIndex("explain-df-test-source-index");
        BulkRequest bulkRequest =
            new BulkRequest("explain-df-test-source-index")
                .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        // Seed ten identical documents so field selection and memory estimation have data to inspect.
        for (int i = 0; i < 10; ++i) {
            bulkRequest.add(new IndexRequest().source(XContentType.JSON, "timestamp", 123456789L, "total", 10L));
        }
        RestHighLevelClient client = highLevelClient();
        client.bulk(bulkRequest, RequestOptions.DEFAULT);
        {
            // tag::explain-data-frame-analytics-id-request
            ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest("existing_job_id"); // <1>
            // end::explain-data-frame-analytics-id-request
            // tag::explain-data-frame-analytics-config-request
            DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
                .setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build())
                .setAnalysis(OutlierDetection.createDefault())
                .build();
            request = new ExplainDataFrameAnalyticsRequest(config); // <1>
            // end::explain-data-frame-analytics-config-request
            // The id-based request above is only shown for the docs; the config-based
            // request replaces it and is the one actually executed below.
            // tag::explain-data-frame-analytics-execute
            ExplainDataFrameAnalyticsResponse response = client.machineLearning().explainDataFrameAnalytics(request,
                RequestOptions.DEFAULT);
            // end::explain-data-frame-analytics-execute
            // tag::explain-data-frame-analytics-response
            List<FieldSelection> fieldSelection = response.getFieldSelection(); // <1>
            MemoryEstimation memoryEstimation = response.getMemoryEstimation(); // <2>
            // end::explain-data-frame-analytics-response
            assertThat(fieldSelection.size(), equalTo(2));
            assertThat(fieldSelection.stream().map(FieldSelection::getName).collect(Collectors.toList()), contains("timestamp", "total"));
            ByteSizeValue expectedMemoryWithoutDisk = memoryEstimation.getExpectedMemoryWithoutDisk(); // <1>
            ByteSizeValue expectedMemoryWithDisk = memoryEstimation.getExpectedMemoryWithDisk(); // <2>
            // We are pretty liberal here as this test does not aim at verifying concrete numbers but rather end-to-end user workflow.
            ByteSizeValue lowerBound = new ByteSizeValue(1, ByteSizeUnit.KB);
            ByteSizeValue upperBound = new ByteSizeValue(1, ByteSizeUnit.GB);
            assertThat(expectedMemoryWithoutDisk, allOf(greaterThan(lowerBound), lessThan(upperBound)));
            assertThat(expectedMemoryWithDisk, allOf(greaterThan(lowerBound), lessThan(upperBound)));
        }
        // Async variant of the same explain call.
        {
            DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
                .setSource(DataFrameAnalyticsSource.builder().setIndex("explain-df-test-source-index").build())
                .setAnalysis(OutlierDetection.createDefault())
                .build();
            ExplainDataFrameAnalyticsRequest request = new ExplainDataFrameAnalyticsRequest(config);
            // tag::explain-data-frame-analytics-execute-listener
            ActionListener<ExplainDataFrameAnalyticsResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(ExplainDataFrameAnalyticsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::explain-data-frame-analytics-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::explain-data-frame-analytics-execute-async
            client.machineLearning().explainDataFrameAnalyticsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::explain-data-frame-analytics-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the get trained models API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testGetTrainedModels() throws Exception {
        putTrainedModel("my-trained-model");
        RestHighLevelClient client = highLevelClient();
        {
            // tag::get-trained-models-request
            GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model") // <1>
                .setPageParams(new PageParams(0, 1)) // <2>
                .setIncludeDefinition(false) // <3>
                .setDecompressDefinition(false) // <4>
                .setAllowNoMatch(true) // <5>
                .setTags("regression") // <6>
                .setForExport(false); // <7>
            // end::get-trained-models-request
            // Clear the tag filter shown in the snippet so the request matches the test model.
            request.setTags((List<String>)null);
            // tag::get-trained-models-execute
            GetTrainedModelsResponse response = client.machineLearning().getTrainedModels(request, RequestOptions.DEFAULT);
            // end::get-trained-models-execute
            // tag::get-trained-models-response
            List<TrainedModelConfig> models = response.getTrainedModels();
            // end::get-trained-models-response
            assertThat(models, hasSize(1));
        }
        // Async variant of the same request.
        {
            GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-trained-model");
            // tag::get-trained-models-execute-listener
            ActionListener<GetTrainedModelsResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(GetTrainedModelsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-trained-models-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-trained-models-execute-async
            client.machineLearning().getTrainedModelsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-trained-models-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the put trained model API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testPutTrainedModel() throws Exception {
        TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build();
        // tag::put-trained-model-config
        TrainedModelConfig trainedModelConfig = TrainedModelConfig.builder()
            .setDefinition(definition) // <1>
            .setCompressedDefinition(InferenceToXContentCompressor.deflate(definition)) // <2>
            .setModelId("my-new-trained-model") // <3>
            .setInput(new TrainedModelInput("col1", "col2", "col3", "col4")) // <4>
            .setDescription("test model") // <5>
            .setMetadata(new HashMap<>()) // <6>
            .setTags("my_regression_models") // <7>
            .setInferenceConfig(new RegressionConfig("value", 0)) // <8>
            .build();
        // end::put-trained-model-config
        // NOTE(review): the snippet config above is rebuilt before execution with only the raw
        // definition set -- presumably a real request cannot carry both the raw and the
        // compressed definition; confirm against the PutTrainedModel API docs.
        trainedModelConfig = TrainedModelConfig.builder()
            .setDefinition(definition)
            .setInferenceConfig(new RegressionConfig(null, null))
            .setModelId("my-new-trained-model")
            .setInput(new TrainedModelInput("col1", "col2", "col3", "col4"))
            .setDescription("test model")
            .setMetadata(new HashMap<>())
            .setTags("my_regression_models")
            .build();
        RestHighLevelClient client = highLevelClient();
        {
            // tag::put-trained-model-request
            PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig); // <1>
            // end::put-trained-model-request
            // tag::put-trained-model-execute
            PutTrainedModelResponse response = client.machineLearning().putTrainedModel(request, RequestOptions.DEFAULT);
            // end::put-trained-model-execute
            // tag::put-trained-model-response
            TrainedModelConfig model = response.getResponse();
            // end::put-trained-model-response
            assertThat(model.getModelId(), equalTo(trainedModelConfig.getModelId()));
            // Clean up so the async variant below can put the same model id again.
            highLevelClient().machineLearning()
                .deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT);
        }
        {
            PutTrainedModelRequest request = new PutTrainedModelRequest(trainedModelConfig);
            // tag::put-trained-model-execute-listener
            ActionListener<PutTrainedModelResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(PutTrainedModelResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::put-trained-model-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-trained-model-execute-async
            client.machineLearning().putTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-trained-model-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
            highLevelClient().machineLearning()
                .deleteTrainedModel(new DeleteTrainedModelRequest("my-new-trained-model"), RequestOptions.DEFAULT);
        }
    }
    /**
     * Documentation test for the get trained models stats API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testGetTrainedModelsStats() throws Exception {
        putTrainedModel("my-trained-model");
        RestHighLevelClient client = highLevelClient();
        {
            // tag::get-trained-models-stats-request
            GetTrainedModelsStatsRequest request =
                new GetTrainedModelsStatsRequest("my-trained-model") // <1>
                    .setPageParams(new PageParams(0, 1)) // <2>
                    .setAllowNoMatch(true); // <3>
            // end::get-trained-models-stats-request
            // tag::get-trained-models-stats-execute
            GetTrainedModelsStatsResponse response =
                client.machineLearning().getTrainedModelsStats(request, RequestOptions.DEFAULT);
            // end::get-trained-models-stats-execute
            // tag::get-trained-models-stats-response
            List<TrainedModelStats> models = response.getTrainedModelStats();
            // end::get-trained-models-stats-response
            assertThat(models, hasSize(1));
        }
        // Async variant of the same request.
        {
            GetTrainedModelsStatsRequest request = new GetTrainedModelsStatsRequest("my-trained-model");
            // tag::get-trained-models-stats-execute-listener
            ActionListener<GetTrainedModelsStatsResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(GetTrainedModelsStatsResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-trained-models-stats-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-trained-models-stats-execute-async
            client.machineLearning()
                .getTrainedModelsStatsAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-trained-models-stats-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the delete trained model API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation. Each sub-block puts the model first, since the previous
     * sub-block deletes it.
     */
    public void testDeleteTrainedModel() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            putTrainedModel("my-trained-model");
            // tag::delete-trained-model-request
            DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model"); // <1>
            // end::delete-trained-model-request
            // tag::delete-trained-model-execute
            AcknowledgedResponse response = client.machineLearning().deleteTrainedModel(request, RequestOptions.DEFAULT);
            // end::delete-trained-model-execute
            // tag::delete-trained-model-response
            boolean deleted = response.isAcknowledged();
            // end::delete-trained-model-response
            assertThat(deleted, is(true));
        }
        // Async variant; re-put the model because the block above deleted it.
        {
            putTrainedModel("my-trained-model");
            DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model");
            // tag::delete-trained-model-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-trained-model-execute-listener
            // Replace the empty listener by a blocking listener in test
            CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::delete-trained-model-execute-async
            client.machineLearning().deleteTrainedModelAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::delete-trained-model-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the put filter API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testCreateFilter() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // tag::put-filter-config
            MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains") // <1>
                .setDescription("A list of safe domains") // <2>
                .setItems("*.google.com", "wikipedia.org"); // <3>
            // end::put-filter-config
            // tag::put-filter-request
            PutFilterRequest request = new PutFilterRequest(filterBuilder.build()); // <1>
            // end::put-filter-request
            // tag::put-filter-execute
            PutFilterResponse response = client.machineLearning().putFilter(request, RequestOptions.DEFAULT);
            // end::put-filter-execute
            // tag::put-filter-response
            MlFilter createdFilter = response.getResponse(); // <1>
            // end::put-filter-response
            assertThat(createdFilter.getId(), equalTo("my_safe_domains"));
        }
        // Async variant; uses a distinct filter id to avoid clashing with the one created above.
        {
            MlFilter.Builder filterBuilder = MlFilter.builder("safe_domains_async")
                .setDescription("A list of safe domains")
                .setItems("*.google.com", "wikipedia.org");
            PutFilterRequest request = new PutFilterRequest(filterBuilder.build());
            // tag::put-filter-execute-listener
            ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() {
                @Override
                public void onResponse(PutFilterResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::put-filter-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::put-filter-execute-async
            client.machineLearning().putFilterAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::put-filter-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the get filters API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testGetFilters() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String filterId = "get-filter-doc-test";
        MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org");
        client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT);
        {
            // tag::get-filters-request
            GetFiltersRequest request = new GetFiltersRequest(); // <1>
            // end::get-filters-request
            // tag::get-filters-filter-id
            request.setFilterId("get-filter-doc-test"); // <1>
            // end::get-filters-filter-id
            // tag::get-filters-page-params
            request.setFrom(100); // <1>
            request.setSize(200); // <2>
            // end::get-filters-page-params
            // Undo the paging shown in the snippet so the single test filter is returned.
            request.setFrom(null);
            request.setSize(null);
            // tag::get-filters-execute
            GetFiltersResponse response = client.machineLearning().getFilter(request, RequestOptions.DEFAULT);
            // end::get-filters-execute
            // tag::get-filters-response
            long count = response.count(); // <1>
            List<MlFilter> filters = response.filters(); // <2>
            // end::get-filters-response
            assertEquals(1, filters.size());
        }
        // Async variant of the same request.
        {
            GetFiltersRequest request = new GetFiltersRequest();
            request.setFilterId(filterId);
            // tag::get-filters-execute-listener
            ActionListener<GetFiltersResponse> listener = new ActionListener<GetFiltersResponse>() {
                @Override
                public void onResponse(GetFiltersResponse getfiltersResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-filters-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-filters-execute-async
            client.machineLearning().getFilterAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-filters-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the update filter API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testUpdateFilter() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();
        String filterId = "update-filter-doc-test";
        MlFilter.Builder filterBuilder = MlFilter.builder(filterId).setDescription("test").setItems("*.google.com", "wikipedia.org");
        client.machineLearning().putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT);
        {
            // tag::update-filter-request
            UpdateFilterRequest request = new UpdateFilterRequest(filterId); // <1>
            // end::update-filter-request
            // tag::update-filter-description
            request.setDescription("my new description"); // <1>
            // end::update-filter-description
            // tag::update-filter-add-items
            request.setAddItems(Arrays.asList("*.bing.com", "*.elastic.co")); // <1>
            // end::update-filter-add-items
            // tag::update-filter-remove-items
            request.setRemoveItems(Arrays.asList("*.google.com")); // <1>
            // end::update-filter-remove-items
            // tag::update-filter-execute
            PutFilterResponse response = client.machineLearning().updateFilter(request, RequestOptions.DEFAULT);
            // end::update-filter-execute
            // tag::update-filter-response
            MlFilter updatedFilter = response.getResponse(); // <1>
            // end::update-filter-response
            assertEquals(request.getDescription(), updatedFilter.getDescription());
        }
        // Async variant of the same request.
        {
            UpdateFilterRequest request = new UpdateFilterRequest(filterId);
            // tag::update-filter-execute-listener
            ActionListener<PutFilterResponse> listener = new ActionListener<PutFilterResponse>() {
                @Override
                public void onResponse(PutFilterResponse putFilterResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::update-filter-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::update-filter-execute-async
            client.machineLearning().updateFilterAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::update-filter-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation test for the delete filter API.
     * The {@code tag::}/{@code end::} comments delimit snippets extracted verbatim into the
     * reference documentation.
     */
    public void testDeleteFilter() throws Exception {
        RestHighLevelClient client = highLevelClient();
        String filterId = createFilter(client);
        {
            // tag::delete-filter-request
            DeleteFilterRequest request = new DeleteFilterRequest(filterId); // <1>
            // end::delete-filter-request
            // tag::delete-filter-execute
            AcknowledgedResponse response = client.machineLearning().deleteFilter(request, RequestOptions.DEFAULT);
            // end::delete-filter-execute
            // tag::delete-filter-response
            boolean isAcknowledged = response.isAcknowledged(); // <1>
            // end::delete-filter-response
            assertTrue(isAcknowledged);
        }
        // Re-create the filter, as the block above deleted it.
        filterId = createFilter(client);
        {
            DeleteFilterRequest request = new DeleteFilterRequest(filterId);
            // tag::delete-filter-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::delete-filter-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::delete-filter-execute-async
            client.machineLearning().deleteFilterAsync(request, RequestOptions.DEFAULT, listener); //<1>
            // end::delete-filter-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML info API.
     *
     * Tagged regions are extracted verbatim into the reference docs. The sync
     * section also sanity-checks that the returned info map carries the
     * "defaults" and "limits" top-level keys.
     */
    public void testGetMlInfo() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // tag::get-ml-info-request
            MlInfoRequest request = new MlInfoRequest(); // <1>
            // end::get-ml-info-request
            // tag::get-ml-info-execute
            MlInfoResponse response = client.machineLearning()
                .getMlInfo(request, RequestOptions.DEFAULT);
            // end::get-ml-info-execute
            // tag::get-ml-info-response
            final Map<String, Object> info = response.getInfo();// <1>
            // end::get-ml-info-response
            assertTrue(info.containsKey("defaults"));
            assertTrue(info.containsKey("limits"));
        }
        {
            MlInfoRequest request = new MlInfoRequest();
            // tag::get-ml-info-execute-listener
            ActionListener<MlInfoResponse> listener = new ActionListener<MlInfoResponse>() {
                @Override
                public void onResponse(MlInfoResponse response) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::get-ml-info-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::get-ml-info-execute-async
            client.machineLearning()
                .getMlInfoAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-ml-info-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML set-upgrade-mode API.
     *
     * Tagged regions are extracted verbatim into the reference docs. The request
     * shown in the docs enables upgrade mode, but the test flips it back to
     * {@code false} before executing so no cluster-wide setting is left behind.
     */
    public void testSetUpgradeMode() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // tag::set-upgrade-mode-request
            SetUpgradeModeRequest request = new SetUpgradeModeRequest(true); // <1>
            request.setTimeout(TimeValue.timeValueMinutes(10)); // <2>
            // end::set-upgrade-mode-request
            // Set to false so that the cluster setting does not have to be unset at the end of the test.
            request.setEnabled(false);
            // tag::set-upgrade-mode-execute
            AcknowledgedResponse acknowledgedResponse = client.machineLearning().setUpgradeMode(request, RequestOptions.DEFAULT);
            // end::set-upgrade-mode-execute
            // tag::set-upgrade-mode-response
            boolean acknowledged = acknowledgedResponse.isAcknowledged(); // <1>
            // end::set-upgrade-mode-response
            assertThat(acknowledged, is(true));
        }
        {
            SetUpgradeModeRequest request = new SetUpgradeModeRequest(false);
            // tag::set-upgrade-mode-execute-listener
            ActionListener<AcknowledgedResponse> listener = new ActionListener<AcknowledgedResponse>() {
                @Override
                public void onResponse(AcknowledgedResponse acknowledgedResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::set-upgrade-mode-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::set-upgrade-mode-execute-async
            client.machineLearning()
                .setUpgradeModeAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::set-upgrade-mode-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
    /**
     * Documentation-snippet test for the ML estimate-model-memory API.
     *
     * Tagged regions are extracted verbatim into the reference docs. The sync
     * section builds an analysis config with a partitioned detector plus an
     * influencer, supplies their cardinalities, and checks the estimate is
     * non-trivial (> 10 MB); the async section uses a minimal config.
     */
    public void testEstimateModelMemory() throws Exception {
        RestHighLevelClient client = highLevelClient();
        {
            // tag::estimate-model-memory-request
            Detector.Builder detectorBuilder = new Detector.Builder()
                .setFunction("count")
                .setPartitionFieldName("status");
            AnalysisConfig.Builder analysisConfigBuilder =
                new AnalysisConfig.Builder(Collections.singletonList(detectorBuilder.build()))
                    .setBucketSpan(TimeValue.timeValueMinutes(10))
                    .setInfluencers(Collections.singletonList("src_ip"));
            EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfigBuilder.build()); // <1>
            request.setOverallCardinality(Collections.singletonMap("status", 50L)); // <2>
            request.setMaxBucketCardinality(Collections.singletonMap("src_ip", 30L)); // <3>
            // end::estimate-model-memory-request
            // tag::estimate-model-memory-execute
            EstimateModelMemoryResponse estimateModelMemoryResponse =
                client.machineLearning().estimateModelMemory(request, RequestOptions.DEFAULT);
            // end::estimate-model-memory-execute
            // tag::estimate-model-memory-response
            ByteSizeValue modelMemoryEstimate = estimateModelMemoryResponse.getModelMemoryEstimate(); // <1>
            long estimateInBytes = modelMemoryEstimate.getBytes();
            // end::estimate-model-memory-response
            assertThat(estimateInBytes, greaterThan(10000000L));
        }
        {
            AnalysisConfig analysisConfig =
                AnalysisConfig.builder(Collections.singletonList(Detector.builder().setFunction("count").build())).build();
            EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig);
            // tag::estimate-model-memory-execute-listener
            ActionListener<EstimateModelMemoryResponse> listener = new ActionListener<EstimateModelMemoryResponse>() {
                @Override
                public void onResponse(EstimateModelMemoryResponse estimateModelMemoryResponse) {
                    // <1>
                }
                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
            // end::estimate-model-memory-execute-listener
            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);
            // tag::estimate-model-memory-execute-async
            client.machineLearning()
                .estimateModelMemoryAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::estimate-model-memory-execute-async
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
private String createFilter(RestHighLevelClient client) throws IOException {
MlFilter.Builder filterBuilder = MlFilter.builder("my_safe_domains")
.setDescription("A list of safe domains")
.setItems("*.google.com", "wikipedia.org");
PutFilterRequest putFilterRequest = new PutFilterRequest(filterBuilder.build());
PutFilterResponse putFilterResponse = client.machineLearning().putFilter(putFilterRequest, RequestOptions.DEFAULT);
MlFilter createdFilter = putFilterResponse.getResponse();
assertThat(createdFilter.getId(), equalTo("my_safe_domains"));
return createdFilter.getId();
}
private void createIndex(String indexName) throws IOException {
CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName);
createIndexRequest.mapping(XContentFactory.jsonBuilder().startObject()
.startObject("properties")
.startObject("timestamp")
.field("type", "date")
.endObject()
.startObject("total")
.field("type", "long")
.endObject()
.endObject()
.endObject());
highLevelClient().indices().create(createIndexRequest, RequestOptions.DEFAULT);
}
private DataFrameAnalyticsState getAnalyticsState(String configId) throws IOException {
GetDataFrameAnalyticsStatsResponse statsResponse =
highLevelClient().machineLearning().getDataFrameAnalyticsStats(
new GetDataFrameAnalyticsStatsRequest(configId), RequestOptions.DEFAULT);
assertThat(statsResponse.getAnalyticsStats(), hasSize(1));
DataFrameAnalyticsStats stats = statsResponse.getAnalyticsStats().get(0);
return stats.getState();
}
private void putTrainedModel(String modelId) throws IOException {
TrainedModelDefinition definition = TrainedModelDefinitionTests.createRandomBuilder(TargetType.REGRESSION).build();
TrainedModelConfig trainedModelConfig = TrainedModelConfig.builder()
.setDefinition(definition)
.setModelId(modelId)
.setInferenceConfig(new RegressionConfig("value", 0))
.setInput(new TrainedModelInput(Arrays.asList("col1", "col2", "col3", "col4")))
.setDescription("test model")
.build();
highLevelClient().machineLearning().putTrainedModel(new PutTrainedModelRequest(trainedModelConfig), RequestOptions.DEFAULT);
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(new MlInferenceNamedXContentProvider().getNamedXContentParsers());
}
    // Reusable outlier-detection data frame analytics config shared by the
    // documentation tests in this class (reads "put-test-source-index",
    // writes "put-test-dest-index").
    private static final DataFrameAnalyticsConfig DF_ANALYTICS_CONFIG =
        DataFrameAnalyticsConfig.builder()
            .setId("my-analytics-config")
            .setSource(DataFrameAnalyticsSource.builder()
                .setIndex("put-test-source-index")
                .build())
            .setDest(DataFrameAnalyticsDest.builder()
                .setIndex("put-test-dest-index")
                .build())
            .setAnalysis(OutlierDetection.createDefault())
            .build();
}
| uschindler/elasticsearch | client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java | Java | apache-2.0 | 199,205 |
package org.tmarciniak.mtp.model;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import org.apache.commons.lang3.builder.ToStringBuilder;
/**
 * @author tomasz.marciniak
 *
 * JPA entity representing a trade message.
 *
 * NOTE: despite the class being {@code final}, it is a mutable entity — the
 * setters are required by the persistence provider and request binding (the
 * previous javadoc incorrectly called it immutable). The one mutable value
 * type it holds, {@link Date}, is defensively copied on both set and get so
 * callers cannot change the entity's state through a shared Date instance.
 */
@Entity
@Table
public final class TradeMessage implements Serializable {

    private static final long serialVersionUID = 1L;

    // Database-generated surrogate key backed by the trade_message_id_seq sequence.
    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "trade_message_id_seq")
    @SequenceGenerator(name = "trade_message_id_seq", sequenceName = "trade_message_id_seq", allocationSize = 1)
    private long id;

    private String userId;
    private String currencyFrom;
    private String currencyTo;
    private BigDecimal amountBuy;
    private BigDecimal amountSell;
    private BigDecimal rate;
    private Date timePlaced;
    private String originatingCountry;

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getCurrencyFrom() {
        return currencyFrom;
    }

    public void setCurrencyFrom(String currencyFrom) {
        this.currencyFrom = currencyFrom;
    }

    public String getCurrencyTo() {
        return currencyTo;
    }

    public void setCurrencyTo(String currencyTo) {
        this.currencyTo = currencyTo;
    }

    public BigDecimal getAmountBuy() {
        return amountBuy;
    }

    public void setAmountBuy(BigDecimal amountBuy) {
        this.amountBuy = amountBuy;
    }

    public BigDecimal getAmountSell() {
        return amountSell;
    }

    public void setAmountSell(BigDecimal amountSell) {
        this.amountSell = amountSell;
    }

    public BigDecimal getRate() {
        return rate;
    }

    public void setRate(BigDecimal rate) {
        this.rate = rate;
    }

    // hashCode/equals consider every field, including the generated id; two
    // unsaved instances with default ids can therefore compare equal only if
    // all other fields match as well.
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result
                + ((amountBuy == null) ? 0 : amountBuy.hashCode());
        result = prime * result
                + ((amountSell == null) ? 0 : amountSell.hashCode());
        result = prime * result
                + ((currencyFrom == null) ? 0 : currencyFrom.hashCode());
        result = prime * result
                + ((currencyTo == null) ? 0 : currencyTo.hashCode());
        result = prime * result + (int) (id ^ (id >>> 32));
        result = prime
                * result
                + ((originatingCountry == null) ? 0 : originatingCountry
                        .hashCode());
        result = prime * result + ((rate == null) ? 0 : rate.hashCode());
        result = prime * result
                + ((timePlaced == null) ? 0 : timePlaced.hashCode());
        result = prime * result + ((userId == null) ? 0 : userId.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        TradeMessage other = (TradeMessage) obj;
        if (amountBuy == null) {
            if (other.amountBuy != null)
                return false;
        } else if (!amountBuy.equals(other.amountBuy))
            return false;
        if (amountSell == null) {
            if (other.amountSell != null)
                return false;
        } else if (!amountSell.equals(other.amountSell))
            return false;
        if (currencyFrom == null) {
            if (other.currencyFrom != null)
                return false;
        } else if (!currencyFrom.equals(other.currencyFrom))
            return false;
        if (currencyTo == null) {
            if (other.currencyTo != null)
                return false;
        } else if (!currencyTo.equals(other.currencyTo))
            return false;
        if (id != other.id)
            return false;
        if (originatingCountry == null) {
            if (other.originatingCountry != null)
                return false;
        } else if (!originatingCountry.equals(other.originatingCountry))
            return false;
        if (rate == null) {
            if (other.rate != null)
                return false;
        } else if (!rate.equals(other.rate))
            return false;
        if (timePlaced == null) {
            if (other.timePlaced != null)
                return false;
        } else if (!timePlaced.equals(other.timePlaced))
            return false;
        if (userId == null) {
            if (other.userId != null)
                return false;
        } else if (!userId.equals(other.userId))
            return false;
        return true;
    }

    /**
     * Returns a defensive copy of the placement time, or {@code null} when it
     * has not been set. (Previously this threw a NullPointerException on an
     * unset time and the setter stored the caller's Date directly, letting
     * callers mutate the entity's state from outside.)
     */
    public Date getTimePlaced() {
        return timePlaced == null ? null : new Date(timePlaced.getTime());
    }

    /** Stores a defensive copy so later mutation of the argument cannot affect this entity. */
    public void setTimePlaced(Date timePlaced) {
        this.timePlaced = timePlaced == null ? null : new Date(timePlaced.getTime());
    }

    public String getOriginatingCountry() {
        return originatingCountry;
    }

    public void setOriginatingCountry(String originatingCountry) {
        this.originatingCountry = originatingCountry;
    }

    /** Reflection-based dump of every field; intended for logging/debugging. */
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }
}
| tmarciniak/cfmtp | mtp-core/src/main/java/org/tmarciniak/mtp/model/TradeMessage.java | Java | apache-2.0 | 4,895 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.polly;
import javax.annotation.Generated;
import com.amazonaws.services.polly.model.*;
/**
* Abstract implementation of {@code AmazonPollyAsync}. Convenient method forms pass through to the corresponding
* overload that takes a request object and an {@code AsyncHandler}, which throws an
* {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonPollyAsync extends AbstractAmazonPolly implements AmazonPollyAsync {

    // NOTE: generated code — do not edit by hand; changes are lost on regeneration.
    // Pattern for every operation below: the single-argument async overload
    // delegates to the two-argument overload with a null AsyncHandler, and the
    // two-argument overload throws UnsupportedOperationException so subclasses
    // only need to override the methods they actually support.

    protected AbstractAmazonPollyAsync() {
    }

    @Override
    public java.util.concurrent.Future<DeleteLexiconResult> deleteLexiconAsync(DeleteLexiconRequest request) {
        return deleteLexiconAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DeleteLexiconResult> deleteLexiconAsync(DeleteLexiconRequest request,
            com.amazonaws.handlers.AsyncHandler<DeleteLexiconRequest, DeleteLexiconResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<DescribeVoicesResult> describeVoicesAsync(DescribeVoicesRequest request) {
        return describeVoicesAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<DescribeVoicesResult> describeVoicesAsync(DescribeVoicesRequest request,
            com.amazonaws.handlers.AsyncHandler<DescribeVoicesRequest, DescribeVoicesResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<GetLexiconResult> getLexiconAsync(GetLexiconRequest request) {
        return getLexiconAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<GetLexiconResult> getLexiconAsync(GetLexiconRequest request,
            com.amazonaws.handlers.AsyncHandler<GetLexiconRequest, GetLexiconResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<ListLexiconsResult> listLexiconsAsync(ListLexiconsRequest request) {
        return listLexiconsAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<ListLexiconsResult> listLexiconsAsync(ListLexiconsRequest request,
            com.amazonaws.handlers.AsyncHandler<ListLexiconsRequest, ListLexiconsResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<PutLexiconResult> putLexiconAsync(PutLexiconRequest request) {
        return putLexiconAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<PutLexiconResult> putLexiconAsync(PutLexiconRequest request,
            com.amazonaws.handlers.AsyncHandler<PutLexiconRequest, PutLexiconResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

    @Override
    public java.util.concurrent.Future<SynthesizeSpeechResult> synthesizeSpeechAsync(SynthesizeSpeechRequest request) {
        return synthesizeSpeechAsync(request, null);
    }

    @Override
    public java.util.concurrent.Future<SynthesizeSpeechResult> synthesizeSpeechAsync(SynthesizeSpeechRequest request,
            com.amazonaws.handlers.AsyncHandler<SynthesizeSpeechRequest, SynthesizeSpeechResult> asyncHandler) {
        throw new java.lang.UnsupportedOperationException();
    }

}
| dagnir/aws-sdk-java | aws-java-sdk-polly/src/main/java/com/amazonaws/services/polly/AbstractAmazonPollyAsync.java | Java | apache-2.0 | 4,021 |
package dk.itu.pervasive.mobile.data;
import android.app.Activity;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.util.Log;
import android.widget.Toast;
import dk.itu.pervasive.mobile.R;
import java.io.FileOutputStream;
/**
 * @author Tony Beltramelli www.tonybeltramelli.com
 *
 * Singleton that caches the user name, surface address and sticker id read
 * from the default shared preferences, and offers small helpers for media
 * path resolution, private-file output and UI toasts.
 *
 * NOTE(review): getInstance() is not synchronized — presumably only ever
 * called from the main (UI) thread; confirm before using from workers.
 */
public class DataManager
{
    public static final String PREF_KEY_SAVE = "save";
    public static final String PREF_KEY_USERNAME = "username";
    public static final String PREF_KEY_SURFACE_ADDRESS = "surfaceAddress";
    public static final String PREF_KEY_STICKER_ID = "stickerID";

    private static DataManager _instance = null;

    // Owning activity; set via setContext() before any other method is used.
    private Activity _context;

    private String _username = "";
    private String _surfaceAddress = "";
    private String _stickerID = "";

    private DataManager()
    {
    }

    /** Lazily creates and returns the shared instance. */
    public static DataManager getInstance()
    {
        if (_instance == null)
        {
            _instance = new DataManager();
        }
        return _instance;
    }

    /**
     * Re-reads the user name, surface address and sticker id from the default
     * shared preferences, falling back to the defaults declared in resources.
     */
    public void saveData()
    {
        _username = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_USERNAME, _context.getResources().getString(R.string.preference_user_name_default));
        _surfaceAddress = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_SURFACE_ADDRESS, _context.getResources().getString(R.string.preference_surface_address_default));
        _stickerID = PreferenceManager.getDefaultSharedPreferences(_context).getString(PREF_KEY_STICKER_ID, _context.getResources().getString(R.string.preference_sticker_id_default));
        Log.wtf("save data", _username + ", " + _surfaceAddress + ", " + _stickerID);
    }

    /**
     * Resolves a media content Uri to a filesystem path via the MediaStore.
     *
     * Fixed: the Cursor is now always closed (it previously leaked), and a
     * null cursor (content provider unavailable) returns null instead of
     * throwing a NullPointerException.
     */
    public String getPathFromUri(Uri uri)
    {
        String[] projection = { MediaStore.Images.Media.DATA };
        Cursor cursor = _context.getContentResolver().query(uri, projection, null, null, null);
        if (cursor == null)
        {
            return null;
        }
        try
        {
            int columnIndex = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
            cursor.moveToFirst();
            return cursor.getString(columnIndex);
        } finally
        {
            cursor.close();
        }
    }

    /**
     * Writes the given bytes to an app-private file. Best effort: failures are
     * logged and swallowed, as before. Fixed: the stream is now closed even
     * when write() throws (it previously leaked on that path).
     */
    public void saveImage(String imageName, byte[] bytes)
    {
        FileOutputStream fos = null;
        try
        {
            fos = _context.openFileOutput(imageName, Context.MODE_PRIVATE);
            fos.write(bytes);
        } catch (Exception e)
        {
            e.printStackTrace();
        } finally
        {
            if (fos != null)
            {
                try
                {
                    fos.close();
                } catch (Exception e)
                {
                    e.printStackTrace();
                }
            }
        }
    }

    /** Shows a short toast on the UI thread; safe to call from any thread. */
    public void displayMessage(final String message)
    {
        _context.runOnUiThread(new Runnable() {
            public void run() {
                Toast.makeText(_context, message, Toast.LENGTH_SHORT).show();
            }
        });
    }

    public String getUsername()
    {
        return _username;
    }

    public String getSurfaceAddress()
    {
        return _surfaceAddress;
    }

    public String getStickerID()
    {
        return _stickerID;
    }

    /** Sets the owning activity and immediately refreshes the cached settings. */
    public void setContext(Activity context)
    {
        _context = context;
        saveData();
    }

    public Context getContext(){
        return _context;
    }
}
| tonybeltramelli/Ubiquitous-Media-Sharing-Surface | dk.itu.pervasive.mobile.android/src/dk/itu/pervasive/mobile/data/DataManager.java | Java | apache-2.0 | 2,863 |
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.struts.actionforms;
import static org.mifos.framework.util.helpers.DateUtils.dateFallsBeforeDate;
import static org.mifos.framework.util.helpers.DateUtils.getDateAsSentFromBrowser;
import java.sql.Date;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.Globals;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.joda.time.LocalDate;
import org.mifos.accounts.servicefacade.AccountTypeDto;
import org.mifos.accounts.util.helpers.AccountConstants;
import org.mifos.application.admin.servicefacade.InvalidDateException;
import org.mifos.application.master.business.MifosCurrency;
import org.mifos.config.AccountingRules;
import org.mifos.framework.business.util.helpers.MethodNameConstants;
import org.mifos.framework.struts.actionforms.BaseActionForm;
import org.mifos.framework.util.helpers.Constants;
import org.mifos.framework.util.helpers.DateUtils;
import org.mifos.framework.util.helpers.DoubleConversionResult;
import org.mifos.framework.util.helpers.SessionUtils;
import org.mifos.security.login.util.helpers.LoginConstants;
import org.mifos.security.util.ActivityMapper;
import org.mifos.security.util.UserContext;
public class AccountApplyPaymentActionForm extends BaseActionForm {
private String input;
private String transactionDateDD;
private String transactionDateMM;
private String transactionDateYY;
private String amount;
private Short currencyId;
private String receiptId;
private String receiptDateDD;
private String receiptDateMM;
private String receiptDateYY;
/*
* Among other things, this field holds the PaymentTypes value for disbursements.
*/
private String paymentTypeId;
private String waiverInterest;
private String globalAccountNum;
private String accountId;
private String prdOfferingName;
private boolean amountCannotBeZero = true;
private java.util.Date lastPaymentDate;
private String accountForTransfer;
private Short transferPaymentTypeId;
public boolean amountCannotBeZero() {
return this.amountCannotBeZero;
}
public void setAmountCannotBeZero(boolean amountCannotBeZero) {
this.amountCannotBeZero = amountCannotBeZero;
}
public String getPrdOfferingName() {
return prdOfferingName;
}
public void setPrdOfferingName(String prdOfferingName) {
this.prdOfferingName = prdOfferingName;
}
public String getAmount() {
return amount;
}
public void setAmount(String amount) {
this.amount = amount;
}
public String getInput() {
return input;
}
@Override
public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) {
String methodCalled = request.getParameter(MethodNameConstants.METHOD);
ActionErrors errors = new ActionErrors();
if (methodCalled != null && methodCalled.equals("preview")) {
validateTransfer(errors);
validateTransactionDate(errors);
validatePaymentType(errors);
validateReceiptDate(errors);
String accountType = (String) request.getSession().getAttribute(Constants.ACCOUNT_TYPE);
validateAccountType(errors, accountType);
validateAmount(errors);
validateModeOfPaymentSecurity(request, errors);
}
if (!errors.isEmpty()) {
request.setAttribute(Globals.ERROR_KEY, errors);
request.setAttribute("methodCalled", methodCalled);
}
return errors;
}
private void validateModeOfPaymentSecurity(HttpServletRequest request, ActionErrors errors){
UserContext userContext = (UserContext) SessionUtils.getAttribute(Constants.USER_CONTEXT_KEY, request.getSession());
if(getPaymentTypeId().equals("4") && !ActivityMapper.getInstance().isModeOfPaymentSecurity(userContext)){
errors.add(AccountConstants.LOAN_TRANSFER_PERMISSION, new ActionMessage(AccountConstants.LOAN_TRANSFER_PERMISSION,
getLocalizedMessage("accounts.mode_of_payment_permission")));
}
}
private void validateTransfer(ActionErrors errors) {
if (paymentTypeId.equals(String.valueOf(transferPaymentTypeId))
&& StringUtils.isBlank(accountForTransfer)) {
errors.add(AccountConstants.NO_ACCOUNT_FOR_TRANSFER, new ActionMessage(AccountConstants.NO_ACCOUNT_FOR_TRANSFER));
}
}
private void validateAccountType(ActionErrors errors, String accountType) {
if (accountType != null && accountType.equals(AccountTypeDto.LOAN_ACCOUNT.name())) {
if (getAmount() == null || getAmount().equals("")) {
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
getLocalizedMessage("accounts.amt")));
}
}
}
private void validatePaymentType(ActionErrors errors) {
if (StringUtils.isEmpty(getPaymentTypeId())) {
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
getLocalizedMessage("accounts.mode_of_payment")));
}
}
private void validateReceiptDate(ActionErrors errors) {
if (getReceiptDate() != null && !getReceiptDate().equals("")) {
ActionErrors validationErrors = validateDate(getReceiptDate(), getLocalizedMessage("accounts.receiptdate"));
if (null != validationErrors && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
}
}
private void validateTransactionDate(ActionErrors errors) {
String fieldName = "accounts.date_of_trxn";
ActionErrors validationErrors = validateDate(getTransactionDate(), getLocalizedMessage(fieldName));
if (null != validationErrors && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
if (null != getTransactionDate()){
validationErrors = validatePaymentDate(getTransactionDate(), getLocalizedMessage(fieldName));
if (validationErrors != null && !validationErrors.isEmpty()) {
errors.add(validationErrors);
}
}
}
//exposed for testing
public ActionErrors validatePaymentDate(String transactionDate, String fieldName) {
ActionErrors errors = null;
try {
if (lastPaymentDate != null && dateFallsBeforeDate(getDateAsSentFromBrowser(transactionDate), lastPaymentDate)) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT,
new ActionMessage(AccountConstants.ERROR_PAYMENT_DATE_BEFORE_LAST_PAYMENT,
fieldName));
}
} catch (InvalidDateException ide) {
errors = new ActionErrors(); //dont add a message, since it was already added in validateDate()
}
return errors;
}
protected ActionErrors validateDate(String date, String fieldName) {
ActionErrors errors = null;
java.sql.Date sqlDate = null;
if (date != null && !date.equals("")) {
try {
sqlDate = getDateAsSentFromBrowser(date);
if (DateUtils.whichDirection(sqlDate) > 0) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_FUTUREDATE, new ActionMessage(AccountConstants.ERROR_FUTUREDATE,
fieldName));
}
} catch (InvalidDateException ide) {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_INVALIDDATE, new ActionMessage(AccountConstants.ERROR_INVALIDDATE,
fieldName));
}
} else {
errors = new ActionErrors();
errors.add(AccountConstants.ERROR_MANDATORY, new ActionMessage(AccountConstants.ERROR_MANDATORY,
fieldName));
}
return errors;
}
protected Locale getUserLocale(HttpServletRequest request) {
Locale locale = null;
HttpSession session = request.getSession();
if (session != null) {
UserContext userContext = (UserContext) session.getAttribute(LoginConstants.USERCONTEXT);
if (null != userContext) {
locale = userContext.getCurrentLocale();
}
}
return locale;
}
protected void validateAmount(ActionErrors errors) {
MifosCurrency currency = null;
if (getCurrencyId() != null && AccountingRules.isMultiCurrencyEnabled()) {
currency = AccountingRules.getCurrencyByCurrencyId(getCurrencyId());
}
DoubleConversionResult conversionResult = validateAmount(getAmount(), currency , AccountConstants.ACCOUNT_AMOUNT, errors, "");
if (amountCannotBeZero() && conversionResult.getErrors().size() == 0 && !(conversionResult.getDoubleValue() > 0.0)) {
addError(errors, AccountConstants.ACCOUNT_AMOUNT, AccountConstants.ERRORS_MUST_BE_GREATER_THAN_ZERO,
getLocalizedMessage(AccountConstants.ACCOUNT_AMOUNT));
}
}
public void setInput(String input) {
this.input = input;
}
public String getPaymentTypeId() {
return paymentTypeId;
}
public void setPaymentTypeId(String paymentTypeId) {
this.paymentTypeId = paymentTypeId;
}
public String getReceiptDate() {
return compileDateString(receiptDateDD, receiptDateMM, receiptDateYY);
}
public void setReceiptDate(String receiptDate) throws InvalidDateException {
if (StringUtils.isBlank(receiptDate)) {
receiptDateDD = null;
receiptDateMM = null;
receiptDateYY = null;
} else {
Calendar cal = new GregorianCalendar();
java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
cal.setTime(date);
receiptDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
receiptDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
receiptDateYY = Integer.toString(cal.get(Calendar.YEAR));
}
}
public String getReceiptId() {
return receiptId;
}
public void setReceiptId(String receiptId) {
this.receiptId = receiptId;
}
public String getTransactionDate() {
return compileDateString(transactionDateDD, transactionDateMM, transactionDateYY);
}
public void setTransactionDate(String receiptDate) throws InvalidDateException {
if (StringUtils.isBlank(receiptDate)) {
transactionDateDD = null;
transactionDateMM = null;
transactionDateYY = null;
} else {
Calendar cal = new GregorianCalendar();
java.sql.Date date = getDateAsSentFromBrowser(receiptDate);
cal.setTime(date);
transactionDateDD = Integer.toString(cal.get(Calendar.DAY_OF_MONTH));
transactionDateMM = Integer.toString(cal.get(Calendar.MONTH) + 1);
transactionDateYY = Integer.toString(cal.get(Calendar.YEAR));
}
}
public String getAccountId() {
return accountId;
}
public void setAccountId(String accountId) {
this.accountId = accountId;
}
public String getGlobalAccountNum() {
return globalAccountNum;
}
public void setGlobalAccountNum(String globalAccountNum) {
this.globalAccountNum = globalAccountNum;
}
protected void clear() throws InvalidDateException {
this.amount = null;
this.paymentTypeId = null;
setReceiptDate(null);
this.receiptId = null;
}
public String getReceiptDateDD() {
return receiptDateDD;
}
public void setReceiptDateDD(String receiptDateDD) {
this.receiptDateDD = receiptDateDD;
}
public String getReceiptDateMM() {
return receiptDateMM;
}
public void setReceiptDateMM(String receiptDateMM) {
this.receiptDateMM = receiptDateMM;
}
public String getReceiptDateYY() {
return receiptDateYY;
}
public void setReceiptDateYY(String receiptDateYY) {
this.receiptDateYY = receiptDateYY;
}
public String getTransactionDateDD() {
return transactionDateDD;
}
public void setTransactionDateDD(String transactionDateDD) {
this.transactionDateDD = transactionDateDD;
}
public String getTransactionDateMM() {
return transactionDateMM;
}
public void setTransactionDateMM(String transactionDateMM) {
this.transactionDateMM = transactionDateMM;
}
public String getTransactionDateYY() {
return transactionDateYY;
}
public void setTransactionDateYY(String transactionDateYY) {
this.transactionDateYY = transactionDateYY;
}
public Short getCurrencyId() {
return this.currencyId;
}
public void setCurrencyId(Short currencyId) {
this.currencyId = currencyId;
}
public String getWaiverInterest() {
return waiverInterest;
}
public void setWaiverInterest(String waiverInterest) {
this.waiverInterest = waiverInterest;
}
public LocalDate getReceiptDateAsLocalDate() throws InvalidDateException {
Date receiptDateStr = getDateAsSentFromBrowser(getReceiptDate());
return (receiptDateStr != null) ? new LocalDate(receiptDateStr.getTime()) : null;
}
public LocalDate getTrxnDateAsLocalDate() throws InvalidDateException {
return new LocalDate(getTrxnDate().getTime());
}
public Date getTrxnDate() throws InvalidDateException {
return getDateAsSentFromBrowser(getTransactionDate());
}
// Records the date of the most recent payment on the account (set by the
// controller, not by the form itself).
public void setLastPaymentDate(java.util.Date lastPaymentDate) {
    this.lastPaymentDate = lastPaymentDate;
}

// --- Fields used when the payment is made by transfer from another account.
public String getAccountForTransfer() {
    return accountForTransfer;
}

public void setAccountForTransfer(String accountForTransfer) {
    this.accountForTransfer = accountForTransfer;
}

public Short getTransferPaymentTypeId() {
    return transferPaymentTypeId;
}

public void setTransferPaymentTypeId(Short transferPaymentTypeId) {
    this.transferPaymentTypeId = transferPaymentTypeId;
}
}
| jpodeszwik/mifos | application/src/main/java/org/mifos/accounts/struts/actionforms/AccountApplyPaymentActionForm.java | Java | apache-2.0 | 15,538 |
package org.apache.activemq.nob.filestore.uuiddir;
import org.apache.activemq.nob.filestore.BrokerFilenameDecoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.UUID;
/**
* Decoder of filenames in a UUID-based filesystem store of broker configuration files. This store only supports
* broker IDs in the form of UUIDs.
*
* Created by art on 2/19/15.
*/
public class UUIDDirectoryStoreFilenameDecoder implements BrokerFilenameDecoder {

    public static final String XBEAN_FILE_PATH_SUFFIX = "-xbean.xml";

    private static final Logger DEFAULT_LOGGER = LoggerFactory.getLogger(UUIDDirectoryStoreFilenameDecoder.class);

    private Logger LOG = DEFAULT_LOGGER;

    /**
     * Extract a broker ID from the given path. Only regular files (not
     * directories) whose names parse as UUIDs are accepted; the returned ID is
     * the canonical string form of the parsed UUID.
     *
     * @param brokerPath path to the candidate broker.
     * @return the broker ID, or null when the path is a directory or its name
     *         is not a valid UUID.
     */
    @Override
    public String extractIdFromFilename(File brokerPath) {
        if (brokerPath.isDirectory()) {
            return null;
        }

        String candidate = brokerPath.getName();
        try {
            UUID parsed = UUID.fromString(candidate);
            if (parsed == null) {
                return null;
            }
            return parsed.toString();
        } catch ( IllegalArgumentException illegalArgExc ) {
            LOG.debug("invalid UUID {}", candidate);
            return null;
        }
    }

    /**
     * Locate the xbean configuration file for the broker at the given path.
     * The broker path is validated first (it must name a UUID), since a
     * configuration file only makes sense for a valid broker.
     *
     * @param brokerPath path to the broker.
     * @return path to the xbean configuration file (which may not exist on
     *         disk), or null when the broker path is invalid.
     */
    @Override
    public File getBrokerXbeanFile(File brokerPath) {
        if (this.extractIdFromFilename(brokerPath) == null) {
            return null;
        }
        return new File(brokerPath.getPath() + XBEAN_FILE_PATH_SUFFIX);
    }
}
| hzbarcea/activemq-nob | activemq-nob-filestore/src/main/java/org/apache/activemq/nob/filestore/uuiddir/UUIDDirectoryStoreFilenameDecoder.java | Java | apache-2.0 | 2,083 |
<?php
// +----------------------------------------------------------------------
// | ThinkPHP [ WE CAN DO IT JUST THINK IT ]
// +----------------------------------------------------------------------
// | Copyright (c) 2006-2014 http://thinkphp.cn All rights reserved.
// +----------------------------------------------------------------------
// | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 )
// +----------------------------------------------------------------------
// | Author: liu21st <liu21st@gmail.com>
// +----------------------------------------------------------------------

// NOTE(review): this file previously contained an unresolved VCS merge
// conflict ("<<<<<<< HEAD" ... ">>>>>>>") whose two sides were byte-identical.
// The conflict markers were removed and a single copy kept; no configuration
// value was changed.

/**
 * ThinkPHP惯例配置文件
 * 该文件请不要修改,如果要覆盖惯例配置的值,可在应用配置文件中设定和惯例不符的配置项
 * 配置名称大小写任意,系统会统一转换成小写
 * 所有配置参数都可以在生效前动态改变
 */
defined('THINK_PATH') or exit();
return array(
    /* 应用设定 */
    'APP_USE_NAMESPACE'     =>  true,    // 应用类库是否使用命名空间
    'APP_SUB_DOMAIN_DEPLOY' =>  false,   // 是否开启子域名部署
    'APP_SUB_DOMAIN_RULES'  =>  array(), // 子域名部署规则
    'APP_DOMAIN_SUFFIX'     =>  '',      // 域名后缀 如果是com.cn net.cn 之类的后缀必须设置
    'ACTION_SUFFIX'         =>  '',      // 操作方法后缀
    'MULTI_MODULE'          =>  true,    // 是否允许多模块 如果为false 则必须设置 DEFAULT_MODULE
    'MODULE_DENY_LIST'      =>  array('Common','Runtime'),
    'CONTROLLER_LEVEL'      =>  1,
    'APP_AUTOLOAD_LAYER'    =>  'Controller,Model', // 自动加载的应用类库层 关闭APP_USE_NAMESPACE后有效
    'APP_AUTOLOAD_PATH'     =>  '',      // 自动加载的路径 关闭APP_USE_NAMESPACE后有效

    /* Cookie设置 */
    'COOKIE_EXPIRE'         =>  0,       // Cookie有效期
    'COOKIE_DOMAIN'         =>  '',      // Cookie有效域名
    'COOKIE_PATH'           =>  '/',     // Cookie路径
    'COOKIE_PREFIX'         =>  '',      // Cookie前缀 避免冲突
    'COOKIE_SECURE'         =>  false,   // Cookie安全传输
    'COOKIE_HTTPONLY'       =>  '',      // Cookie httponly设置

    /* 默认设定 */
    'DEFAULT_M_LAYER'       =>  'Model',      // 默认的模型层名称
    'DEFAULT_C_LAYER'       =>  'Controller', // 默认的控制器层名称
    'DEFAULT_V_LAYER'       =>  'View',       // 默认的视图层名称
    'DEFAULT_LANG'          =>  'zh-cn',      // 默认语言
    'DEFAULT_THEME'         =>  '',           // 默认模板主题名称
    'DEFAULT_MODULE'        =>  'Home',       // 默认模块
    'DEFAULT_CONTROLLER'    =>  'Index',      // 默认控制器名称
    'DEFAULT_ACTION'        =>  'index',      // 默认操作名称
    'DEFAULT_CHARSET'       =>  'utf-8',      // 默认输出编码
    'DEFAULT_TIMEZONE'      =>  'PRC',        // 默认时区
    'DEFAULT_AJAX_RETURN'   =>  'JSON',       // 默认AJAX 数据返回格式,可选JSON XML ...
    'DEFAULT_JSONP_HANDLER' =>  'jsonpReturn',      // 默认JSONP格式返回的处理方法
    'DEFAULT_FILTER'        =>  'htmlspecialchars', // 默认参数过滤方法 用于I函数...

    /* 数据库设置 */
    'DB_TYPE'               =>  '',      // 数据库类型
    'DB_HOST'               =>  '',      // 服务器地址
    'DB_NAME'               =>  '',      // 数据库名
    'DB_USER'               =>  '',      // 用户名
    'DB_PWD'                =>  '',      // 密码
    'DB_PORT'               =>  '',      // 端口
    'DB_PREFIX'             =>  '',      // 数据库表前缀
    'DB_PARAMS'             =>  array(), // 数据库连接参数
    'DB_DEBUG'              =>  TRUE,    // 数据库调试模式 开启后可以记录SQL日志
    'DB_FIELDS_CACHE'       =>  true,    // 启用字段缓存
    'DB_CHARSET'            =>  'utf8',  // 数据库编码默认采用utf8
    'DB_DEPLOY_TYPE'        =>  0,       // 数据库部署方式:0 集中式(单一服务器),1 分布式(主从服务器)
    'DB_RW_SEPARATE'        =>  false,   // 数据库读写是否分离 主从式有效
    'DB_MASTER_NUM'         =>  1,       // 读写分离后 主服务器数量
    'DB_SLAVE_NO'           =>  '',      // 指定从服务器序号

    /* 数据缓存设置 */
    'DATA_CACHE_TIME'       =>  0,         // 数据缓存有效期 0表示永久缓存
    'DATA_CACHE_COMPRESS'   =>  false,     // 数据缓存是否压缩缓存
    'DATA_CACHE_CHECK'      =>  false,     // 数据缓存是否校验缓存
    'DATA_CACHE_PREFIX'     =>  '',        // 缓存前缀
    'DATA_CACHE_TYPE'       =>  'File',    // 数据缓存类型,支持:File|Db|Apc|Memcache|Shmop|Sqlite|Xcache|Apachenote|Eaccelerator
    'DATA_CACHE_PATH'       =>  TEMP_PATH, // 缓存路径设置 (仅对File方式缓存有效)
    'DATA_CACHE_KEY'        =>  '',        // 缓存文件KEY (仅对File方式缓存有效)
    'DATA_CACHE_SUBDIR'     =>  false,     // 使用子目录缓存 (自动根据缓存标识的哈希创建子目录)
    'DATA_PATH_LEVEL'       =>  1,         // 子目录缓存级别

    /* 错误设置 */
    'ERROR_MESSAGE'         =>  '页面错误!请稍后再试~', //错误显示信息,非调试模式有效
    'ERROR_PAGE'            =>  '',      // 错误定向页面
    'SHOW_ERROR_MSG'        =>  false,   // 显示错误信息
    'TRACE_MAX_RECORD'      =>  100,     // 每个级别的错误信息 最大记录数

    /* 日志设置 */
    'LOG_RECORD'            =>  false,   // 默认不记录日志
    'LOG_TYPE'              =>  'File',  // 日志记录类型 默认为文件方式
    'LOG_LEVEL'             =>  'EMERG,ALERT,CRIT,ERR', // 允许记录的日志级别
    'LOG_FILE_SIZE'         =>  2097152, // 日志文件大小限制
    'LOG_EXCEPTION_RECORD'  =>  false,   // 是否记录异常信息日志

    /* SESSION设置 */
    'SESSION_AUTO_START'    =>  true,    // 是否自动开启Session
    'SESSION_OPTIONS'       =>  array(), // session 配置数组 支持type name id path expire domain 等参数
    'SESSION_TYPE'          =>  '',      // session hander类型 默认无需设置 除非扩展了session hander驱动
    'SESSION_PREFIX'        =>  '',      // session 前缀
    //'VAR_SESSION_ID'      =>  'session_id',     //sessionID的提交变量

    /* 模板引擎设置 */
    'TMPL_CONTENT_TYPE'     =>  'text/html', // 默认模板输出类型
    'TMPL_ACTION_ERROR'     =>  THINK_PATH.'Tpl/dispatch_jump.tpl',  // 默认错误跳转对应的模板文件
    'TMPL_ACTION_SUCCESS'   =>  THINK_PATH.'Tpl/dispatch_jump.tpl',  // 默认成功跳转对应的模板文件
    'TMPL_EXCEPTION_FILE'   =>  THINK_PATH.'Tpl/think_exception.tpl',// 异常页面的模板文件
    'TMPL_DETECT_THEME'     =>  false,       // 自动侦测模板主题
    'TMPL_TEMPLATE_SUFFIX'  =>  '.html',     // 默认模板文件后缀
    'TMPL_FILE_DEPR'        =>  '/', //模板文件CONTROLLER_NAME与ACTION_NAME之间的分割符

    // 布局设置
    'TMPL_ENGINE_TYPE'      =>  'Think',     // 默认模板引擎 以下设置仅对使用Think模板引擎有效
    'TMPL_CACHFILE_SUFFIX'  =>  '.php',      // 默认模板缓存后缀
    'TMPL_DENY_FUNC_LIST'   =>  'echo,exit', // 模板引擎禁用函数
    'TMPL_DENY_PHP'         =>  false,       // 默认模板引擎是否禁用PHP原生代码
    'TMPL_L_DELIM'          =>  '{',         // 模板引擎普通标签开始标记
    'TMPL_R_DELIM'          =>  '}',         // 模板引擎普通标签结束标记
    'TMPL_VAR_IDENTIFY'     =>  'array',     // 模板变量识别。留空自动判断,参数为'obj'则表示对象
    'TMPL_STRIP_SPACE'      =>  true,        // 是否去除模板文件里面的html空格与换行
    'TMPL_CACHE_ON'         =>  true,        // 是否开启模板编译缓存,设为false则每次都会重新编译
    'TMPL_CACHE_PREFIX'     =>  '',          // 模板缓存前缀标识,可以动态改变
    'TMPL_CACHE_TIME'       =>  0,           // 模板缓存有效期 0 为永久,(以数字为值,单位:秒)
    'TMPL_LAYOUT_ITEM'      =>  '{__CONTENT__}', // 布局模板的内容替换标识
    'LAYOUT_ON'             =>  false,       // 是否启用布局
    'LAYOUT_NAME'           =>  'layout',    // 当前布局名称 默认为layout

    // Think模板引擎标签库相关设定
    'TAGLIB_BEGIN'          =>  '<',     // 标签库标签开始标记
    'TAGLIB_END'            =>  '>',     // 标签库标签结束标记
    'TAGLIB_LOAD'           =>  true,    // 是否使用内置标签库之外的其它标签库,默认自动检测
    'TAGLIB_BUILD_IN'       =>  'cx',    // 内置标签库名称(标签使用不必指定标签库名称),以逗号分隔 注意解析顺序
    'TAGLIB_PRE_LOAD'       =>  '',      // 需要额外加载的标签库(须指定标签库名称),多个以逗号分隔

    /* URL设置 */
    'URL_CASE_INSENSITIVE'  =>  true,    // 默认false 表示URL区分大小写 true则表示不区分大小写
    'URL_MODEL'             =>  1,       // URL访问模式,可选参数0、1、2、3,代表以下四种模式:
    // 0 (普通模式); 1 (PATHINFO 模式); 2 (REWRITE  模式); 3 (兼容模式)  默认为PATHINFO 模式
    'URL_PATHINFO_DEPR'     =>  '/', // PATHINFO模式下,各参数之间的分割符号
    'URL_PATHINFO_FETCH'    =>  'ORIG_PATH_INFO,REDIRECT_PATH_INFO,REDIRECT_URL', // 用于兼容判断PATH_INFO 参数的SERVER替代变量列表
    'URL_REQUEST_URI'       =>  'REQUEST_URI', // 获取当前页面地址的系统变量 默认为REQUEST_URI
    'URL_HTML_SUFFIX'       =>  'html',  // URL伪静态后缀设置
    'URL_DENY_SUFFIX'       =>  'ico|png|gif|jpg', // URL禁止访问的后缀设置
    'URL_PARAMS_BIND'       =>  true,    // URL变量绑定到Action方法参数
    'URL_PARAMS_BIND_TYPE'  =>  0,       // URL变量绑定的类型 0 按变量名绑定 1 按变量顺序绑定
    'URL_PARAMS_FILTER'     =>  false,   // URL变量绑定过滤
    'URL_PARAMS_FILTER_TYPE'=>  '',      // URL变量绑定过滤方法 如果为空 调用DEFAULT_FILTER
    'URL_ROUTER_ON'         =>  false,   // 是否开启URL路由
    'URL_ROUTE_RULES'       =>  array(), // 默认路由规则 针对模块
    'URL_MAP_RULES'         =>  array(), // URL映射定义规则

    /* 系统变量名称设置 */
    'VAR_MODULE'            =>  'm',     // 默认模块获取变量
    'VAR_ADDON'             =>  'addon', // 默认的插件控制器命名空间变量
    'VAR_CONTROLLER'        =>  'c',     // 默认控制器获取变量
    'VAR_ACTION'            =>  'a',     // 默认操作获取变量
    'VAR_AJAX_SUBMIT'       =>  'ajax',  // 默认的AJAX提交变量
    'VAR_JSONP_HANDLER'     =>  'callback',
    'VAR_PATHINFO'          =>  's',     // 兼容模式PATHINFO获取变量例如 ?s=/module/action/id/1 后面的参数取决于URL_PATHINFO_DEPR
    'VAR_TEMPLATE'          =>  't',     // 默认模板切换变量
    'VAR_AUTO_STRING'       =>  false,   // 输入变量是否自动强制转换为字符串 如果开启则数组变量需要手动传入变量修饰符获取变量

    'HTTP_CACHE_CONTROL'    =>  'private',  // 网页缓存控制
    'CHECK_APP_DIR'         =>  true,       // 是否检查应用目录是否创建
    'FILE_UPLOAD_TYPE'      =>  'Local',    // 文件上传方式
    'DATA_CRYPT_TYPE'       =>  'Think',    // 数据加密方式
);
| zhoujiangyou/happychou | Inc/Conf/convention.php | PHP | apache-2.0 | 22,628 |
"use strict";
import {Vector2, Vector3, Matrix4, Vector4} from 'vectormath';
/*
NOTE: this was originally a WebGL UI library I wrote.
it's gone through several transitions since then, and
is now a canvas2d UI library (bleh). the code is quite
horrible.
*/
#include "src/utils/utildefine.js"
//we keep track of any canvases with non-GC managed data,
//(gl objects, TriListAlloc, TA_Alloc, etc) to avoid reference leaks
//g_app_state.reset calls .destroy() on all canvases inside this list.
//(then resets it back to {}).
window.active_canvases = {};
//monotonically increasing id handed out to draw objects (TriList/TextDraw)
window._canvas_draw_id = 1;

//disable use of theoretically faster typed array allocator,
//for now.
//#ifdef NOCACHE
#define F32ALLOC(verts) new Float32Array(verts);
#define F32FREE(verts) verts = undefined;
/*#else
-#define F32ALLOC(verts123) f32_alloc.from_array(verts123);
-#define F32FREE(verts123) if (verts123 != undefined) { f32_alloc.free(verts123); verts123 = undefined;}
#endif
*/
//
//
//stupid statics
//NOTE(review): module-level scratch vectors reused across calls to avoid
//per-frame allocations; code using them is not re-entrant.
var _trilist_n0 = new Vector3(); var _trilist_n1 = new Vector3()
var _trilist_n2 = new Vector3(); var _trilist_n3 = new Vector3()
var _trilist_v1 = new Vector3(); var _trilist_v2 = new Vector3()
var _trilist_v3 = new Vector3(); var _trilist_v4 = new Vector3()
var _trilist_c1 = new Vector4(); var _trilist_c2 = new Vector4()
var _trilist_c3 = new Vector4(); var _trilist_c4 = new Vector4()
var _trilist_v5 = new Vector3(); var _trilist_v6 = new Vector3();
var _trilist_v7 = new Vector3(); var _trilist_v8 = new Vector3();
var _trilist_v9 = new Vector3();

//maximum number of TriLists kept in TriListAlloc's free list
#define TRILIST_CACHE_SIZE 8192
/*I hate garbage collected languages. Bleh! This class
is necessary to avoid object allocations within draw frames.
evil!*/
/* Pooled allocator for TriList objects.  Avoids per-frame garbage-collector
   pressure by recycling TriLists through a free list capped at
   TRILIST_CACHE_SIZE entries.  With NOCACHE defined, caching is bypassed
   entirely and every call allocates a fresh TriList. */
export class TriListAlloc {
  constructor() {
    this.freelist = [];   //recycled TriLists available for reuse
    this.freecount = 0;   //number of entries in freelist
    this.usedcount = 0;   //TriLists currently handed out
    this.peakcount = 0;   //high-water mark of simultaneously used lists
  }

  /* Returns a TriList bound to canvas/transmat, reusing a pooled instance
     when one is available.  On first use (usedcount == 0, empty pool) the
     free list is pre-filled to saturation. */
  alloc(UICanvas canvas, Matrix4 transmat, Boolean use_small_icons=false) : TriList {
    this.peakcount = Math.max(this.peakcount, this.usedcount+1);

#ifdef NOCACHE
    return new TriList(canvas, transmat, use_small_icons);
#endif
    if (this.freecount == 0) {
      //ensure a saturated cache
      if (this.usedcount == 0) {
        for (var i=0; i<TRILIST_CACHE_SIZE; i++) {
          var tl = new TriList(canvas, transmat, use_small_icons);
          tl.cache_destroy();
          this.freelist.push(tl);
          this.freecount++;
        }
      }

      this.usedcount++;
      return new TriList(canvas, transmat, use_small_icons);
    } else {
      //console.log("using cached trilist", this.freecount, this.freelist.length);
      var ret = this.freelist.pop();
      ret.cache_init(canvas, transmat, use_small_icons);
      this.freecount--;
      this.usedcount++;
      return ret;
    }
  }

  /* Returns a TriList to the pool (or abandons it to the GC when the pool
     is full or NOCACHE is defined). */
  free(TriList trilist) {
    this.usedcount--;

#ifdef NOCACHE
    trilist.cache_destroy();
    return;
#endif
    //abandon trilist to the GC
    if (this.freecount >= TRILIST_CACHE_SIZE)
      return;

    trilist.cache_destroy();
    this.freelist.push(trilist);
    this.freecount++;
  }
}
//module-wide singleton allocator shared by all canvases
var _talloc = new TriListAlloc();
/*
TriList is being refactored to make it more usable.
vertex buffers will be reused, and transformation
matrices will be passed in at draw time, not applied
at vertex generation time.
*/
/* Lightweight handle pairing a (possibly shared) TriList with a per-draw
   transformation matrix.  At draw time the stored matrix is composed with
   the owning canvas' global matrix and installed on the trilist before
   delegating to its on_draw. */
export class TriListRef {
  constructor(gl, TriList trilist, Matrix4 mat, UICanvas canvas) {
    this.trilist = trilist;
    this.mat = mat;
    this.workmat = new Matrix4();  //scratch matrix rebuilt each draw
    this.gl = gl;
    this.canvas = canvas;
  }

  destroy() {
    //intentionally empty: the referenced trilist owns any GL resources
  }

  on_draw(WebGLRenderingContext gl) {
    this.workmat.load(this.mat);
    this.workmat.multiply(this.canvas.global_matrix);

    this.trilist.global_matrix = this.workmat;
    this.trilist.on_draw(gl);
  }
}
export class TriListCache {
constructor(limit=100) {
this.cache = {};
this.length = 0;
this.limit = limit;
}
get(String key) : TriList {
return this.cache[key];
}
has(String key) : Boolean {
return key in this.cache;
}
set(String key, TriList trilist) {
if (!(key in this.cache)) {
this.length++;
}
this.cache[key] = trilist;
}
remove(String key) {
if (key in this.cache) {
this.cache[key].destroy();
this.length--;
delete this.cache[key];
}
}
destroy() {
for (var k in this.cache) {
var tl = this.cache[k];
tl.destroy();
}
this.cache = {};
this.length = 0;
}
on_gl_lost() {
this.length = 0;
this.cache = {};
}
}
/* Batched list of screen-space triangles (positions, per-vertex RGBA colors
   and optional icon-sheet texture coordinates).  Geometry is accumulated in
   plain JS arrays, uploaded to GL buffers on demand, and drawn with the
   basic2d shader.  Instances are pooled via TriListAlloc (cache_init /
   cache_destroy). */
export class TriList {
  /* Pool hook: release typed-array/GL-side data and empty the geometry
     arrays so the instance can sit on the free list. */
  cache_destroy() {
    this._free_typed();

    this.verts.length = 0;
    this.texcos.length = 0;
    this.colors.length = 0;
    this.tottri = 0;

    //this.canvas = undefined;
    //this.iconsheet = undefined;
    //this.viewport = undefined;
    this._dead = true;
  }

  /* Frees the Float32Array staging buffers (see F32FREE macro). */
  _free_typed() {
    //f32free sets vertbuf/colorbuf/texbuf to undefined
    F32FREE(this.vertbuf);
    F32FREE(this.colorbuf);
    F32FREE(this.texbuf);
  }

  /* Pool hook: re-initialize a recycled instance; mirrors the constructor's
     field setup without reallocating the geometry arrays. */
  cache_init(UICanvas canvas, Matrix4 transmat, Boolean use_small_icons=false) {
    this._dead = false;
    this.transmat = transmat;
    this.global_matrix = canvas.global_matrix;

    this.use_tex = 1;
    this.tex = 0 : WebGLTexture;
    this.iconsheet = use_small_icons ? g_app_state.raster.iconsheet16 : g_app_state.raster.iconsheet;
    this.small_icons = use_small_icons;

    this.verts.length = 0;
    this.colors.length = 0;
    this.texcos.length = 0;

    this.recalc = 1
    this.tottri = 0;
    this.canvas = canvas

    this.spos = undefined : Array<float>;
    this.ssize = undefined : Array<float>;
    this.gl_spos = undefined : Array<float>;
    this.gl_ssize = undefined : Array<float>;

    this.viewport = canvas != undefined ? canvas.viewport : undefined;
  }

  constructor(UICanvas canvas, Matrix4 transmat, Boolean use_small_icons=false) {
    this._id = _canvas_draw_id++;

    this.transmat = transmat;
    this.global_matrix = canvas.global_matrix;

    this.verts = [];   //flat [x,y,z, ...] positions, already NDC-mapped
    this.colors = [];  //flat [r,g,b,a, ...] per-vertex colors
    this.texcos = [];  //flat [u,v, ...] icon-sheet texcoords

    this._dead = false;

    this.vertbuf = undefined;
    this.colorbuf = undefined;
    this.texbuf = undefined;

    this.use_tex = 1;
    this.tex = 0 : WebGLTexture;
    this.iconsheet = use_small_icons ? g_app_state.raster.iconsheet16 : g_app_state.raster.iconsheet;
    this.small_icons = use_small_icons;

    this.recalc = 1
    this.tottri = 0;
    this.canvas = canvas

    this.spos = undefined : Array<float>;
    this.ssize = undefined : Array<float>;
    this.gl_spos = undefined : Array<float>;
    this.gl_ssize = undefined : Array<float>;

    this.viewport = canvas != undefined ? canvas.viewport : undefined;
  }

  /* Appends one triangle.  Positions are copied into static scratch
     vectors, run through transform() (canvas transform + NDC mapping), and
     flattened into the arrays.  c2/c3 default to c1; missing texcoords are
     recorded as (-1,-1), which the shader presumably treats as "untextured"
     — TODO confirm against the basic2d shader. */
  add_tri(Array<float> v1, Array<float> v2, Array<float> v3,
          Array<float> c1, Array<float> c2, Array<float> c3,
          Array<float> t1, Array<float> t2, Array<float> t3)
  {
    var vs = this.verts;
    this.tottri++;

    static v12 = new Vector3();
    static v22 = new Vector3();
    static v32 = new Vector3();

    v12.loadxy(v1); v22.loadxy(v2); v32.loadxy(v3);
    v1 = v12; v2 = v22; v3 = v32;

    this.transform(v1); this.transform(v2); this.transform(v3);

    vs.push(v1[0]); vs.push(v1[1]); vs.push(v1[2]);
    vs.push(v2[0]); vs.push(v2[1]); vs.push(v2[2]);
    vs.push(v3[0]); vs.push(v3[1]); vs.push(v3[2]);

    var cs = this.colors
    if (c2 == undefined) {
      c2 = c1;
      c3 = c1;
    }

    cs.push(c1[0]); cs.push(c1[1]); cs.push(c1[2]); cs.push(c1[3]);
    cs.push(c2[0]); cs.push(c2[1]); cs.push(c2[2]); cs.push(c2[3]);
    cs.push(c3[0]); cs.push(c3[1]); cs.push(c3[2]); cs.push(c3[3]);

    if (this.use_tex) {
      if (t1 == undefined) {
        static negone = [-1, -1];
        t1 = t2 = t3 = negone;
      }

      var ts = this.texcos
      ts.push(t1[0]); ts.push(t1[1]); ts.push(t2[0]); ts.push(t2[1]);
      ts.push(t3[0]); ts.push(t3[1])
    }
  }

  /* Appends a quad as two triangles (v1-v2-v3 and v1-v3-v4). */
  add_quad(Vector3 v1, Vector3 v2, Vector3 v3, Vector3 v4,
           Array<float> c1,Array<float> c2,Array<float> c3,
           Array<float> c4,Array<float> t1,Array<float> t2,
           Array<float> t3,Array<float> t4)
  {
    this.add_tri(v1, v2, v3, c1, c2, c3, t1, t2, t3);
    this.add_tri(v1, v3, v4, c1, c3, c4, t1, t3, t4);
  }

  /* Appends an icon-sheet cell as a quad at pos, with per-corner colors cs.
     Texcoords come from the iconsheet's gen_tile (six vertices' worth). */
  icon_quad(int icon, Vector3 pos, Array<Array<float>> cs)
  {
    static tcos = new Array(0);

    //var clr = [1, 1, 1, 1];
    //var cs = [clr, clr, clr, clr];

    var cw = this.iconsheet.cellsize[0], ch = this.iconsheet.cellsize[1];

    var v1 = new Vector3([pos[0], pos[1], 0.0]);
    var v2 = new Vector3([pos[0], pos[1]+ch, 0.0]);
    var v3 = new Vector3([pos[0]+cw, pos[1]+ch, 0.0]);
    var v4 = new Vector3([pos[0]+cw, pos[1], 0.0]);

    tcos.length = 0;
    this.iconsheet.gen_tile(icon, tcos);

    var t1 = new Vector3([tcos[0], tcos[1], 0]);
    var t2 = new Vector3([tcos[2], tcos[3], 0]);
    var t3 = new Vector3([tcos[4], tcos[5], 0]);
    var t4 = new Vector3([tcos[6], tcos[7], 0]);
    var t5 = new Vector3([tcos[8], tcos[9], 0]);
    var t6 = new Vector3([tcos[10], tcos[11], 0]);

    this.add_tri(v1, v2, v3, cs[0], cs[1], cs[2], t1, t2, t3);
    this.add_tri(v1, v3, v4, cs[0], cs[2], cs[3], t4, t5, t6);
  }

  /* Applies the canvas transform matrix, then maps pixel coordinates into
     normalized device coordinates ([-1,1] range) using the viewport size. */
  transform(v) {
    static transvec = new Vector3();
    transvec[0] = v[0];
    transvec[1] = v[1];
    transvec[2] = 0.0;

    transvec.multVecMatrix(this.transmat);

    v[0] = (transvec[0]/this.viewport[1][0])*2.0 - 1.0;
    v[1] = (transvec[1]/this.viewport[1][1])*2.0 - 1.0;
  }

  /* Convenience wrapper: a single line segment.
     NOTE(review): the arguments packed via CACHEARR2 and the undefined
     second parameter do not obviously match line_strip(lines, colors, ...);
     confirm this path is exercised and behaves as intended. */
  line(v1, v2, c1, c2=undefined, width=undefined) { //c2 and width are optional
    if (c2 == undefined) {
      c2 = c1;
    }

    if (v1.length == 2) v1.push(0);
    if (v2.length == 2) v2.push(0);

    this.line_strip(CACHEARR2(CACHEARR2(v1, v2), CACHEARR2(c1, c2)), undefined, width);
    //this.line_strip(objcache.getarr(objcache.getarr(v1, v2), objcache.getarr(c1, c2)), undefined, width);
  }

  /* Emits thick lines as quads.  For each segment, edge normals of the
     previous/current/next segments are averaged to miter the joins; an
     outer quad with alpha fading to zero is added for wide lines,
     presumably as an antialiasing fringe — TODO confirm intent. */
  line_strip(lines, colors, texcos=undefined, width=2.0, half=false) {
    static black = new Vector4([0.0, 0.0, 0.0, 1.0]);
    static v0 = new Vector3(), v1 = new Vector3(), v2 = new Vector3();
    static v3 = new Vector3(), v4 = new Vector3(), n0 = new Vector3();
    static n1 = new Vector3(), n2 = new Vector3(), c3 = new Vector3();
    static c4 = new Vector3();

    for (var i =0; i<lines.length; i++) {
      var lc1 = colors[i][0], lc2 = colors[i][1];

      //if (lines[i][0].length == 2) lines[i][0].push(0);
      //if (lines[i][1].length == 2) lines[i][1].push(0);

      if (lc1 == undefined) lc1 = black;
      if (lc2 == undefined) lc2 = black;

      var z = 0.0;

      v1.loadxy(lines[i][0])
      v2.loadxy(lines[i][1])

      n0.zero(); n1.zero(); n2.zero();

      //normal of the current segment
      v1.loadxy(lines[i][1]);
      v1.sub(lines[i][0]);
      v1.normalize();

      n1[0] = v1[1];
      n1[1] = -v1[0];
      n1[2] = z;
      n1.normalize()

      //normal of the previous segment (falls back to current at the start)
      if (i > 0) {
        v0.loadxy(lines[i-1][1]);
        v0.sub(lines[i-1][0])
        v0.normalize();

        n0[0] = v0[1];
        n0[1] = -v0[0];
        n0[2] = z;
        n0.normalize()
      } else {
        n0.load(n1);
      }

      v1.loadxy(lines[i][1]);
      v1.sub(lines[i][0])

      //normal of the next segment (falls back to current at the end)
      if (i < lines.length-1) {
        v3.loadxy(lines[i+1][1]);
        v3.sub(lines[i+1][0]);
        v3.normalize();

        n2[0] = v3[1];
        n2[1] = -v3[0];
        n2[2] = z;
        n2.normalize()
      } else {
        n2.load(n1);
      }

      /*
      n0.normalize();
      n1.normalize();
      n2.normalize();

      n0.mulScalar(0.5);
      n1.mulScalar(0.5);
      n2.mulScalar(0.5);
      */

      //average adjacent normals to miter the joins, then scale to half-width
      n2.add(n1).normalize();
      n1.add(n0).normalize();

      n1.mulScalar(width*0.5);
      n2.mulScalar(width*0.5);

      v0.loadxy(lines[i][0]);
      v1.loadxy(lines[i][1]);

      v2.loadxy(lines[i][1]);
      v2.add(n1);
      v3.loadxy(lines[i][0]);
      v3.add(n2);

      var c1 = _trilist_c1.load(lc1); var c2 = _trilist_c2.load(lc2);
      var c3 = _trilist_c3.load(lc2); var c4 = _trilist_c4.load(lc1);

      //outer-edge colors fade to transparent on wide lines
      if (width >= 1.5) {
        c3[3] = 0.0;
        c4[3] = 0.0;
      }

      n1.mulScalar(2.0);
      n2.mulScalar(2.0);

      if (this.use_tex && texcos) {
        if (!half)
          this.add_quad(v0, v1, v2, v3, c1, c2, c3, c4, texcos[i][0],
                        texcos[i][1], texcos[i][0], texcos[i][1]);
        this.add_quad(v1, v0, v3.sub(n1), v2.sub(n2), c2, c1, c3, c4, texcos[i][0],
                      texcos[i][1], texcos[i][0], texcos[i][1]);
      } else {
        if (!half)
          this.add_quad(v0, v1, v2, v3, c1, c2, c3, c4);
        this.add_quad(v1, v0, v3.sub(n2), v2.sub(n1), c2, c1, c3, c4);
      }
    }
  }

  /* Frees GL buffers and typed arrays; with only_gl the instance stays
     alive (used before regenerating buffers), otherwise it is returned to
     the pool. */
  destroy(Boolean only_gl=false) {
    var gl = g_app_state.gl;

    if (this.vbuf) {
      gl.deleteBuffer(this.vbuf);
      gl.deleteBuffer(this.cbuf);
    }

    if (this.tbuf) {
      gl.deleteBuffer(this.tbuf);
    }

    this.vbuf = this.cbuf = this.tbuf = undefined;
    this.recalc = 1;

    this._free_typed();

    if (!only_gl) {
      this._dead = true;
      _talloc.free(this);
    }
  }

  /* Uploads the accumulated geometry into fresh GL buffers (attributes:
     0=position, 1=color, 2=texcoord) and clears the recalc flag. */
  gen_buffers(gl) {
    if (this.verts.length == 0)
      return;

    this.destroy(true);
    this._free_typed();
    this._dead = false;

    this.vertbuf = F32ALLOC(this.verts); //new Float32Array(this.verts)
    this.colorbuf = F32ALLOC(this.colors); //new Float32Array(this.colors)

    if (this.use_tex)
      this.texbuf = F32ALLOC(this.texcos); //new Float32Array(this.texcos);

    gl.enableVertexAttribArray(0);
    gl.enableVertexAttribArray(1);

    if (this.use_tex)
      gl.enableVertexAttribArray(2);
    else
      gl.disableVertexAttribArray(2);

    var vbuf = gl.createBuffer();

    gl.bindBuffer(gl.ARRAY_BUFFER, vbuf);
    gl.bufferData(gl.ARRAY_BUFFER, this.vertbuf, gl.STATIC_DRAW);
    gl.vertexAttribPointer(0, 3, gl.FLOAT, false, 0, 0);
    gl.bindBuffer(gl.ARRAY_BUFFER, vbuf);

    var cbuf = gl.createBuffer();

    gl.bindBuffer(gl.ARRAY_BUFFER, cbuf);
    gl.bufferData(gl.ARRAY_BUFFER, this.colorbuf, gl.STATIC_DRAW);
    gl.vertexAttribPointer(1, 4, gl.FLOAT, false, 0, 0);
    gl.bindBuffer(gl.ARRAY_BUFFER, cbuf);

    if (this.use_tex) {
      var tbuf = gl.createBuffer();

      gl.bindBuffer(gl.ARRAY_BUFFER, tbuf);
      gl.bufferData(gl.ARRAY_BUFFER, this.texbuf, gl.STATIC_DRAW);
      gl.vertexAttribPointer(2, 2, gl.FLOAT, false, 0, 0);
      gl.bindBuffer(gl.ARRAY_BUFFER, tbuf);

      this.tbuf = tbuf
    }

    this.vbuf = vbuf;
    this.cbuf = cbuf;

    gl.disableVertexAttribArray(1);
    gl.disableVertexAttribArray(2);

    this.recalc = 0;
  }

  /* Draws the batch with the basic2d shader: binds the icon sheet on
     texture unit 4, uploads the global matrix uniform and issues one
     drawArrays call.
     NOTE(review): the tdrawbuf check looks copied from TextDraw — TriList
     never assigns tdrawbuf; confirm whether it is dead code here. */
  on_draw(gl) {
    if (!this.iconsheet.ready)
      return;

    //if (this._dead)
    //  return;

    if (this.verts.length == 0)
      return;

    if (this.recalc || (this.tdrawbuf != undefined && this.tdrawbuf.is_dead)) {
      this.gen_buffers(gl);
    }

    if (this.ssize != undefined) {
      gl.enable(gl.SCISSOR_TEST);
     // g_app_state.raster.push_scissor(this.spos, this.ssize);
    }

    gl.disable(gl.DEPTH_TEST);
    gl.enable(gl.BLEND);

    gl_blend_func(gl);
    //gl.blendEquation(gl.BLEND_EQUATION);
    //gl.blendEquationSeparate(gl.BLEND_EQUATION, gl.BLEND_EQUATION);

    gl.enableVertexAttribArray(0);
    gl.enableVertexAttribArray(1);

    gl.disableVertexAttribArray(3);
    gl.disableVertexAttribArray(4);

    gl.bindBuffer(gl.ARRAY_BUFFER, this.vbuf);
    gl.vertexAttribPointer(0, 3, gl.FLOAT, false, 0, 0);
    gl.bindBuffer(gl.ARRAY_BUFFER, this.vbuf);

    gl.bindBuffer(gl.ARRAY_BUFFER, this.cbuf);
    gl.vertexAttribPointer(1, 4, gl.FLOAT, false, 0, 0);
    gl.bindBuffer(gl.ARRAY_BUFFER, this.cbuf);

    if (this.use_tex) {
      gl.enableVertexAttribArray(2);

      gl.bindBuffer(gl.ARRAY_BUFFER, this.tbuf);
      gl.vertexAttribPointer(2, 2, gl.FLOAT, false, 0, 0);
      gl.bindBuffer(gl.ARRAY_BUFFER, this.tbuf);
    } else {
      gl.disableVertexAttribArray(2);
    }

    gl.activeTexture(gl.TEXTURE4);
    gl.bindTexture(gl.TEXTURE_2D, this.iconsheet.tex);

    gl.useProgram(gl.basic2d.program);

    this.global_matrix.setUniform(gl, gl.basic2d.uniformloc(gl, "mat"));
    gl.uniform1i(gl.basic2d.uniformloc(gl, "iconsampler"), 4);

    //console.log(this.verts);
    //console.log(this.tottri*3, this.verts.length/3, this.colors.length/4, this.verts[0], this.verts[1], this.verts[2])

    gl.drawArrays(gl.TRIANGLES, 0,this.tottri*3);

    gl.disableVertexAttribArray(1);
    gl.disableVertexAttribArray(2);

    gl.enable(gl.DEPTH_TEST);

    if (this.ssize != undefined) {
      //g_app_state.raster.pop_scissor();
    }
  }
}
/*
var MAX_TRILIST_CACHE = 512;
var _trilist_frame_counter = 0;
var _trilist_template = {obj : new TriList(undefined, undefined), cachesize : MAX_TRILIST_CACHE};
function _reset_trilist_frame_counter() {
_trilist_frame_counter = 0;
}
function _new_trilist(UICanvas canvas) {
if (_trilist_frame_counter = 0 >= MAX_TRILIST_CACHE) {
return new TriList(canvas);
} else {
var list = objcache.fetch(_trilist_template)
TriList.call(list, canvas);
}
}
function _save_trilist(TriList trilist) {
if (objcache.is_cache_obj(trilist)) {
objcache.cache_remove(trilist);
}
}
*/
/* A single positioned/rotated/scaled run of text.  Vertex buffers are
   generated lazily by the raster's font object (gen_text_buffers) and are
   cached there, so destroy() deliberately does not free them. */
export class TextDraw {
  constructor(pos, text, color, spos, ssize, viewport, size, scale, global_matrix, rot=0) {
    this._id = _canvas_draw_id++;
    this.rot = rot;
    this.global_matrix = global_matrix;

    this.text = text;
    this.pos = [pos[0], pos[1], pos[2]];
    this.color = color;
    this.tdrawbuf = undefined : TextDrawBuffer;  //lazily built GL buffers

    this.spos = spos;    //scissor position (optional)
    this.ssize = ssize;  //scissor size (optional)
    this.asp = viewport[1][1] / viewport[1][0];  //viewport aspect ratio
    this.viewport = viewport;
    this.scale = [scale[0], scale[1], 0];

    this.size = size;
    this.raster = g_app_state.raster;

    var mat = new Matrix4();

    mat.translate(this.pos[0], this.pos[1], 0.0);

    //denormalize to avoid squashed rotations
    mat.scale(1, 1.0/this.asp, 1);
    mat.rotate(0, 0, rot);
    mat.scale(this.scale);

    //norrmalize again
    mat.scale(1, this.asp, 1);

    this.mat = mat;
  }

  destroy() {
    /*don't destroy the gl buffers here, since I'm
      now caching them*/

    /*
    if (this.tdrawbuf != undefined)
      this.tdrawbuf.destroy();

    this.tdrawbuf = undefined;
    */
  }

  toString() : String {
    return "TD" + this._id;
  }

  /* Builds (and caches on this instance) the text's vertex buffers via the
     font for this.size. */
  gen_buffers(gl) {
    this.tdrawbuf = this.raster.get_font(this.size).gen_text_buffers(gl, this.text, this.color, this.viewport);

    return this.tdrawbuf;
  }

  /* Composes global and local matrices and delegates drawing to the cached
     text draw buffer, generating it first if needed. */
  on_draw(gl) {
    static identitymat = new Matrix4();

    gl.disableVertexAttribArray(4);

    if (this.tdrawbuf == undefined)
      this.gen_buffers(gl);

    var spos, ssize;
    if (this.ssize != undefined) {
      spos = CACHEARR3(this.spos[0], this.spos[1], 0);
      ssize = CACHEARR3(this.ssize[0], this.ssize[1], 0);

     // g_app_state.raster.push_scissor(spos, ssize);
    }

    static mat = new Matrix4();
    mat.load(this.global_matrix);
    mat.multiply(this.mat);

    this.tdrawbuf.on_draw(gl, mat);

    if (this.ssize != undefined) {
      //g_app_state.raster.pop_scissor();
    }
  }
}
var _ls_static_colors = {reallength: 0, length: 0};
window._box_process_clr = function _box_process_clr(default_cs, clr) {
var cs = default_cs;
static arr4 = [0, 0, 0, 0];
if (clr != undefined) {
if (typeof clr == "number") {
var cs2 = arr4;
for (var i=0; i<4; i++) {
cs2[i] = CACHEARR4(cs[i][0], cs[i][1], cs[i][2], cs[i][3]);
for (var j=0; j<4; j++) {
cs2[i] *= clr;
}
}
cs = cs2;
} else if (typeof clr[0] == "number") {
var cs = arr4;
cs[0] = clr; cs[1] = clr; cs[2] = clr; cs[3] = clr;
} else {
cs = clr;
}
}
return cs;
}
//XXX XXX!
export class UICanvas_ {
constructor(viewport) {
static _id = 1;
this._id = _id++;
this.global_matrix = new Matrix4();
this.iconsheet = g_app_state.raster.iconsheet;
this.iconsheet16 = g_app_state.raster.iconsheet16;
this.viewport = viewport;
this.raster = g_app_state.raster;
this.trilist = _talloc.alloc(this, this.transmat);
this.textcache = {};
this.textcachelen = 0;
this.max_textcache = 64;
this.boxcache = new TriListCache();
this.trans_stack = []
this.transmat = new Matrix4()
this.drawlists = [this.trilist]
this.textlist = [];
this.stack = []
this.cache = new hashtable();
this.oldcache = new hashtable();
this.uncached = new Array();
this.uncached.push(this.trilist);
this.scissor_stack = new Array();
this.flag = 0;
}
ensure_trilist() {
if (this.drawlists.length == 0 || !(this.drawlists[this.drawlists.length-1] instanceof TriList)) {
this.new_trilist();
}
}
set_viewport(viewport) {
  //Adopt a new viewport ([pos, size]); if the *size* changed, all cached
  //geometry was built for the old coordinate space and must be flushed.
  var bad = false;
  for (var i=0; i<3; i++) {
    //NOTE(review): viewport[1] is used as 2d elsewhere in this file, so
    //i==2 compares undefined against undefined (harmless) — confirm.
    if (viewport[1][i] != this.viewport[1][i])
      bad = true;
  }
  this.viewport = viewport;
  if (bad) {
    //on_resize clears all caches; the old size isn't retained here, so
    //the new size is passed for both parameters
    this.on_resize(viewport[1], viewport[1]);
  }
}
on_gl_lost(WebGLRenderingContext new_gl) {
this.boxcache.on_gl_lost();
if (this.gl === new_gl) {
console.trace();
console.log("Warning: uicanvas.on_gl_lost() called multiple times");
return;
}
this.gl = new_gl;
this.drawlists = new Array();
this.iconsheet = g_app_state.raster.iconsheet;
this.iconsheet16 = g_app_state.raster.iconsheet16;
this.textcache = {};
this.textcachelen = 0;
this.stack = []
this.raster = g_app_state.raster;
this.cache = new hashtable();
this.oldcache = new hashtable();
this.boxcache = UICanvas.boxcache;
this.new_trilist();
//now that gl data is destroyed,
//call .reset to maintain data structure integrity
this.reset();
}
push_scissor(pos, size) {
  //Push a scissor rectangle given in canvas-local coordinates.  The rect
  //is run through the current transform and offset by the viewport
  //origin; a fresh trilist is started because scissor state is bound
  //per-trilist at draw time.
  var oldpos = pos;
  pos = new Vector3([pos[0], pos[1], 0]);
  size = new Vector3([size[0], size[1], 0]);
  pos.multVecMatrix(this.transmat);
  size.multVecMatrix(this.transmat);
  var vx=this.viewport[0][0], vy=this.viewport[0][1]
  pos[0] += vx; pos[1] += vy;
  //size was transformed as a point, so it picked up the translation
  //component too; subtract that delta so only scaling affects the size
  var dx = pos[0]-oldpos[0]-vx, dy = pos[1]-oldpos[1]-vy;
  size[0] -= dx; size[1] -= dy;
  //snap to whole pixels: floor the origin, ceil the extent
  for (var i=0; i<3; i++) {
    pos[i] = Math.floor(pos[i]);
    size[i] = Math.ceil(size[i]);
  }
  this.scissor_stack.push([pos, size]);
  this.new_trilist();
}
pop_scissor() {
this.scissor_stack.pop();
this.new_trilist();
}
new_trilist(Boolean use_small_icons=false) {
  //Start a fresh triangle list, make it the current one, and append it to
  //the draw list.  The active scissor rect (if any) is captured into it.
  //flag canvas for memory leak detection, see active_canvases's definition
  active_canvases[this._id] = this;
  this.trilist = _talloc.alloc(this, this.transmat, use_small_icons); //new TriList(this, use_small_icons);
  if (this.scissor_stack.length > 0) {
    this.trilist.spos = this.scissor_stack[this.scissor_stack.length-1][0];
    this.trilist.ssize = this.scissor_stack[this.scissor_stack.length-1][1];
  }
  this.drawlists.push(this.trilist);
  return this.trilist;
}
translate(Array<float> off) {
this.transmat.translate(off[0], off[1], 0.0);
}
push_transform(mat=undefined) {
this.trans_stack.push(new Matrix4(this.transmat));
if (mat != undefined)
this.transmat.multiply(mat);
}
pop_transform() {
this.transmat.load(this.trans_stack.pop());
}
frame_begin(Object item) {
return;//XXX
if (DEBUG.ui_canvas) {
console.log("canvas start, stack length: ", this.stack.length);
}
this.new_trilist();
this.stack.push(this.drawlists.length-1);
}
frame_end(Object item) {
return;//XXX
var arr = new Array()
var start = this.stack[this.stack.length-1];
this.stack.pop();
if (DEBUG.ui_canvas)
console.log(start);
for (var i=start; i<this.drawlists.length; i++) {
arr.push(this.drawlists[i]);
}
this.cache.set(item, arr);
this.new_trilist();
if (DEBUG.ui_canvas) {
console.log("canvas end, stack length: ", this.stack.length);
}
return arr;
}
begin(Object item) {
//okay, individual leaf element caching may not have been a good
//idea. . .
//-XXX
return;
if (DEBUG.ui_canvas) {
console.log("canvas start, stack length: ", this.stack.length);
}
this.new_trilist();
this.stack.push(this.drawlists.length-1);
}
end(Object item) {
//-XXX;
return;
var arr = new Array()
var start = this.stack.pop(this.stack.length-1);
if (DEBUG.ui_canvas)
console.log(start);
for (var i=start; i<this.drawlists.length; i++) {
arr.push(this.drawlists[i]);
}
this.stack.pop();
this.cache.set(item, arr);
this.new_trilist();
if (DEBUG.ui_canvas) {
console.log("canvas end, stack length: ", this.stack.length);
}
return arr;
}
use_cache(Object item) {
if (this.oldcache.has(item)) {
var arr = this.oldcache.get(item);
for (var i=0; i<arr.length; i++) {
this.drawlists.push(arr[i]);
if (arr[i] instanceof TextDraw)
this.textlist.push(arr[i]);
}
this.oldcache.remove(item);
this.cache.set(item, arr);
this.new_trilist();
}
}
has_cache(Object item) {
return this.oldcache.has(item);
}
remove_cache(Object item) {
if (this.oldcache.has(item))
this.oldcache.remove(item);
}
textsize(text, size=default_ui_font_size) {
var box = this.raster.get_font(size).calcsize(text);
return [box[0], box[1]];
}
line(v1, v2, c1, c2=c1, width=2.0) {
this.ensure_trilist();
this.line_strip([[v1, v2]], [[c1, c2]], undefined, width);
}
line_strip(lines, colors, texcos, width, half) {//colors,texcos,width are optional
this.ensure_trilist();
if (colors == undefined) {
colors = uicolors["DefaultLine"];
}
if (typeof(colors[0]) == "number") {
var clr = colors;
colors =_ls_static_colors;
for (var i=0; i<lines.length; i++) {
if (colors[i] == undefined) {
colors[i] = [clr, clr];
} else {
colors[i][0] = clr;
colors[i][1] = clr;
}
}
colors.reallength = Math.max(colors.reallength, i);
colors.length = i;
}
this.trilist.line_strip(lines, colors, texcos, width, half);
}
line_loop(points, colors, texcos, width, half) { //colors,texcos,width are optional
var lines = []
this.ensure_trilist();
if (colors == undefined) {
colors = uicolors["DefaultLine"];
}
var lcolors;
if (typeof colors[0] != "number")
lcolors = []
else
lcolors = []
for (var i=0; i<points.length; i++) {
var i2 = (i+1)%points.length;
lines.push([points[i], points[i2]]);
if (typeof(colors[0]) != "number") {
lcolors.push([colors[i], colors[i2]]);
} else {
lcolors.push([colors, colors]);
}
}
this.line_strip(lines, lcolors, undefined, width, half);
}
quad(v1, v2, v3, v4, c1, c2, c3, c4) {
this.ensure_trilist();
if (v1.length == 2)
v1.push(0);
if (v2.length == 2)
v2.push(0);
if (v3.length == 2)
v3.push(0);
if (v4.length == 2)
v4.push(0);
this.trilist.add_quad(v1, v2, v3, v4, c1, c2, c3, c4);
}
quad_aa(v1, v2, v3, v4, c1, c2, c3, c4) {
this.ensure_trilist();
if (v1.length == 2)
v1.push(0);
if (v2.length == 2)
v2.push(0);
if (v3.length == 2)
v3.push(0);
if (v4.length == 2)
v4.push(0);
if (c2 == undefined) {
c2 = c3 = c4 = c1;
}
this.trilist.add_quad(v1, v2, v3, v4, c1, c2, c3, c4);
var lines = [[v1, v4], [v4, v3], [v3, v2], [v2, v1]];
var colors = [[c1, c4], [c4, c3], [c3, c2], [c2, c1]];
this.trilist.line_strip(lines, colors, undefined, undefined, true)
}
tri(v1, v2, v3, c1, c2, c3) {
this.ensure_trilist();
if (v1.length == 2)
v1.push(0);
if (v2.length == 2)
v2.push(0);
if (v3.length == 2)
v3.push(0);
this.trilist.add_tri(v1, v2, v3, c1, c2, c3);
}
on_draw(gl) {
//Set the viewport and projection matrix for the scene
gl.viewport(this.viewport[0][0], this.viewport[0][1], this.viewport[1][0], this.viewport[1][1]);
var len = this.drawlists.length;
for (var i=0; i<len; i++) {
if (DEBUG.canvas_sep_text && this.drawlists[i] instanceof TextDraw)
continue;
this.drawlists[i].on_draw(gl);
}
if (DEBUG.canvas_sep_text) {
var len = this.textlist.length;
for (var i=0; i<len; i++) {
this.textlist[i].on_draw(gl);
}
}
}
arc_points(pos, start, arc, r, steps) {//steps is optional
if (steps == undefined) {
steps = Math.floor(6*arc/Math.PI);
}
var f, df;
var f = start;
var df = arc / steps;
var points = [];
for (var i=0; i<steps+1; i++) {
var x = pos[0] + Math.sin(f)*r;
var y = pos[1] + Math.cos(f)*r;
points.push([x, y, 0]);
f += df;
}
return points;
}
arc(pos, start, arc, r, clr, half) {
if (clr == undefined) {
clr = [0.9, 0.8, 0.7, 0.6];
}
var steps = 18/(2.0 - arc/(Math.PI*2));
var f, df;
var f = start;
var df = arc / steps;
var points = [];
for (var i=0; i<steps+1; i++) {
var x = pos[0] + Math.sin(f)*r;
var y = pos[1] + Math.cos(f)*r;
points.push([x, y, 0]);
f += df;
}
var lines = [];
var colors = [];
for (var i=0; i<points.length-1; i++) {
lines.push([points[i], points[i+1]])
colors.push([clr, clr])
}
colors[0][0] = [1.0, 1.0, 0.0, 1.0]
colors[0][1] = [1.0, 1.0, 0.0, 1.0]
this.trilist.line_strip(lines, colors, undefined, undefined, half);
}
destroy() {
this.reset();
//get rid of any cache data, too
for (var k in this.cache) {
var arr = this.cache.get(k);
for (var i=0; i<arr.length; i++) {
arr[i].destroy();
arr[i] = undefined;
}
}
this.boxcache.destroy();
this.cache = new hashtable();
if (this._id in active_canvases) {
delete active_canvases[this._id];
}
}
reset() {
/*
for (var i=0; i<this.uncached.length; i++) {
this.uncached[i].destroy();
this.uncached[i] = undefined;
}*/
var dmap = {};
for (var k in this.cache) {
var item = this.cache.get(k);
for (var i=0; i<item.length; i++) {
dmap[item[i]._id] = item[i];
}
}
var dl = this.drawlists;
for (var i=0; i<dl.length; i++) {
if (!(dl[i]._id in dmap)) {
dl[i].destroy();
}
}
if (DEBUG.canvas_sep_text) {
var tl = this.textlist;
for (var i=0; i<tl.length; i++) {
tl[i].destroy();
}
this.textlist.length = 0;
}
this.uncached.length = 0;
this.scissor_stack.length = 0;
/*destroy old cache that was used in last draw cycle, then swap it with
the new cache that was *built* last cycle.*/
for (var k in this.oldcache) {
var arr = this.oldcache.get(k)
for (var i=0; i<arr.length; i++) {
arr[i].destroy();
arr[i] = undefined;
}
}
this.oldcache = this.cache;
this.cache = new hashtable();
this.drawlists.length = 0;
if (this.trans_stack.length > 0) {
this.trans_stack[0].makeIdentity();
this.trans_stack.length = 1;
} else {
this.trans_stack.length = 0;
this.trans_stack.push(new Matrix4());
}
this.transmat = this.trans_stack[0];
this.stack.length = 0;
this.new_trilist();
}
invbox(pos, size, clr, r) {
var cs = uicolors["InvBox"]
cs = _box_process_clr(cs, clr);
this.box(pos, size, cs, r);
}
simple_box(pos, size, clr=undefined, r=2.0) { //clr is optional
var cs = uicolors["SimpleBox"]
cs = _box_process_clr(cs, clr);
this.box(pos, size, cs, r);
}
hlightbox(pos, size, clr_mul, r) { //clr_mul is optional
var cs = uicolors["HLightBox"]
/*if (clr != undefined) {
cs = [clr, clr, clr, clr]
}*/
if (clr_mul != undefined) {
cs = [new Vector4(cs[0]), new Vector4(cs[1]), new Vector4(cs[2]), new Vector4(cs[3])]
for (var i=0; i<4; i++) {
for (var j=0; j<4; j++) {
cs[i][j] *= clr_mul;
}
}
}
this.box(pos, size, cs, r);
}
box_outline(pos, size, clr, rfac) {
this.box(pos, size, clr, rfac, true);
}
shadow_box(pos, size, steps=6, margin=[6, 6], clr=uicolors["ShadowBox"]) {
static neg1 = [-2, -2];
//arg, can't remember the correct formula to use here
//x**steps = 0.1
//x = 0.1**(1.0/steps)
var fac = (1.0/steps)*0.4;
var clr = [clr[0], clr[1], clr[2], clr[3]*fac]
pos = new Vector2(pos);
size = new Vector2(size);
expand_rect2d(pos, size, margin);
for (var i=0; i<steps; i++) {
this.box(pos, size, clr);
expand_rect2d(pos, size, neg1);
}
}
box(pos, size, clr, rfac, outline_only) {
if (IsMobile || rfac == 0.0)
return this.box2(pos, size, clr, rfac, outline_only);
else //XXX
return this.box1(pos, size, clr, rfac, outline_only);
}
/* I think this word is Dutch. it comes from photography,
it means to dim the screen around a rectangle of
interest. need to look up the english word.
and no, I'm not Dutch.
*/
passpart(pos, size, clr=[0,0,0,0.5]) {
this.ensure_trilist();
var p = this.viewport[0];
var s = this.viewport[1];
this.box2([p[0], p[1]], [pos[0], s[1]], clr);
this.box2([p[0]+pos[0]+size[0], p[1]], [s[0]-pos[0]-size[0], s[1]], clr);
this.box2([pos[0]+p[0], pos[1]+p[1]+size[1]], [size[0], s[1]-size[1]-p[1]], clr);
this.box2([pos[0]+p[0], p[1]], [size[0], pos[1]], clr)
}
icon(int icon, Array<float> pos, float alpha=1.0, Boolean small=false,
Array<float> clr=undefined)
{
static white = [[1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1, 1]];
var cs = _box_process_clr(white, clr);
for (var i=0; i<4; i++) {
cs[i][3] = alpha;
}
this.ensure_trilist();
if (this.trilist.small_icons != small) {
this.new_trilist(small);
}
this.trilist.icon_quad(icon, pos, cs);
}
box2(Array<float> pos, Array<float> size, Array<float> clr=undefined, float rfac=undefined, Boolean outline_only=false) {
this.ensure_trilist();
var cs = uicolors["Box"];
cs = _box_process_clr(cs, clr);
var x = pos[0], y=pos[1];
var w=size[0], h=size[1];
if (outline_only) {
this.line([pos[0], pos[1]], [pos[0], pos[1]+size[1]], clr, clr, 1.0);
this.line([pos[0], pos[1]+size[1]], [pos[0]+size[0], pos[1]+size[1]], clr, clr, 1.0);
this.line([pos[0]+size[0], pos[1]+size[1]], [pos[0]+size[0], pos[1]], clr, clr, 1.0);
this.line([pos[0]+size[0], pos[1]], [pos[0], pos[1]], clr, clr, 1.0);
} else {
this.trilist.add_quad(CACHEARR3(x, y, 0), CACHEARR3(x+w, y, 0), CACHEARR3(x+w, y+h, 0), CACHEARR3(x, y+h, 0), cs[0], cs[1], cs[2], cs[3]);
}
}
gen_box_trilist(Array<float> size, Array<float> clr=undefined, float rfac=1, Boolean outline_only=false) {
var w=size[0], h=size[1];
var start = 0, ang = Math.PI/2, r = 4;
var cs = _box_process_clr(uicolors["Box"], clr);
var trilist = new TriList(this, new Matrix4(), false);
r /= rfac;
var p1 = this.arc_points(CACHEARR3(0+r+2, 0+r+2, 0), Math.PI, ang, r);
var p2 = this.arc_points(CACHEARR3(0+w-r-2, 0+r+2, 0), Math.PI/2, ang, r);
var p3 = this.arc_points(CACHEARR3(0+w-r-2, 0+h-r-2, 0), 0, ang, r);
var p4 = this.arc_points(CACHEARR3(0+r+2, 0+h-r-2, 0), -Math.PI/2, ang, r);
var plen = p1.length;
p4.reverse(); p3.reverse();
p2.reverse(); p1.reverse();
var points = [];
for (var i=0; i<p1.length; i++) {
points.push(p1[i]);
}
for (var i=0; i<p2.length; i++) {
points.push(p2[i]);
p1.push(p2[i]);
}
for (var i=0; i<p3.length; i++) {
points.push(p3[i]);
}
p2 = p3;
for (var i=0; i<p4.length; i++) {
p2.push(p4[i]);
points.push(p4[i]);
}
p2.reverse();
var plen = p1.length;
function color(i) {
if (i < plen) return cs[0];
else if (i < plen*2) return cs[1];
else if (i < plen*3) return cs[2];
else if (i <= plen*4+1) return cs[3];
}
static v1 = new Vector3(), v2 = new Vector3(), v3 = new Vector3(), v4 = new Vector3();
#define LOAD_CLR(a, b) a[0] = b[0]; a[1] = b[1]; a[2] = b[2];
if (!outline_only) {
for (var i=0; i<p1.length-1; i++) {
var i1 = i;
var i2 = i+plen*2;
var i3 = i + 1+plen*2;
var i4 = i+1;
LOAD_CLR(v1, p1[i]);
LOAD_CLR(v2, p2[i]);
LOAD_CLR(v3, p2[i+1]);
LOAD_CLR(v4, p1[i+1]);
trilist.add_quad(v1, v2, v3, v4, color(i1), color(i2), color(i3), color(i4));
}
}
var lines = [];
var colors = [];
static pairs = [];
for (var i=0; i<points.length; i++) {
LOAD_CLR(v1, points[(i+1)%points.length]);
LOAD_CLR(v2, points[i]);
if (pairs.length <= i) {
pairs.push([[0, 0], [0, 0]]);
}
pairs[i][0][0] = CACHEARR3(v1[0], v1[1], 0);
pairs[i][0][1] = CACHEARR3(v2[0], v2[1], 0);
lines.push(pairs[i][0]);
pairs[i][1][0] = color((i+1)%points.length);
pairs[i][1][1] = color(i);
colors.push(pairs[i][1]);
}
#undef LOAD_CLR
trilist.line_strip(lines, colors, undefined, outline_only ? 1.4 : 1.5, !outline_only);
return trilist;
}
box1(Array<float> pos, Array<float> size, Array<float> clr=[0, 0, 0, 1],
float rfac=1, Boolean outline_only=false)
{
var sclr = clr==undefined ? "u" : clr.toString();
var hash = size.toString()+sclr+rfac.toString()+(outline_only ? "|1" : "|0");
var cache = this.boxcache;
if (!cache.has(hash)) {
cache.set(hash, this.gen_box_trilist(size, clr, rfac, outline_only));
}
static co = new Vector3();
co.loadxy(pos);
co[2] = 0.0;
co.multVecMatrix(this.transmat);
var viewport = g_app_state.raster.viewport;
var sx = viewport[1][0];
var sy = viewport[1][1];
co[0] = (Math.floor(co[0])/sx)*2.0;// - 1.0;
co[1] = (Math.floor(co[1])/sy)*2.0;// - 1.0;
var mat = new Matrix4();
mat.translate(co[0], co[1], 0.0);
var ret = new TriListRef(this.gl, cache.get(hash), mat, this);
this.drawlists.push(ret);
return ret;
}
box1_old(Array<float> pos, Array<float> size, Array<float> clr=undefined,
float rfac=undefined, Boolean outline_only=false)
{
var c1, c2, c3, c4;
var cs = uicolors["Box"];
static cache = {};
if (outline_only == undefined)
outline_only = false;
cs = _box_process_clr(cs, clr);
var x = Math.floor(pos[0]), y=Math.floor(pos[1]);
var w=size[0], h=size[1];
var start = 0;
var ang = Math.PI/2;
var r = 4 //Math.sqrt(size[0]*size[1])
if (rfac == undefined)
rfac = 1;
var hash = size[0].toString() + " " + size[1] + " " + rfac;
if (!(hash in cache)) {
r /= rfac;
var p1 = this.arc_points(CACHEARR3(0+r+2, 0+r+2, 0), Math.PI, ang, r);
var p2 = this.arc_points(CACHEARR3(0+w-r-2, 0+r+2, 0), Math.PI/2, ang, r);
var p3 = this.arc_points(CACHEARR3(0+w-r-2, 0+h-r-2, 0), 0, ang, r);
var p4 = this.arc_points(CACHEARR3(0+r+2, 0+h-r-2, 0), -Math.PI/2, ang, r);
var plen = p1.length;
p4.reverse();
p3.reverse();
p2.reverse();
p1.reverse();
var points = []
for (var i=0; i<p1.length; i++) {
points.push(p1[i]);
}
for (var i=0; i<p2.length; i++) {
points.push(p2[i]);
p1.push(p2[i]);
}
for (var i=0; i<p3.length; i++) {
points.push(p3[i]);
}
p2 = p3;
for (var i=0; i<p4.length; i++) {
p2.push(p4[i]);
points.push(p4[i]);
}
p2.reverse();
cache[hash] = [p1, p2, points];
}
var cp = cache[hash];
var p1 = cp[0];
var p2 = cp[1];
var points = cp[2];
var plen = p1.length;
function color(i) {
if (i < plen) return cs[0];
else if (i < plen*2) return cs[1];
else if (i < plen*3) return cs[2];
else if (i <= plen*4+1) return cs[3];
}
static v1 = new Vector3(), v2 = new Vector3(), v3 = new Vector3(), v4 = new Vector3();
#define LOAD_CLR(a, b) a[0] = b[0]+x; a[1] = b[1]+y; a[2] = b[2];
if (!outline_only) {
for (var i=0; i<p1.length-1; i++) {
var i1 = i;
var i2 = i+plen*2;
var i3 = i + 1+plen*2;
var i4 = i+1;
LOAD_CLR(v1, p1[i]);
LOAD_CLR(v2, p2[i]);
LOAD_CLR(v3, p2[i+1]);
LOAD_CLR(v4, p1[i+1]);
this.trilist.add_quad(v1, v2, v3, v4, color(i1), color(i2), color(i3), color(i4));
}
}
var lines = []
var colors = []
static pairs = [];
for (var i=0; i<points.length; i++) {
LOAD_CLR(v1, points[(i+1)%points.length]);
LOAD_CLR(v2, points[i]);
if (pairs.length <= i) {
pairs.push([[0, 0], [0, 0]]);
}
pairs[i][0][0] = CACHEARR3(v1[0], v1[1], 0);
pairs[i][0][1] = CACHEARR3(v2[0], v2[1], 0);
lines.push(pairs[i][0]);
pairs[i][1][0] = color((i+1)%points.length);
pairs[i][1][1] = color(i);
colors.push(pairs[i][1]);
}
#undef LOAD_CLR
this.trilist.line_strip(lines, colors, undefined, 4, true);
//this.box2(pos, size, clr, rfac, outline_only);
return this.trilist
}
on_resize(newsize, oldsize) {
  //Viewport size changed: every cached trilist/text buffer was built in
  //the old coordinate space and is now invalid, so flush everything.
  this.boxcache.destroy();
  //all cache entries with old size are now bad
  for (var k in this.textcache) {
    if (!this.textcache.hasOwnProperty(k)) continue;
    this.textcache[k].destroy();
  }
  this.textcache = {};
  this.textcachelen = 0;
  //clear entire cache
  this.destroy();
  //rebuild internal data structures so the canvas is usable again
  this.reset();
}
text(Array<float> pos, String text, Array<float> color, float size,
float scale, float rot, Array<float> scissor_pos, Array<float> scissor_size)
{
static loc = new Vector3();
if (rot == undefined)
rot = 0.0;
if (size == undefined)
size = default_ui_font_size;
if (scale == undefined) {
scale = CACHEARR3(1.0, 1.0, 1.0);
} else if (typeof(scale) == "number") {
scale = CACHEARR3(scale, scale, scale);
}
if (color == undefined) {
color = uicolors["DefaultText"]
}
if (scissor_pos == undefined) {
if (this.scissor_stack.length > 0) {
scissor_pos = this.scissor_stack[this.scissor_stack.length-1][0];
scissor_size = this.scissor_stack[this.scissor_stack.length-1][1];
}
} else {
scissor_pos = new Vector3([scissor_pos[0], scissor_pos[1], 0]);
scissor_size = new Vector3([scissor_size[0], scissor_size[1], 0]);
scissor_pos.multVecMatrix(this.transmat);
}
loc[0] = 0; loc[1] = 0; loc[2] = 0;
loc.multVecMatrix(this.transmat);
loc[0] += pos[0]
loc[1] += pos[1]
//yes, raster is supposed to be a nasty global
var port = g_app_state.raster.viewport
var sx = port[1][0]
var sy = port[1][1]
loc[0] = (Math.floor(loc[0])/sx)*2.0; //*2.0-1.0;
loc[1] = (Math.floor(loc[1])/sy)*2.0; //*2.0-1.0;
var textdraw = new TextDraw(loc, text, color, scissor_pos,
scissor_size, this.viewport, size, scale,
this.global_matrix, rot);
var hash = text.toString() + ">>" + size + "|" + color + "|" + JSON.stringify(this.viewport);
//XXX
// /*
if (!(hash in this.textcache)) {
if (this.textcachelen > this.max_textcache) {
var keys = Object.getOwnPropertyNames(this.textcache)
for (i=0; i<keys.length; i++) {
var k = keys[i];
this.textcache[k].destroy();
var users = this.textcache[k].users;
//possible evil!
for (var j=0; j<users.length; j++) {
users[j].recalc = true;
users[j].tdrawbuf = undefined;
}
delete this.textcache[k];
this.textcachelen--;
//amortize cache destruction calls
if (this.textcachelen < this.max_textcache/3)
break;
}
}
this.textcache[hash] = textdraw.gen_buffers(g_app_state.gl);
this.textcachelen++;
} else {
textdraw.tdrawbuf = this.textcache[hash];
}
this.textcache[hash].users.push(textdraw);
//-XXX
if (DEBUG.canvas_sep_text) {
this.textlist.push(textdraw);
this.drawlists.push(textdraw);
if (this.stack.length == 0) {
this.uncached.push(textdraw);
}
return loc;
} else {
if (this.drawlists[this.drawlists.length-1] == this.trilist) {
this.drawlists.push(textdraw);
this.new_trilist();
if (this.stack.length == 0) {
this.uncached.push(textdraw);
this.uncached.push(this.trilist);
}
} else {
this.drawlists.push(textdraw);
if (this.stack.length == 0) {
this.uncached.push(textdraw);
}
}
// */
return loc;
}
}
}
| joeedh/webblender | src/editors/interface/UICanvas.js | JavaScript | apache-2.0 | 46,545 |
/* Copyright 2014 Jonathan Holland.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Wave
{
    /// <summary>
    /// Filter hook invoked for each outbound message as it is published.
    /// </summary>
    public interface IOutboundMessageFilter
    {
        /// <summary>
        /// Called when a message is published.
        /// </summary>
        /// <param name="routeKey">Routing key the message is published under.</param>
        /// <param name="message">The message payload.</param>
        /// <returns>
        /// NOTE(review): return-value semantics are not visible from this file;
        /// presumably <c>false</c> suppresses publication — confirm against the
        /// bus implementation that invokes this filter.
        /// </returns>
        bool OnMessagePublished(string routeKey, object message);
    }
}
| WaveServiceBus/WaveServiceBus | src/Wave.Core/Interfaces/IOutboundMessageFilter.cs | C# | apache-2.0 | 735 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Data;
namespace CJia.Health.Presenters.Web
{
    /// <summary>
    /// Presenter for the "my picture" web view: subscribes to view events,
    /// queries the model, and pushes the resulting DataTables back into the view.
    /// </summary>
    public class MyPicturePresenter:CJia.Health.Tools.PresenterPage<Models.Web.MyPictureModel,Views.Web.IMyPictureView>
    {
        public MyPicturePresenter(Views.Web.IMyPictureView view)
            : base(view)
        {
            // wire view events to the handlers below
            view.OnLoadPicture += view_OnLoadPicture;
            view.OnProjectChanged += view_OnProjectChanged;
        }

        // Project selection changed: reload pictures filtered by project id.
        void view_OnProjectChanged(object sender, Views.Web.MyPictureArgs e)
        {
            DataTable data = Model.GetMyPictureByProID(e.HealthID, e.ProjectID);
            View.ExeBindPictureByProjectID(data);
        }

        // Initial load: fetch both the project list and the full picture set.
        void view_OnLoadPicture(object sender, Views.Web.MyPictureArgs e)
        {
            DataTable project = Model.GetMyPictureProject(e.HealthID);
            DataTable picture = Model.GetMyPicture(e.HealthID);
            View.ExeBindPicture(picture);
            View.ExeBindProject(project);
        }
    }
}
| leborety/CJia | CJia.HealthFileProject/CJia.Health/Presenters/Web/MyPicturePresenter.cs | C# | apache-2.0 | 1,049 |
/* FAQ widget: loads question/answer pairs from /faq via AJAX and renders
 * them into #faq-container, using the first existing .faq-item as a
 * template. */
$.FAQ = function(){
    $self = this;
    this.url = "/faq"

    //POST `inputs` to this.url as multipart form data and render the reply.
    this.send = function(inputs){
        var params = new FormData();
        // var csrf = $("#csrf").val();
        // params.append("csrf_ID", csrf);
        $.each(inputs, function(key, val){
            params.append(key, val);
        });
        $.ajax({
            url : $self.url,
            type: 'POST',
            async: true,
            processData: false,
            //required alongside processData:false when posting FormData,
            //otherwise jQuery sets a content type without the multipart
            //boundary and the server cannot parse the body
            contentType: false,
            data: params,
            success: function(response){
                response = JSON.parse(response);
                $self.fillQAs(response.qadata);
                $self.createPagination(response.metadata)
            },
        });
    };

    //Render question/answer items into #faq-container.
    this.fillQAs = function(data){
        var qaBox = $("#faq-container .faq-item").clone();
        $("#faq-container .faq-item").remove();
        //bugfix: jQuery's $.each callback receives (index, value); the
        //original declared a single (obj) parameter and so iterated over
        //indices instead of items
        $.each(data, function(idx, obj){
            var $div = qaBox.clone();
            $div.find(".faq-item-question h2").html(obj.question);
            $div.find(".faq-item-answer p").html(obj.answer);
            //bugfix: the cloned item was never attached to the DOM, so
            //nothing was ever rendered
            $("#faq-container").append($div);
        });
    };

    //TODO: pagination controls are not implemented yet.
    this.createPagination = function(metadata){
    };

    //Translate a {limit, page_num} request into a loadFaq action and send it.
    this.load = function(data){
        var limit = (data.limit > 0) ? data.limit : 5;
        var offset = (data.page_num - 1)*limit;
        var inputs = {
            action : 'loadFaq',
            limit : limit,
            offset : offset
        };
        $self.send(inputs);
    };

    //Fetch the first page with the default page size.
    this.init = function(){
        var inputs = {
            limit : 5,
            page_num : 1
        };
        $self.load(inputs);
    };
};
| ajaykiet2/htdocs | assets/js/website/faq.js | JavaScript | apache-2.0 | 1,284 |
/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.testdriver.rule;
import java.math.BigDecimal;
import java.text.MessageFormat;
/**
 * Accepts iff actual decimal is in [ expected + lower-bound, expected + upper-bound ].
 * @since 0.2.0
 */
public class DecimalRange implements ValuePredicate<BigDecimal> {

    private final BigDecimal lowerBound;

    private final BigDecimal upperBound;

    /**
     * Creates a new instance.
     * @param lowerBound lower bound offset from expected value
     * @param upperBound upper bound offset from expected value
     */
    public DecimalRange(BigDecimal lowerBound, BigDecimal upperBound) {
        this.lowerBound = lowerBound;
        this.upperBound = upperBound;
    }

    @Override
    public boolean accepts(BigDecimal expected, BigDecimal actual) {
        if (expected == null || actual == null) {
            throw new IllegalArgumentException();
        }
        // accept when min <= actual <= max, with bounds relative to expected
        BigDecimal min = expected.add(lowerBound);
        BigDecimal max = expected.add(upperBound);
        return min.compareTo(actual) <= 0 && max.compareTo(actual) >= 0;
    }

    @Override
    public String describeExpected(BigDecimal expected, BigDecimal actual) {
        if (expected == null) {
            return "(error)"; //$NON-NLS-1$
        }
        String lower = Util.format(expected.add(lowerBound));
        String upper = Util.format(expected.add(upperBound));
        return MessageFormat.format("{0} ~ {1}", lower, upper); //$NON-NLS-1$
    }
}
| cocoatomo/asakusafw | testing-project/asakusa-test-moderator/src/main/java/com/asakusafw/testdriver/rule/DecimalRange.java | Java | apache-2.0 | 2,011 |